{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998


        Arity and eta expansion
-}

{-# LANGUAGE CPP #-}

-- | Arity and eta expansion
module GHC.Core.Opt.Arity
   ( -- Finding arity
     manifestArity, joinRhsArity, exprArity
   , findRhsArity, cheapArityType
   , ArityOpts(..)

   -- ** Eta expansion
   , exprEtaExpandArity, etaExpand, etaExpandAT

   -- ** Eta reduction
   , tryEtaReduce

   -- ** ArityType
   , ArityType, mkBotArityType
   , arityTypeArity, idArityType

   -- ** Bottoming things
   , exprIsDeadEnd, exprBotStrictness_maybe, arityTypeBotSigs_maybe

   -- ** typeArity and the state hack
   , typeArity, typeOneShots, typeOneShot
   , isOneShotBndr
   , isStateHackType

   -- * Lambdas
   , zapLamBndrs


   -- ** Join points
   , etaExpandToJoinPoint, etaExpandToJoinPointRule

   -- ** Coercions and casts
   , pushCoArg, pushCoArgs, pushCoValArg, pushCoTyArg
   , pushCoercionIntoLambda, pushCoDataCon, collectBindersPushingCo
   )
where

import GHC.Prelude

import GHC.Core
import GHC.Core.FVs
import GHC.Core.Utils
import GHC.Core.DataCon
import GHC.Core.TyCon     ( tyConArity )
import GHC.Core.TyCon.RecWalk     ( initRecTc, checkRecTc )
import GHC.Core.Predicate ( isDictTy, isEvVar, isCallStackPredTy, isCallStackTy )
import GHC.Core.Multiplicity

-- We have two sorts of substitution:
--   GHC.Core.Subst.Subst, and GHC.Core.TyCo.Subst
-- Both have substTy, substCo  Hence need for qualification
import GHC.Core.Subst    as Core
import GHC.Core.Type     as Type
import GHC.Core.Coercion as Type
import GHC.Core.TyCo.Compare( eqType )

import GHC.Types.Demand
import GHC.Types.Cpr( CprSig, mkCprSig, botCpr )
import GHC.Types.Id
import GHC.Types.Var
import GHC.Types.Var.Env
import GHC.Types.Var.Set
import GHC.Types.Basic
import GHC.Types.Tickish

import GHC.Builtin.Types.Prim
import GHC.Builtin.Uniques

import GHC.Data.FastString
import GHC.Data.Graph.UnVar
import GHC.Data.Pair

import GHC.Utils.GlobalVars( unsafeHasNoStateHack )
import GHC.Utils.Constants (debugIsOn)
import GHC.Utils.Outputable
import GHC.Utils.Panic
import GHC.Utils.Misc

import Data.Maybe( isJust )

{-
************************************************************************
*                                                                      *
              manifestArity and exprArity
*                                                                      *
************************************************************************

exprArity is a cheap-and-cheerful version of exprEtaExpandArity.
It tells how many things the expression can be applied to before doing
any work.  It doesn't look inside cases, lets, etc.  The idea is that
exprEtaExpandArity will do the hard work, leaving something that's easy
for exprArity to grapple with.  In particular, Simplify uses exprArity to
compute the ArityInfo for the Id.

Originally I thought that it was enough just to look for top-level lambdas, but
it isn't.  I've seen this

        foo = PrelBase.timesInt

We want foo to get arity 2 even though the eta-expander will leave it
unchanged, in the expectation that it'll be inlined.  But occasionally it
isn't, because foo is blacklisted (used in a rule).

Similarly, see the ok_note check in exprEtaExpandArity.  So
        f = __inline_me (\x -> e)
won't be eta-expanded.

And in any case it seems more robust to have exprArity be a bit more intelligent.
But note that   (\x y z -> f x y z)
should have arity 3, regardless of f's arity.
-}

manifestArity :: CoreExpr -> Arity
-- ^ manifestArity sees how many leading value lambdas there are,
--   after looking through casts
manifestArity (Lam v e) | isId v        = 1 + manifestArity e
                        | otherwise     = manifestArity e
manifestArity (Tick t e) | not (tickishIsCode t) =  manifestArity e
manifestArity (Cast e _)                = manifestArity e
manifestArity _                         = 0

joinRhsArity :: CoreExpr -> JoinArity
-- Join points are supposed to have manifestly-visible
-- lambdas at the top: no ticks, no casts, nothing
-- Moreover, type lambdas count in JoinArity
-- NB: For non-recursive bindings, the join arity of the binding may actually be
-- less than the number of manifestly-visible lambdas.
-- See Note [Join arity prediction based on joinRhsArity] in GHC.Core.Opt.OccurAnal
joinRhsArity (Lam _ e) = 1 + joinRhsArity e
joinRhsArity _         = 0


---------------
exprBotStrictness_maybe :: CoreExpr -> Maybe (Arity, DmdSig, CprSig)
-- A cheap and cheerful function that identifies bottoming functions
-- and gives them suitable strictness and CPR signatures.
-- It's used during float-out
exprBotStrictness_maybe e = arityTypeBotSigs_maybe (cheapArityType e)

arityTypeBotSigs_maybe :: ArityType ->  Maybe (Arity, DmdSig, CprSig)
-- Arity of a divergent function
arityTypeBotSigs_maybe (AT lams div)
  | isDeadEndDiv div = Just ( arity
                            , mkVanillaDmdSig arity botDiv
                            , mkCprSig arity botCpr)
  | otherwise        = Nothing
  where
    arity = length lams


{- Note [exprArity for applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we come to an application we check that the arg is trivial.
   eg  f (fac x) does not have arity 2,
                 even if f has arity 3!

* We require that it is trivial rather than merely cheap.  Suppose f has arity 2.
  Then    f (Just y)
  has arity 0, because if we gave it arity 1 and then inlined f we'd get
          let v = Just y in \w. <f-body>
  which has arity 0.  And we try to maintain the invariant that we don't
  have arity decreases.

*  The `max 0` is important!  (\x y -> f x) has arity 2, even if f is
   unknown and hence has arity 0.
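
   For example (an illustrative summary of the points above, not a
   transcript of the analysis):
      exprArity (\x y z -> f x y z)  = 3   -- counts the top-level value lambdas
      exprArity (f (fac x))          = 0   -- the argument is not trivial
      exprArity (\x y -> f x)        = 2   -- the `max 0` keeps both lambdas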


************************************************************************
*                                                                      *
              typeArity and the "state hack"
*                                                                      *
********************************************************************* -}


typeArity :: Type -> Arity
-- ^ (typeArity ty) says how many arrows GHC can expose in 'ty', after
-- looking through newtypes.  More generally, (typeOneShots ty) returns
-- ty's [OneShotInfo], based only on the type itself, using typeOneShot
-- on the argument type to access the "state hack".
typeArity = length . typeOneShots

typeOneShots :: Type -> [OneShotInfo]
-- How many value arrows are visible in the type?
-- We look through foralls, and newtypes
-- See Note [Arity invariants for bindings]
typeOneShots ty
  = go initRecTc ty
  where
    go rec_nts ty
      | Just (tcv, ty')  <- splitForAllTyCoVar_maybe ty
      = if isCoVar tcv
        then idOneShotInfo tcv : go rec_nts ty'
        else go rec_nts ty'

      | Just (_,_,arg,res) <- splitFunTy_maybe ty
      = typeOneShot arg : go rec_nts res

      | Just (tc,tys) <- splitTyConApp_maybe ty
      , Just (ty', _) <- instNewTyCon_maybe tc tys
      , Just rec_nts' <- checkRecTc rec_nts tc  -- See Note [Expanding newtypes and products]
                                                -- in GHC.Core.TyCon
--   , not (isClassTyCon tc)    -- Do not eta-expand through newtype classes
--                              -- See Note [Newtype classes and eta expansion]
--                              (no longer required)
      = go rec_nts' ty'
        -- Important to look through non-recursive newtypes, so that, eg
        --      (f x)   where f has arity 2, f :: Int -> IO ()
        -- Here we want to get arity 1 for the result!
        --
        -- AND through a layer of recursive newtypes
        -- e.g. newtype Stream m a b = Stream (m (Either b (a, Stream m a b)))

      | otherwise
      = []
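
-- For example (an illustrative sketch, assuming the standard IO newtype):
--   typeOneShots (Int -> IO Bool) = [NoOneShotInfo, OneShotLam]
-- because IO expands to State# RealWorld -> (# State# RealWorld, Bool #),
-- and the State# argument is one-shot under the state hack; hence
-- typeArity (Int -> IO Bool) = 2.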

typeOneShot :: Type -> OneShotInfo
typeOneShot ty
   | isStateHackType ty = OneShotLam
   | otherwise          = NoOneShotInfo

-- | Like 'idOneShotInfo', but taking the Horrible State Hack into account
-- See Note [The state-transformer hack] in "GHC.Core.Opt.Arity"
idStateHackOneShotInfo :: Id -> OneShotInfo
idStateHackOneShotInfo id
    | isStateHackType (idType id) = OneShotLam
    | otherwise                   = idOneShotInfo id

-- | Returns whether the lambda associated with the 'Id' is
--   certainly applied at most once
-- This one is the "business end", called externally.
-- It works on type variables as well as Ids, returning True
-- Its main purpose is to encapsulate the Horrible State Hack
-- See Note [The state-transformer hack] in "GHC.Core.Opt.Arity"
isOneShotBndr :: Var -> Bool
isOneShotBndr var
  | isTyVar var                              = True
  | OneShotLam <- idStateHackOneShotInfo var = True
  | otherwise                                = False

isStateHackType :: Type -> Bool
isStateHackType ty
  | unsafeHasNoStateHack   -- Switch off with -fno-state-hack
  = False
  | otherwise
  = case tyConAppTyCon_maybe ty of
        Just tycon -> tycon == statePrimTyCon
        _          -> False
        -- This is a gross hack.  It claims that
        -- every function over realWorldStatePrimTy is a one-shot
        -- function.  This is pretty true in practice, and makes a big
        -- difference.  For example, consider
        --      a `thenST` \ r -> ...E...
        -- The early full laziness pass, if it doesn't know that r is one-shot
        -- will pull out E (let's say it doesn't mention r) to give
        --      let lvl = E in a `thenST` \ r -> ...lvl...
        -- When `thenST` gets inlined, we end up with
        --      let lvl = E in \s -> case a s of (r, s') -> ...lvl...
        -- and we don't re-inline E.
        --
        -- It would be better to spot that r was one-shot to start with, but
        -- I don't want to rely on that.
        --
        -- Another good example is in fill_in in PrelPack.hs.  We should be able to
        -- spot that fill_in has arity 2 (and when Keith is done, we will) but we can't yet.


{- Note [Arity invariants for bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have the following invariants for let-bindings

  (1) In any binding f = e,
         idArity f <= typeArity (idType f)
      We enforce this with trimArityType, called in findRhsArity;
      see Note [Arity trimming].

      Note that we enforce this only for /bindings/.  We do /not/ insist that
         arityTypeArity (arityType e) <= typeArity (exprType e)
      because that is quite a bit more expensive to guarantee; it would
      mean checking at every Cast in the recursive arityType, for example.

  (2) If typeArity (exprType e) = n,
      then manifestArity (etaExpand e n) = n

      That is, etaExpand can always expand as much as typeArity says
      (or less, of course). So the case analysis in etaExpand and in
      typeArity must match.

      Consequence: because of (1), if we eta-expand to (idArity f), we will
      end up with n manifest lambdas.

   (3) In any binding f = e,
         idArity f <= arityTypeArity (safeArityType (arityType e))
       That is, we call safeArityType before attributing e's arityType to f.
       See Note [SafeArityType].

       So we call safeArityType in findRhsArity.

Suppose we have
   f :: Int -> Int -> Int
   f x y = x+y    -- Arity 2

   g :: F Int
   g = case <cond> of { True  -> f |> co1
                      ; False -> g |> co2 }

where F is a type family.  Now, we can't eta-expand g to have arity 2,
because etaExpand, which works off the /type/ of the expression
(albeit looking through newtypes), doesn't know how to make an
eta-expanded binding
   g = (\a b. case x of ...) |> co
because it can't make up `co` or the types of `a` and `b`.

So invariant (1) ensures that every binding has an arity that is no greater
than the typeArity of the RHS; and invariant (2) ensures that etaExpand
can handle what typeArity says.

Why is this important?  Because

  - In GHC.Iface.Tidy we use exprArity/manifestArity to fix the *final
    arity* of each top-level Id, and

  - In CorePrep we use etaExpand on each rhs, so that the visible
    lambdas actually match that arity, which in turn means that the
    StgRhs has a number of lambdas that precisely matches the arity.

Note [Arity trimming]
~~~~~~~~~~~~~~~~~~~~~
Invariant (1) of Note [Arity invariants for bindings] is upheld by findRhsArity,
which calls trimArityType to trim the ArityType to match the Arity of the
binding.  Failing to do so, and hence breaking invariant (1) led to #5441.

How to trim?  If we end in topDiv, it's easy.  But we must take great care with
dead ends (i.e. botDiv). Suppose the expression was (\x y. error "urk"),
we'll get \??.⊥.  We absolutely must not trim that to \?.⊥, because that
claims that ((\x y. error "urk") |> co) diverges when given one argument,
which it absolutely does not. And Bad Things happen if we think something
returns bottom when it doesn't (#16066).

So, if we need to trim a dead-ending arity type, switch (conservatively) to
topDiv.
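
For example, in the compact notation of Note [ArityType] (illustrative):
   trimming \??.⊥ to arity 1 gives \?.T, not \?.⊥
so after trimming we no longer claim that the result diverges.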

Historical note: long ago, we unconditionally switched to topDiv when we
encountered a cast, but that is far too conservative: see #5475

Note [Newtype classes and eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    NB: this nasty special case is no longer required, because
    for newtype classes we don't use the class-op rule mechanism
    at all.  See Note [Single-method classes] in GHC.Tc.TyCl.Instance. SLPJ May 2013

-------- Old out of date comments, just for interest -----------
We have to be careful when eta-expanding through newtypes.  In general
it's a good idea, but annoyingly it interacts badly with the class-op
rule mechanism.  Consider

   class C a where { op :: a -> a }
   instance C b => C [b] where
     op x = ...

These translate to

   co :: forall a. (a->a) ~ C a

   $copList :: C b -> [b] -> [b]
   $copList d x = ...

   $dfList :: C b -> C [b]
   {-# DFunUnfolding = [$copList] #-}
   $dfList d = $copList d |> co@[b]

Now suppose we have:

   dCInt :: C Int

   blah :: [Int] -> [Int]
   blah = op ($dfList dCInt)

Now we want the built-in op/$dfList rule to fire to give
   blah = $copList dCInt

But with eta-expansion 'blah' might (and in #3772, which is
slightly more complicated, does) turn into

   blah = op (\eta. ($dfList dCInt |> sym co) eta)

and now it is *much* harder for the op/$dfList rule to fire, because
exprIsConApp_maybe won't hold of the argument to op.  I considered
trying to *make* it hold, but it's tricky and I gave up.

The test simplCore/should_compile/T3722 is an excellent example.
-------- End of old out of date comments, just for interest -----------
-}

{- ********************************************************************
*                                                                      *
                  Zapping lambda binders
*                                                                      *
********************************************************************* -}

zapLamBndrs :: FullArgCount -> [Var] -> [Var]
-- If (\xyz. t) appears under-applied to only two arguments,
-- we must zap the occ-info on x,y, because they appear (in 't') under the \z.
-- See Note [Occurrence analysis for lambda binders] in GHC.Core.Opt.OccurAnal
--
-- NB: both `arg_count` and `bndrs` include both type and value args/bndrs
zapLamBndrs arg_count bndrs
  | no_need_to_zap = bndrs
  | otherwise      = zap_em arg_count bndrs
  where
    no_need_to_zap = all isOneShotBndr (drop arg_count bndrs)

    zap_em :: FullArgCount -> [Var] -> [Var]
    zap_em 0 bs = bs
    zap_em _ [] = []
    zap_em n (b:bs) | isTyVar b = b              : zap_em (n-1) bs
                    | otherwise = zapLamIdInfo b : zap_em (n-1) bs
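
-- An illustrative example (not from the compiler): with the under-applied
-- lambda (\x y z. t) and arg_count = 2 we get
--    zapLamBndrs 2 [x,y,z] = [zap x, zap y, z]
-- i.e. the occ-info on x and y is zapped and z is left alone; and if all of
-- the leftover binders (here just z) are one-shot, nothing is zapped at all.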


{- *********************************************************************
*                                                                      *
           Computing the "arity" of an expression
*                                                                      *
************************************************************************

Note [Definition of arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The "arity" of an expression 'e' is n if
   applying 'e' to *fewer* than n *value* arguments
   converges rapidly

Or, to put it another way

   there is no work lost in duplicating the partial
   application (e x1 .. x(n-1))

In the divergent case, no work is lost by duplicating because if the thing
is evaluated once, that's the end of the program.

Or, to put it another way, in any context C

   C[ (\x1 .. xn. e x1 .. xn) ]
         is as efficient as
   C[ e ]
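
For example (illustrative):
   \x y -> x + y    has arity 2: duplicating its partial applications loses no work
   fac 1000         has arity 0: duplicating it would duplicate the call to fac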

It's all a bit more subtle than it looks:

Note [One-shot lambdas]
~~~~~~~~~~~~~~~~~~~~~~~
Consider one-shot lambdas
                let x = expensive in \y z -> E
We want this to have arity 1 if the \y-abstraction is a 1-shot lambda.

Note [Dealing with bottom]
~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC does some transformations that are technically unsound wrt
bottom, because doing so improves arities... a lot!  We describe
them in this Note.

The flag -fpedantic-bottoms (off by default) restores technically
correct behaviour at the cost of efficiency.

It's mostly to do with eta-expansion.  Consider

   f = \x -> case x of
               True  -> \s -> e1
               False -> \s -> e2

This happens all the time when f :: Bool -> IO ()
In this case we do eta-expand, in order to get that \s to the
top, and give f arity 2.

This isn't really right in the presence of seq.  Consider
        (f bot) `seq` 1

This should diverge!  But if we eta-expand, it won't.  We ignore this
"problem" (unless -fpedantic-bottoms is on), because being scrupulous
would lose an important transformation for many programs. (See
#5587 for an example.)

Consider also
        f = \x -> error "foo"
Here, arity 1 is fine.  But if it looks like this (see #22068)
        f = \x -> case x of
                        True  -> error "foo"
                        False -> \y -> x+y
then we want to get arity 2.  Technically, this isn't quite right, because
        (f True) `seq` 1
should diverge, but it'll converge if we eta-expand f.  Nevertheless, we
do so; it improves some programs significantly, and increasing convergence
isn't a bad thing.  Hence the ABot/ATop in ArityType.

So these two transformations aren't always the Right Thing, and we
have several tickets reporting unexpected behaviour resulting from
this transformation.  So we try to limit it as much as possible:

 (1) Do NOT move a lambda outside a known-bottom case expression
       case undefined of { (a,b) -> \y -> e }
     This showed up in #5557

 (2) Do NOT move a lambda outside a case unless
     (a) The scrutinee is ok-for-speculation, or
     (b) more liberally: the scrutinee is cheap (e.g. a variable), and
         -fpedantic-bottoms is not enforced (see #2915 for an example)

Of course both (1) and (2) are readily defeated by disguising the bottoms.

There also is an interaction with Note [Combining arity type with demand info],
outlined in Wrinkle (CAD1).

Note [Newtype arity]
~~~~~~~~~~~~~~~~~~~~
Non-recursive newtypes are transparent, and should not get in the way.
We do (currently) eta-expand recursive newtypes too.  So if we have, say

        newtype T = MkT ([T] -> Int)

Suppose we have
        e = coerce T f
where f has arity 1.  Then: etaExpandArity e = 1;
that is, etaExpandArity looks through the coerce.

When we eta-expand e to arity 1: eta_expand 1 e T
we want to get:                  coerce T (\x::[T] -> (coerce ([T]->Int) e) x)

  HOWEVER, note that if you use coerce bogusly you can get
        coerce Int negate
  And since negate has arity 2, you might try to eta expand.  But you can't
  decompose Int to a function type.   Hence the final case in eta_expand.

Note [The state-transformer hack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
        f = e
where e has arity n.  Then, if we know from the context that f has
a usage type like
        t1 -> ... -> tn -1-> t(n+1) -1-> ... -1-> tm -> ...
then we can expand the arity to m.  This usage type says that
any application (x e1 .. en) will be applied uniquely to (m-n) more args
Consider f = \x. let y = <expensive>
                 in case x of
                      True  -> foo
                      False -> \(s:RealWorld) -> e
where foo has arity 1.  Then we want the state hack to
apply to foo too, so we can eta expand the case.

Then we expect that if f is applied to one arg, it'll be applied to two
(that's the hack -- we don't really know, and sometimes it's false)
See also Id.isOneShotBndr.

Note [State hack and bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's a terrible idea to use the state hack on a bottoming function.
Here's what happens (#2861):

  f :: String -> IO T
  f = \p. error "..."

Eta-expand, using the state hack:

  f = \p. (\s. ((error "...") |> g1) s) |> g2
  g1 :: IO T ~ (S -> (S,T))
  g2 :: (S -> (S,T)) ~ IO T

Extrude the g2

  f' = \p. \s. ((error "...") |> g1) s
  f = f' |> (String -> g2)

Discard args for bottoming function

  f' = \p. \s. ((error "...") |> g1 |> g3
  g3 :: (S -> (S,T)) ~ (S,T)

Extrude g1.g3

  f'' = \p. \s. (error "...")
  f' = f'' |> (String -> S -> g1.g3)

And now we can repeat the whole loop.  Aargh!  The bug is in applying the
state hack to a function which then swallows the argument.

This arose in another guise in #3959.  Here we had

     catch# (throw exn >> return ())

Note that (throw :: forall a e. Exn e => e -> a) is called with [a = IO ()].
After inlining (>>) we get

     catch# (\_. throw {IO ()} exn)

We must *not* eta-expand to

     catch# (\_ _. throw {...} exn)

because 'catch#' expects to get a (# _,_ #) after applying its argument to
a State#, not another function!

In short, we use the state hack to allow us to push let inside a lambda,
but not to introduce a new lambda.
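
That is (an illustrative contrast):
   OK:      let x = <expensive> in \s{os}. blah  ==>  \s{os}. let x = <expensive> in blah
   not OK:  (\_. throw exn)   ==>   (\_ _. throw exn)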


Note [ArityType]
~~~~~~~~~~~~~~~~
ArityType can be thought of as an abstraction of an expression.
The ArityType
   AT [ (IsCheap,     NoOneShotInfo)
      , (IsExpensive, OneShotLam)
      , (IsCheap,     OneShotLam) ] Dunno

abstracts an expression like
   \x. let <expensive> in
       \y{os}.
       \z{os}. blah

In general we have (AT lams div).  Then
* In lams :: [(Cost,OneShotInfo)]
  * The Cost flag describes the part of the expression down
    to the first (value) lambda.
  * The OneShotInfo flag gives the one-shot info on that lambda.

* If 'div' is dead-ending ('isDeadEndDiv'), then application to
  'length lams' arguments will surely diverge, similar to the situation
  with 'DmdType'.

ArityType is the result of a compositional analysis on expressions,
from which we can decide the real arity of the expression (extracted
with function exprEtaExpandArity).

We use the following notation:
  at  ::= \p1..pn.div
  div ::= T | x | ⊥
  p   ::= (c o)
  c   ::= X | C    -- Expensive or Cheap
  o   ::= ? | 1    -- NotOneShot or OneShotLam
We may omit the \. if n = 0.
And ⊥ stands for `AT [] botDiv`

Here is an example demonstrating the notation:
  \(C?)(X1)(C1).T
stands for
   AT [ (IsCheap,NoOneShotInfo)
      , (IsExpensive,OneShotLam)
      , (IsCheap,OneShotLam) ]
      topDiv

See the 'Outputable' instance for more information. It's pretty simple.

How can we use ArityType?  Example:
      f = \x\y. let v = <expensive> in
          \s(one-shot) \t(one-shot). blah
      'f' has arity type \(C?)(C?)(X1)(C1).T
      The one-shot-ness means we can, in effect, push that
      'let' inside the \st, and expand to arity 4

Suppose f = \xy. x+y
Then  f             :: \(C?)(C?).T
      f v           :: \(C?).T
      f <expensive> :: \(X?).T
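
      (safeArityType would trim \(X?).T to arity 0: eta-expanding
      (f <expensive>) would push <expensive> under the new lambda
      and risk duplicating it.)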

Here is what the fields mean. If an arbitrary expression 'f' has
ArityType 'at', then

 * If @at = AT [o1,..,on] botDiv@ (notation: \o1..on.⊥), then @f x1..xn@
   definitely diverges. Partial applications to fewer than n args may *or
   may not* diverge.  Ditto exnDiv.

 * If `f` has ArityType `at`, we can eta-expand `f` to have (arityTypeOneShots at)
   arguments without losing sharing.  This requires that either there are no
   expensive expressions, or the lambdas are one-shots.

   NB 'f' is an arbitrary expression, eg @f = g e1 e2@.  This 'f' can have
   arity type @AT oss _@, with @length oss > 0@, only if e1 e2 are themselves
   cheap.

 * In both cases, @f@, @f x1@, ... @f x1 ... x(n-1)@ are definitely
   really functions, or bottom, but *not* casts from a data type, in
   at least one case branch.  (If it's a function in one case branch but
   an unsafe cast from a data type in another, the program is bogus.)
   So eta expansion is dynamically ok; see Note [State hack and
   bottoming functions], the part about catch#

Wrinkles

* Wrinkle [Bottoming functions]: see function 'arityLam'.
  We treat bottoming functions as one-shot, because there is no point
  in floating work outside the lambda, and it's fine to float it inside.

  For example, this is fine (see test stranal/sigs/BottomFromInnerLambda)
       let x = <expensive> in \y. error (g x y)
       ==> \y. let x = <expensive> in error (g x y)

  Idea: perhaps we could enforce this invariant with
     data ArityType = TopAT [(Cost, OneShotInfo)] | DivAT [Cost]


Note [SafeArityType]
~~~~~~~~~~~~~~~~~~~~
The function safeArityType trims an ArityType to return a "safe" ArityType,
for which we use a type synonym SafeArityType.  It is "safe" in the sense
that (arityTypeArity at) really reflects the arity of the expression, whereas
a regular ArityType might have more lambdas in its [ATLamInfo] than the
(cost-free) arity of the expression.

For example
   \x.\y.let v = expensive in \z. blah
has
   arityType = AT [C?, C?, X?, C?] Top
But the expression actually has arity 2, not 4, because of the X.
So safeArityType will trim it to (AT [C?, C?] Top), whose [ATLamInfo]
now reflects the (cost-free) arity of the expression
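
In the compact notation of Note [ArityType] (illustrative):
   safeArityType \(C?)(C?)(X?)(C?).T  =  \(C?)(C?).T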

Why do we ever need an "unsafe" ArityType, such as the example above?
Because its (cost-free) arity may be increased by combineWithCallCards
in findRhsArity. See Note [Combining arity type with demand info].

Thus the function `arityType` returns a regular "unsafe" ArityType, that
goes deeply into the lambdas (including under IsExpensive). But that is
very local; most ArityTypes are indeed "safe".  We use the type synonym
SafeArityType to indicate where we believe the ArityType is safe.
-}

-- | The analysis lattice of arity analysis. It is isomorphic to
--
-- @
--    data ArityType'
--      = AEnd Divergence
--      | ALam OneShotInfo ArityType'
-- @
--
-- Which is easier to display the Hasse diagram for:
--
-- @
--  ALam OneShotLam at
--          |
--      AEnd topDiv
--          |
--  ALam NoOneShotInfo at
--          |
--      AEnd exnDiv
--          |
--      AEnd botDiv
-- @
--
-- where the @at@ fields of @ALam@ are inductively subject to the same order.
-- That is, @ALam os at1 < ALam os at2@ iff @at1 < at2@.
--
-- Why the strange Top element?
--   See Note [Combining case branches: optimistic one-shot-ness]
--
-- We rely on this lattice structure for fixed-point iteration in
-- 'findRhsArity'. For the semantics of 'ArityType', see Note [ArityType].
data ArityType  -- See Note [ArityType]
  = AT ![ATLamInfo] !Divergence
    -- ^ `AT oss div` is an abstraction of the expression, which describes
    -- its lambdas, and how much work appears where.
    -- See Note [ArityType] for more information
    --
    -- If `div` is dead-ending ('isDeadEndDiv'), then application to
    -- `length oss` arguments will surely diverge, similar to the situation
    -- with 'DmdType'.
  deriving Eq

type ATLamInfo = (Cost,OneShotInfo)
     -- ^ Info about one lambda in an ArityType
     -- See Note [ArityType]

type SafeArityType = ArityType -- See Note [SafeArityType]

data Cost = IsCheap | IsExpensive
          deriving( Eq )

allCosts :: (a -> Cost) -> [a] -> Cost
allCosts f xs = foldr (addCost . f) IsCheap xs

addCost :: Cost -> Cost -> Cost
addCost IsCheap IsCheap = IsCheap
addCost _       _       = IsExpensive

-- | This is the BNF of the generated output:
--
-- @
--   at  ::= \p1..pn.div
--   div ::= T | x | ⊥
--   p   ::= (c o)
--   c   ::= C | X
--   o   ::= 1 | ?
-- @
--
-- We format
-- @AT [o1,..,on] topDiv@ as @\o1..on.T@ and
-- @AT [o1,..,on] botDiv@ as @\o1..on.⊥@, respectively.
-- More concretely, @AT [(IsCheap,NOI),(IsCheap,OS),(IsCheap,OS)] topDiv@ is formatted as @\(C?)(C1)(C1).T@.
-- If the one-shot info is empty, we omit the leading @\.@.
instance Outputable ArityType where
  ppr (AT oss div)
    | null oss  = pp_div div
    | otherwise = char '\\' <> hcat (map pp_os oss) <> dot <> pp_div div
    where
      pp_div Diverges = char '⊥'
      pp_div ExnOrDiv = char 'x'
      pp_div Dunno    = char 'T'
      pp_os (IsCheap,     OneShotLam)    = text "(C1)"
      pp_os (IsExpensive, OneShotLam)    = text "(X1)"
      pp_os (IsCheap,     NoOneShotInfo) = text "(C?)"
      pp_os (IsExpensive, NoOneShotInfo) = text "(X?)"

mkBotArityType :: [OneShotInfo] -> ArityType
mkBotArityType oss = AT [(IsCheap,os) | os <- oss] botDiv

botArityType :: ArityType
botArityType = mkBotArityType []

topArityType :: ArityType
topArityType = AT [] topDiv

-- | The number of value args for the arity type
arityTypeArity :: SafeArityType -> Arity
arityTypeArity (AT lams _) = length lams

arityTypeOneShots :: SafeArityType -> [OneShotInfo]
-- Returns a list only as long as the arity should be
arityTypeOneShots (AT lams _) = map snd lams

safeArityType :: ArityType -> SafeArityType
-- ^ Assuming this ArityType is all we know, find the arity of
-- the function, and trim the argument info (and Divergence)
-- to match that arity. See Note [SafeArityType]
safeArityType at@(AT lams _)
  = case go 0 IsCheap lams of
      Nothing -> at  -- No trimming needed
      Just ar -> AT (take ar lams) topDiv
 where
   go :: Arity -> Cost -> [(Cost,OneShotInfo)] -> Maybe Arity
   go _ _ [] = Nothing
   go ar ch1 ((ch2,os):lams)
      = case (ch1 `addCost` ch2, os) of
          (IsExpensive, NoOneShotInfo) -> Just ar
          (ch,          _)             -> go (ar+1) ch lams

infixl 2 `trimArityType`

trimArityType :: Arity -> ArityType -> ArityType
-- ^ Trim an arity type so that it has at most the given arity.
-- Any excess 'OneShotInfo's are truncated to 'topDiv', even if
-- they end in 'ABot'.  See Note [Arity trimming]
trimArityType max_arity at@(AT lams _)
  | lams `lengthAtMost` max_arity = at
  | otherwise                     = AT (take max_arity lams) topDiv

data ArityOpts = ArityOpts
  { ao_ped_bot     :: !Bool -- See Note [Dealing with bottom]
  , ao_dicts_cheap :: !Bool -- See Note [Eta expanding through dictionaries]
  }

-- | The Arity returned is the number of value args the
-- expression can be applied to without doing much work
exprEtaExpandArity :: HasDebugCallStack => ArityOpts -> CoreExpr -> Maybe SafeArityType
-- exprEtaExpandArity is used when eta expanding
--      e  ==>  \xy -> e x y
-- Nothing if the expression has arity 0
exprEtaExpandArity opts e
  | AT [] _ <- arity_type
  = Nothing
  | otherwise
  = Just arity_type
  where
    arity_type = safeArityType (arityType (findRhsArityEnv opts False) e)


{- *********************************************************************
*                                                                      *
                   findRhsArity
*                                                                      *
********************************************************************* -}

findRhsArity :: ArityOpts -> RecFlag -> Id -> CoreExpr
             -> (Bool, SafeArityType)
-- This implements the fixpoint loop for arity analysis
-- See Note [Arity analysis]
--
-- The Bool is True if the returned arity is greater than (exprArity rhs)
--     so the caller should do eta-expansion
-- That Bool is never True for join points, which are never eta-expanded
--
-- Returns a SafeArityType that is guaranteed trimmed to typeArity of 'bndr'
--         See Note [Arity trimming]

findRhsArity opts is_rec bndr rhs
  | isJoinId bndr
  = (False, join_arity_type)
    -- False: see Note [Do not eta-expand join points]
    -- But do return the correct arity and bottom-ness, because
    -- these are used to set the bndr's IdInfo (#15517)
    -- Note [Invariants on join points] invariant 2b, in GHC.Core

  | otherwise
  = (arity_increased, non_join_arity_type)
    -- arity_increased: eta-expand if we'll get more lambdas
    -- to the top of the RHS
  where
    old_arity = exprArity rhs

    init_env :: ArityEnv
    init_env = findRhsArityEnv opts (isJoinId bndr)

    -- Non-join-points only
    non_join_arity_type = case is_rec of
                             Recursive    -> go 0 botArityType
                             NonRecursive -> step init_env
    arity_increased = arityTypeArity non_join_arity_type > old_arity

    -- Join-points only
    -- See Note [Arity for non-recursive join bindings]
    -- and Note [Arity for recursive join bindings]
    join_arity_type = case is_rec of
                         Recursive    -> go 0 botArityType
                         NonRecursive -> trimArityType ty_arity (cheapArityType rhs)

    ty_arity       = typeArity (idType bndr)
    use_call_cards = useSiteCallCards bndr

    step :: ArityEnv -> SafeArityType
    step env = trimArityType ty_arity $
               safeArityType $ -- See Note [Arity invariants for bindings], item (3)
               combineWithCallCards env (arityType env rhs) use_call_cards
       -- trimArityType: see Note [Trim arity inside the loop]
       -- combineWithCallCards: take account of the demand on the
       -- binder.  Perhaps it is always called with 2 args
       --   let f = \x. blah in (f 3 4, f 1 9)
       -- f's demand-info says how many args it is called with

    -- The fixpoint iteration (go), done for recursive bindings. We
    -- always do one step, but usually that produces a result equal
    -- to old_arity, and then we stop right away, because old_arity
    -- is assumed to be sound. In other words, arities should never
    -- decrease.  Result: the common case is that there is just one
    -- iteration
    go :: Int -> SafeArityType -> SafeArityType
    go !n cur_at@(AT lams div)
      | not (isDeadEndDiv div)           -- the "stop right away" case
      , length lams <= old_arity = cur_at -- from above
      | next_at == cur_at        = cur_at
      | otherwise
         -- Warn if more than 2 iterations. Why 2? See Note [Exciting arity]
      = warnPprTrace (debugIsOn && n > 2)
            "Exciting arity"
            (nest 2 (ppr bndr <+> ppr cur_at <+> ppr next_at $$ ppr rhs)) $
        go (n+1) next_at
      where
        next_at = step (extendSigEnv init_env bndr cur_at)

combineWithCallCards :: ArityEnv -> ArityType -> [Card] -> ArityType
-- See Note [Combining arity type with demand info]
combineWithCallCards env at@(AT lams div) cards
  | null lams = at
  | otherwise = AT (zip_lams lams oss) div
  where
    oss = map card_to_oneshot cards
    card_to_oneshot n
      | isAtMostOnce n, not (pedanticBottoms env)
         -- Take care for -fpedantic-bottoms;
         -- see Note [Combining arity type with demand info], Wrinkle (CAD1)
      = OneShotLam
      | n == C_11
         -- Safe to eta-expand even in the presence of -fpedantic-bottoms
         -- see Note [Combining arity type with demand info], Wrinkle (CAD1)
      = OneShotLam
      | otherwise
      = NoOneShotInfo

    zip_lams :: [ATLamInfo] -> [OneShotInfo] -> [ATLamInfo]
    zip_lams lams []  = lams
    zip_lams []   oss | isDeadEndDiv div = []
                      | otherwise        = [ (IsExpensive,OneShotLam)
                                           | _ <- takeWhile isOneShotInfo oss]
    zip_lams ((ch,os1):lams) (os2:oss)
      = (ch, os1 `bestOneShot` os2) : zip_lams lams oss

useSiteCallCards :: Id -> [Card]
useSiteCallCards bndr
  = call_arity_one_shots `zip_cards` dmd_one_shots
  where
    call_arity_one_shots :: [Card]
    call_arity_one_shots
      | call_arity == 0 = []
      | otherwise       = C_0N : replicate (call_arity-1) C_01
    -- Call Arity analysis says /however often the function is called/, it is
    -- always applied to this many arguments.
    -- The first C_0N is because of the "however often it is called" part.
    -- Thus if Call Arity says "always applied to 3 args" then the one-shot info
    -- we get is [C_0N, C_01, C_01]
    call_arity = idCallArity bndr

    dmd_one_shots :: [Card]
    -- If the demand info is C(x,C(1,C(1,.))) then we know that an
    -- application to one arg is also an application to three
    dmd_one_shots = case idDemandInfo bndr of
      AbsDmd  -> [] -- There is no use in eta expanding
      BotDmd  -> [] -- when the binding could be dropped instead
      _ :* sd -> callCards sd

    -- Take the *longer* list
    zip_cards (n1:ns1) (n2:ns2) = (n1 `glbCard` n2) : zip_cards ns1 ns2
    zip_cards []       ns2      = ns2
    zip_cards ns1      []       = ns1

{- Note [Arity analysis]
~~~~~~~~~~~~~~~~~~~~~~~~
The motivating example for arity analysis is this:

  f = \x. let g = f (x+1)
          in \y. ...g...

What arity does f have?  Really it should have arity 2, but a naive
look at the RHS won't see that.  You need a fixpoint analysis which
says it has arity "infinity" the first time round.

This example happens a lot; it first showed up in Andy Gill's thesis,
fifteen years ago!  It also shows up in the code for 'rnf' on lists
in #4138.

We do the necessary, quite simple fixed-point iteration in 'findRhsArity',
which assumes for a single binding 'ABot' on the first run and iterates
until it finds a stable arity type. Two wrinkles

* We often have to ask (see the Case or Let case of 'arityType') whether some
  expression is cheap. In the case of an application, that depends on the arity
  of the application head! That's why we have our own version of 'exprIsCheap',
  'myExprIsCheap', that will integrate the optimistic arity types we have on
  f and g into the cheapness check.

* Consider this (#18793)

    go = \ds. case ds of
           []     -> id
           (x:ys) -> let acc = go ys in
                     case blah of
                       True  -> acc
                       False -> \ x1 -> acc (negate x1)

  We must propagate go's optimistically large arity to @acc@, so that the
  tail call to @acc@ in the True branch has sufficient arity.  This is done
  by the 'am_sigs' field in 'FindRhsArity', and 'lookupSigEnv' in the Var case
  of 'arityType'.

Note [Exciting arity]
~~~~~~~~~~~~~~~~~~~~~
The fixed-point iteration in 'findRhsArity' stabilises very quickly in almost
all cases. To get notified of cases where we need an unusual number of iterations,
we emit a warning in debug mode, so that we can investigate and make sure that
we really can't do better. It's a gross hack, but catches real bugs (#18870).

Now, which number is "unusual"? We pick n > 2. Here's a pretty common and
expected example that takes two iterations and would ruin the specificity
of the warning (from T18937):

  f :: [Int] -> Int -> Int
  f []     = id
  f (x:xs) = let y = sum [0..x]
             in \z -> f xs (y + z)

Fixed-point iteration starts with arity type ⊥ for f. After the first
iteration, we get arity type \??.T, i.e. arity 2, because we unconditionally
'floatIn' the let-binding (see its bottom case).  After the second iteration,
we get arity type \?.T, i.e. arity 1, because now we are no longer allowed
to floatIn the non-cheap let-binding.  Which is all perfectly benign, but
means we do two iterations (well, actually 3 'step's to detect we are stable)
and don't want to emit the warning.

Note [Trim arity inside the loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's an example (from gadt/nbe.hs) which caused trouble.
  data Exp g t where
     Lam :: Ty a -> Exp (g,a) b -> Exp g (a->b)

  eval :: Exp g t -> g -> t
  eval (Lam _ e) g = \a -> eval e (g,a)

The danger is that we get arity 3 from analysing this; and the
next time arity 4, and so on for ever.  Solution: use trimArityType
on each iteration.
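
(In this example eval :: Exp g t -> g -> t, so typeArity gives 2 and the
trimArityType call in 'step' caps every iteration at arity 2.)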

Note [Combining arity type with demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
   let f = \x. let y = <expensive> in \p \q{os}. blah
   in ...(f a b)...(f c d)...

* From the RHS we get an ArityType like
    AT [ (IsCheap,?), (IsExpensive,?), (IsCheap,OneShotLam) ] Dunno
  where "?" means NoOneShotInfo

* From the body, the demand analyser (or Call Arity) will tell us
  that the function is always applied to at least two arguments.

Combining these two pieces of info, we can get the final ArityType
    AT [ (IsCheap,?), (IsExpensive,OneShotLam), (IsCheap,OneShotLam) ] Dunno
result: arity=3, which is better than we could do from either
source alone.

The "combining" part is done by combineWithCallCards.  It
uses info from both Call Arity and demand analysis.

We may have /more/ call demands from the calls than we have lambdas
in the binding.  E.g.
    let f1 = \x. g x x in ...(f1 p q r)...
    -- Demand on f1 is C(x,C(1,C(1,L)))

    let f2 = \y. error y in ...(f2 p q r)...
    -- Demand on f2 is C(x,C(1,C(1,L)))

In both these cases we can eta expand f1 and f2 to arity 3.
But /only/ for called-once demands.  Suppose we had
    let f1 = \y. g x x in ...let h = f1 p q in ...(h r1)...(h r2)...

Now we don't want to eta-expand f1 to have 3 args; only two.
Nor, in the case of f2, do we want to push that error call under
a lambda.  Hence the takeWhile in combineWithCallCards.

Wrinkles:

(CAD1) #24296 exposed a subtle interaction with -fpedantic-bottoms
  (See Note [Dealing with bottom]). Consider

    let f = \x y. error "blah" in
    f 2 1 `seq` Just (f 3 2 1)
      -- Demand on f is C(x,C(1,C(M,L)))

  Usually, it is OK to consider a lambda that is called *at most* once (so call
  cardinality C_01, abbreviated M) a one-shot lambda and eta-expand over it.
  But with -fpedantic-bottoms that is no longer true: If we were to eta-expand
  f to arity 3, we'd discard the error raised when evaluating `f 2 1`.
  Hence in the presence of -fpedantic-bottoms, we must have C_11 for
  eta-expansion.

Note [Do not eta-expand join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Similarly to CPR (see Note [Don't w/w join points for CPR] in
GHC.Core.Opt.WorkWrap), a join point stands well to gain from its outer binding's
eta-expansion, and eta-expanding a join point is fraught with issues like how to
deal with a cast:

    let join $j1 :: IO ()
             $j1 = ...
             $j2 :: Int -> IO ()
             $j2 n = if n > 0 then $j1
                              else ...

    =>

    let join $j1 :: IO ()
             $j1 = (\eta -> ...)
                     `cast` N:IO :: State# RealWorld -> (# State# RealWorld, ())
                                 ~  IO ()
             $j2 :: Int -> IO ()
             $j2 n = (\eta -> if n > 0 then $j1
                                       else ...)
                     `cast` N:IO :: State# RealWorld -> (# State# RealWorld, ())
                                 ~  IO ()

The cast here can't be pushed inside the lambda (since it's not casting to a
function type), so the lambda has to stay, but it can't because it contains a
reference to a join point. In fact, $j2 can't be eta-expanded at all. Rather
than try and detect this situation (and whatever other situations crop up!), we
don't bother; again, any surrounding eta-expansion will improve these join
points anyway, since an outer cast can *always* be pushed inside. By the time
CorePrep comes around, the code is very likely to look more like this:

    let join $j1 :: State# RealWorld -> (# State# RealWorld, ())
             $j1 = (...) eta
             $j2 :: Int -> State# RealWorld -> (# State# RealWorld, ())
             $j2 = if n > 0 then $j1
                            else (...) eta

Note [Arity for recursive join bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
  f x = joinrec j 0 = \ a b c -> (a,x,b)
                j n = j (n-1)
        in j 20

Obviously `f` should get arity 4.  But it's a bit tricky:

1. Remember, we don't eta-expand join points; see
   Note [Do not eta-expand join points].

2. But even though we aren't going to eta-expand it, we still want `j` to get
   idArity=4, via the findRhsArity fixpoint.  Then when we are doing findRhsArity
   for `f`, we'll call arityType on f's RHS:
    - At the letrec-binding for `j` we'll whiz up an arity-4 ArityType
      for `j` (See Note [arityType for non-recursive let-bindings]
      in GHC.Core.Opt.Arity)
    - At the occurrence (j 20) that arity-4 ArityType will leave an arity-3
      result.

3. All this, even though j's /join-arity/ (stored in the JoinId) is 1.
   This is the Main Reason that we want the idArity to sometimes be
   larger than the join-arity; c.f. Note [Invariants on join points] item 2b
   in GHC.Core.

4. Be very careful of things like this (#21755):
     g x = let j 0 = \y -> (x,y)
               j n = expensive n `seq` j (n-1)
           in j x
   Here we do /not/ want to eta-expand `g`, lest we duplicate all those
   (expensive n) calls.

   But it's fine: the findRhsArity fixpoint calculation will compute arity-1
   for `j` (not arity 2); and that's just what we want. But we do need that
   fixpoint.

   Historical note: an earlier version of GHC did a hack in which we gave
   join points an ArityType of ABot, but that did not work with this #21755
   case.

5. arityType does not usually expect to encounter free join points;
   see GHC.Core.Opt.Arity Note [No free join points in arityType].
   But consider
          f x = join    j1 y = .... in
                joinrec j2 z = ...j1 y... in
                j2 v

   When doing findRhsArity on `j2` we'll encounter the free `j1`.
   But that is fine, because we aren't going to eta-expand `j2`;
   we just want to know its arity.  So we have a flag am_no_eta,
   switched on when doing findRhsArity on a join point RHS. If
   the flag is on, we allow free join points, but not otherwise.


Note [Arity for non-recursive join bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note [Arity for recursive join bindings] deals with recursive join
bindings. But what about /non-recursive/ ones?  If we just call
findRhsArity, it will call arityType.  And that can be expensive when
we have deeply nested join points:
  join j1 x1 = join j2 x2 = join j3 x3 = blah3
                            in blah2
               in blah1
(e.g. test T18698b).

So we call cheapArityType instead.  It's good enough for practical
purposes.

(Side note: maybe we should use cheapArityType for the RHS of let bindings
in the main arityType function.)
-}


{- *********************************************************************
*                                                                      *
                   arityType
*                                                                      *
********************************************************************* -}

arityLam :: Id -> ArityType -> ArityType
arityLam id (AT oss div)
  = AT ((IsCheap, one_shot) : oss) div
  where
    one_shot | isDeadEndDiv div = OneShotLam
             | otherwise        = idStateHackOneShotInfo id
    -- If the body diverges, treat it as one-shot: no point
    -- in floating out, and no penalty for floating in
    -- See Wrinkle [Bottoming functions] in Note [ArityType]
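-- For illustration (informal, not from the original source): wrapping one
-- more value lambda around a non-bottoming body whose arity type is
--    AT [(IsCheap,OneShotLam)] topDiv
-- gives
--    arityLam x (AT [(IsCheap,OneShotLam)] topDiv)
--      = AT [(IsCheap, idStateHackOneShotInfo x), (IsCheap,OneShotLam)] topDiv
-- i.e. the new outer lambda is always cheap; its one-shot-ness comes from
-- the binder, or is OneShotLam when the body is a dead end.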

floatIn :: Cost -> ArityType -> ArityType
-- We have something like (let x = E in b),
-- where b has the given arity type.
-- NB: be as lazy as possible in the Cost-of-E argument;
--     we can often get away without ever looking at it
--     See Note [Care with nested expressions]
floatIn ch at@(AT lams div)
  = case lams of
      []                 -> at
      (IsExpensive,_):_  -> at
      (_,os):lams        -> AT ((ch,os):lams) div

addWork :: ArityType -> ArityType
-- Add work to the outermost level of the arity type
addWork at@(AT lams div)
  = case lams of
      []        -> at
      lam:lams' -> AT (add_work lam : lams') div

add_work :: ATLamInfo -> ATLamInfo
add_work (_,os) = (IsExpensive,os)

arityApp :: ArityType -> Cost -> ArityType
-- Processing (fun arg) where at is the ArityType of fun,
-- Knock off an argument and behave like 'let'
arityApp (AT ((ch1,_):oss) div) ch2 = floatIn (ch1 `addCost` ch2) (AT oss div)
arityApp at                     _   = at
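-- For illustration (informal, assuming addCost is the obvious least upper
-- bound on Cost): applying a function whose arity type has two cheap lambdas
-- to an expensive argument knocks off one lambda and floats the cost inwards:
--    arityApp (AT [(IsCheap,os1),(IsCheap,os2)] topDiv) IsExpensive
--      = AT [(IsExpensive,os2)] topDiv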

-- | Least upper bound in the 'ArityType' lattice.
-- See the haddocks on 'ArityType' for the lattice.
--
-- Used for branches of a @case@.
andArityType :: ArityEnv -> ArityType -> ArityType -> ArityType
andArityType env (AT (lam1:lams1) div1) (AT (lam2:lams2) div2)
  | AT lams' div' <- andArityType env (AT lams1 div1) (AT lams2 div2)
  = AT ((lam1 `and_lam` lam2) : lams') div'
  where
    (ch1,os1) `and_lam` (ch2,os2)
      = ( ch1 `addCost` ch2, os1 `bestOneShot` os2)
        -- bestOneShot: see Note [Combining case branches: optimistic one-shot-ness]

andArityType env (AT [] div1) at2 = andWithTail env div1 at2
andArityType env at1 (AT [] div2) = andWithTail env div2 at1

andWithTail :: ArityEnv -> Divergence -> ArityType -> ArityType
andWithTail env div1 at2@(AT lams2 _)
  | isDeadEndDiv div1    -- case x of { T -> error; F -> \y.e }
  = at2                  -- See Note
  | pedanticBottoms env  --    [Combining case branches: andWithTail]
  = AT [] topDiv

  | otherwise  -- case x of { T -> plusInt <expensive>; F -> \y.e }
  = AT (map add_work lams2) topDiv    -- We know div1 = topDiv
    -- See Note [Combining case branches: andWithTail]

{- Note [Combining case branches: optimistic one-shot-ness]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When combining the ArityTypes for two case branches (with
andArityType) and both ArityTypes have ATLamInfo, then we just combine
their expensive-ness and one-shot info.  The tricky point is when we
have

     case x of True  -> \x{one-shot}. blah1
               False -> \y.           blah2

Since one-shot-ness is about the /consumer/ not the /producer/, we
optimistically assume that if either branch is one-shot, we combine
the best of the two branches, on the (slightly dodgy) basis that if we
know one branch is one-shot, then they all must be.  Surprisingly,
this means that the one-shot arity type is effectively the top element
of the lattice.

Hence the call to `bestOneShot` in `andArityType`.

Here's an example:
  go = \x. let z = go e0
               go2 = \x. case x of
                           True  -> z
                           False -> \s(one-shot). e1
           in go2 x

We *really* want to respect the one-shot annotation provided by the
user and eta-expand go and go2.  In the first fixpoint iteration of
'go' we'll bind 'go' to botArityType (written \.⊥, see Note
[ArityType]).  So 'z' will get arityType \.⊥; so we end up combining
the True and False branches:

      \.⊥ `andArityType` \1.T

That gives \1.T (see Note [Combining case branches: andWithTail],
first bullet).  So 'go2' gets an arityType of \(C?)(C1).T, which is
what we want.

Note [Care with nested expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
    arityType (Just <big-expression>)
We will take
    arityType Just = AT [(IsCheap,os)] topDiv
and then do
    arityApp (AT [(IsCheap,os)] topDiv) (exprCost <big-expression>)
The result will be AT [] topDiv.  It doesn't matter what <big-expression>
is!  The same is true of
    arityType (let x = <rhs> in <body>)
where the cost of <rhs> doesn't matter unless <body> has a useful
arityType.

TL;DR in `floatIn`, do not look at the Cost argument until you have to.

I found this when looking at #24471, although I don't think it was really
the main culprit.

Note [Combining case branches: andWithTail]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When combining the ArityTypes for two case branches (with andArityType)
and one side or the other has run out of ATLamInfo; then we get
into `andWithTail`.

* If one branch is guaranteed bottom (isDeadEndDiv), we just take
  the other. Consider   case x of
                          True  -> \x.   error "urk"
                          False -> \x y. error "urk2"

  Remember: \o1..on.⊥ means "if you apply to n args, it'll definitely
  diverge".  So we need \??.⊥ for the whole thing, the /max/ of both
  arities.

* Otherwise, if pedantic-bottoms is on, we just have to return
  AT [] topDiv.  E.g. if we have
    f x z = case x of True  -> \y. blah
                      False -> z
  then we can't eta-expand, because that would change the behaviour
  of (f False bottom).

* But if pedantic-bottoms is not on, we allow ourselves to push
  `z` under a lambda (much as we allow ourselves to put the `case x`
  under a lambda).  However we know nothing about the expensiveness
  or one-shot-ness of `z`, so we'd better assume it looks like
  (Expensive, NoOneShotInfo) all the way. Remembering
  Note [Combining case branches: optimistic one-shot-ness],
  we just add work to every ATLamInfo, keeping the one-shot-ness.

Note [Eta expanding through CallStacks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Just as it's good to eta-expand through dictionaries, so it is good to
do so through CallStacks.  #20103 is a case in point, where we got
  foo :: HasCallStack => Int -> Int
  foo = \(d::CallStack). let d2 = pushCallStack blah d in
        \(x:Int). blah

We really want to eta-expand this!  #20103 is quite convincing!
We do this regardless of -fdicts-cheap; it's not really a dictionary.

We also want to check both for (IP blah CallStack) and for CallStack itself.
We might have either
   d :: IP blah CallStack    -- Or HasCallStack
   d = (cs-expr :: CallStack) |> (nt-ax :: CallStack ~ IP blah CallStack)
or just
   cs :: CallStack
   cs = cs-expr

Test T20103 is an example of the latter.

Note [Eta expanding through dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the experimental -fdicts-cheap flag is on, we eta-expand through
dictionary bindings.  This improves arities. Thereby, it also
means that full laziness is less prone to floating out the
application of a function to its dictionary arguments, which
can thereby lose opportunities for fusion.  Example:
     foo :: Ord a => a -> ...
     foo = /\a \(d:Ord a). let d' = ...d... in \(x:a). ....
        -- So foo has arity 1

     f = \x. foo dInt $ bar x

The (foo dInt) is floated out, and makes ineffective a RULE
     foo (bar x) = ...

One could go further and make exprIsCheap reply True to any
dictionary-typed expression, but that's more work.
-}

---------------------------

data ArityEnv
  = AE { am_opts :: !ArityOpts

       , am_sigs :: !(IdEnv SafeArityType)
         -- NB `SafeArityType` so we can use this in myIsCheapApp
         -- See Note [Arity analysis] for details about fixed-point iteration.

       , am_free_joins :: !Bool  -- True <=> free join points allowed
         -- Used /only/ to support assertion checks
       }

instance Outputable ArityEnv where
  ppr (AE { am_sigs = sigs, am_free_joins = free_joins })
    = text "AE" <+> braces (sep [ text "free joins:" <+> ppr free_joins
                                , text "sigs:" <+> ppr sigs ])

-- | The @ArityEnv@ used by 'findRhsArity'.
findRhsArityEnv :: ArityOpts -> Bool -> ArityEnv
findRhsArityEnv opts free_joins
  = AE { am_opts       = opts
       , am_free_joins = free_joins
       , am_sigs       = emptyVarEnv }

freeJoinsOK :: ArityEnv -> Bool
freeJoinsOK (AE { am_free_joins = free_joins }) = free_joins

-- First some internal functions in snake_case for deleting in certain VarEnvs
-- of the ArityType. Don't call these; call delInScope* instead!

modifySigEnv :: (IdEnv ArityType -> IdEnv ArityType) -> ArityEnv -> ArityEnv
modifySigEnv f env@(AE { am_sigs = sigs }) = env { am_sigs = f sigs }
{-# INLINE modifySigEnv #-}

del_sig_env :: Id -> ArityEnv -> ArityEnv -- internal!
del_sig_env id = modifySigEnv (\sigs -> delVarEnv sigs id)
{-# INLINE del_sig_env #-}

del_sig_env_list :: [Id] -> ArityEnv -> ArityEnv -- internal!
del_sig_env_list ids = modifySigEnv (\sigs -> delVarEnvList sigs ids)
{-# INLINE del_sig_env_list #-}

-- end of internal deletion functions

extendSigEnv :: ArityEnv -> Id -> SafeArityType -> ArityEnv
extendSigEnv env id ar_ty
  = modifySigEnv (\sigs -> extendVarEnv sigs id ar_ty) env

delInScope :: ArityEnv -> Id -> ArityEnv
delInScope env id = del_sig_env id env

delInScopeList :: ArityEnv -> [Id] -> ArityEnv
delInScopeList env ids = del_sig_env_list ids env

lookupSigEnv :: ArityEnv -> Id -> Maybe SafeArityType
lookupSigEnv (AE { am_sigs = sigs }) id = lookupVarEnv sigs id

-- | Whether the analysis should be pedantic about bottoms.
-- 'exprBotStrictness_maybe' always is.
pedanticBottoms :: ArityEnv -> Bool
pedanticBottoms (AE { am_opts = ArityOpts{ ao_ped_bot = ped_bot }}) = ped_bot

exprCost :: ArityEnv -> CoreExpr -> Maybe Type -> Cost
exprCost env e mb_ty
  | myExprIsCheap env e mb_ty = IsCheap
  | otherwise                 = IsExpensive

-- | A version of 'exprIsCheap' that considers results from arity analysis
-- and optionally the expression's type.
-- Under 'exprBotStrictness_maybe', no expressions are cheap.
myExprIsCheap :: ArityEnv -> CoreExpr -> Maybe Type -> Bool
myExprIsCheap (AE { am_opts = opts, am_sigs = sigs }) e mb_ty
  = cheap_dict || cheap_fun e
  where
    cheap_dict = case mb_ty of
                   Nothing -> False
                   Just ty -> (ao_dicts_cheap opts && isDictTy ty)
                              || isCallStackPredTy ty || isCallStackTy ty
        -- See Note [Eta expanding through dictionaries]
        -- See Note [Eta expanding through CallStacks]

    cheap_fun e = exprIsCheapX (myIsCheapApp sigs) False e

-- | A version of 'isCheapApp' that considers results from arity analysis.
-- See Note [Arity analysis] for what's in the signature environment and why
-- it's important.
myIsCheapApp :: IdEnv SafeArityType -> CheapAppFun
myIsCheapApp sigs fn n_val_args = case lookupVarEnv sigs fn of

  -- Nothing means not a local function, fall back to regular
  -- 'GHC.Core.Utils.isCheapApp'
  Nothing -> isCheapApp fn n_val_args

  -- `Just at` means local function with `at` as current SafeArityType.
  -- NB the SafeArityType bit: that means we can ignore the cost flags
  --    in 'lams', and just consider the length
  -- Roughly approximate what 'isCheapApp' is doing.
  Just (AT lams div)
    | isDeadEndDiv div         -> True -- See Note [isCheapApp: bottoming functions] in GHC.Core.Utils
    | n_val_args == 0          -> True -- Essentially
    | n_val_args < length lams -> True -- isWorkFreeApp
    | otherwise                -> False
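-- For illustration (informal): if 'sigs' maps f to  AT [l1,l2] topDiv  then
--    myIsCheapApp sigs f 0 = True    -- no value args at all
--    myIsCheapApp sigs f 1 = True    -- partial application, work-free
--    myIsCheapApp sigs f 2 = False   -- saturated, may do real work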

----------------
arityType :: HasDebugCallStack => ArityEnv -> CoreExpr -> ArityType
-- Precondition: all the free join points of the expression
--               are bound by the ArityEnv
-- See Note [No free join points in arityType]
--
-- Returns ArityType, not SafeArityType.  The caller must do
-- trimArityType if necessary.
arityType env (Var v)
  | Just at <- lookupSigEnv env v -- Local binding
  = at
  | otherwise
  = assertPpr (freeJoinsOK env || not (isJoinId v)) (ppr v) $
    -- All join points should be in the ae_sigs
    -- See Note [No free join points in arityType]
    idArityType v

arityType env (Cast e _)
  = arityType env e

        -- Lambdas; increase arity
arityType env (Lam x e)
  | isId x    = arityLam x (arityType env' e)
  | otherwise = arityType env' e
  where
    env' = delInScope env x

        -- Applications; decrease arity, except for types
arityType env (App fun (Type _))
   = arityType env fun
arityType env (App fun arg)
   = arityApp fun_at arg_cost
   where
     fun_at   = arityType env fun
     arg_cost = exprCost env arg Nothing

        -- Case/Let; keep arity if either the expression is cheap
        -- or it's a 1-shot lambda
        -- The former is not really right for Haskell
        --      f x = case x of { (a,b) -> \y. e }
        --  ===>
        --      f x y = case x of { (a,b) -> e }
        -- The difference is observable using 'seq'
        --
arityType env (Case scrut bndr _ alts)
  | exprIsDeadEnd scrut || null alts
  = botArityType    -- Do not eta expand. See (1) in Note [Dealing with bottom]

  | not (pedanticBottoms env)  -- See (2) in Note [Dealing with bottom]
  , myExprIsCheap env scrut (Just (idType bndr))
  = alts_type

  | exprOkForSpeculation scrut
  = alts_type

  | otherwise            -- In the remaining cases we may not push
  = addWork alts_type    -- evaluation of the scrutinee in
  where
    env' = delInScope env bndr
    arity_type_alt (Alt _con bndrs rhs) = arityType (delInScopeList env' bndrs) rhs
    alts_type = foldr1 (andArityType env) (map arity_type_alt alts)

arityType env (Let (NonRec b rhs) e)
  = -- See Note [arityType for non-recursive let-bindings]
    floatIn rhs_cost (arityType env' e)
  where
    rhs_cost = exprCost env rhs (Just (idType b))
    env'     = extendSigEnv env b (safeArityType (arityType env rhs))

arityType env (Let (Rec prs) e)
  = -- See Note [arityType for recursive let-bindings]
    floatIn (allCosts bind_cost prs) (arityType env' e)
  where
    bind_cost (b,e) = exprCost env' e (Just (idType b))
    env'            = foldl extend_rec env prs
    extend_rec :: ArityEnv -> (Id,CoreExpr) -> ArityEnv
    extend_rec env (b,_) = extendSigEnv env b $
                           idArityType b
      -- See Note [arityType for recursive let-bindings]

arityType env (Tick t e)
  | not (tickishIsCode t)     = arityType env e

arityType _ _ = topArityType

--------------------
idArityType :: Id -> ArityType
idArityType v
  | strict_sig <- idDmdSig v
  , (ds, div) <- splitDmdSig strict_sig
  , isDeadEndDiv div
  = AT (takeList ds one_shots) div

  | isEmptyTy id_ty
  = botArityType

  | otherwise
  = AT (take (idArity v) one_shots) topDiv
  where
    id_ty = idType v

    one_shots :: [(Cost,OneShotInfo)]  -- One-shot-ness derived from the type
    one_shots = repeat IsCheap `zip` typeOneShots id_ty

--------------------
cheapArityType :: HasDebugCallStack => CoreExpr -> ArityType
-- A fast and cheap version of arityType.
-- Returns an ArityType with IsCheap everywhere
-- c.f. GHC.Core.Utils.exprIsDeadEnd
--
-- /Can/ encounter a free join-point Id; e.g. via the call
--   in exprBotStrictness_maybe, which is called in lots
--   of places
--
-- Returns ArityType, not SafeArityType.  The caller must do
-- trimArityType if necessary.
cheapArityType e = go e
  where
    go (Var v)                  = idArityType v
    go (Cast e _)               = go e
    go (Lam x e)  | isId x      = arityLam x (go e)
                  | otherwise   = go e
    go (App e a)  | isTypeArg a = go e
                  | otherwise   = arity_app a (go e)

    go (Tick t e) | not (tickishIsCode t) = go e

    -- Null alts: see Note [Empty case alternatives] in GHC.Core
    go (Case _ _ _ alts) | null alts = botArityType

    -- Give up on let, case.  In particular, unlike arityType,
    -- we make no attempt to look inside let's.
    go _ = topArityType

    -- Specialised version of arityApp; all costs in ArityType are IsCheap
    -- See Note [exprArity for applications]
    -- NB: (1) coercions count as a value argument
    --     (2) we use the super-cheap exprIsTrivial rather than the
    --         more complicated and expensive exprIsCheap
    arity_app _ at@(AT [] _) = at
    arity_app arg at@(AT ((cost,_):lams) div)
       | assertPpr (cost == IsCheap) (ppr at $$ ppr arg) $
         isDeadEndDiv div  = AT lams div
       | exprIsTrivial arg = AT lams topDiv
       | otherwise         = topArityType

---------------
exprArity :: CoreExpr -> Arity
-- ^ An approximate, even faster, version of 'cheapArityType'
-- Roughly   exprArity e = arityTypeArity (cheapArityType e)
-- But it's a bit less clever about bottoms
--
-- We do /not/ guarantee that exprArity e <= typeArity e
-- You may need to do arity trimming after calling exprArity
-- See Note [Arity trimming]
-- Reason: if we do arity trimming here we have to take exprType
--         and that can be expensive if there is a large cast
exprArity e = go e
  where
    go (Var v)                     = idArity v
    go (Lam x e) | isId x          = go e + 1
                 | otherwise       = go e
    go (Tick t e) | not (tickishIsCode t) = go e
    go (Cast e _)                  = go e
    go (App e (Type _))            = go e
    go (App f a) | exprIsTrivial a = (go f - 1) `max` 0
        -- See Note [exprArity for applications]
        -- NB: coercions count as a value argument

    go _                           = 0
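-- A couple of illustrative (informal) consequences of the equations above:
--    exprArity (\x y. blah)          = 2 + exprArity blah    -- x,y value binders
--    exprArity (f x)  | x trivial    = max 0 (idArity f - 1) -- f a variable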

---------------
exprIsDeadEnd :: CoreExpr -> Bool
-- See Note [Bottoming expressions]
-- This function is, in effect, just a specialised (and hence cheap)
--    version of cheapArityType:
--    exprIsDeadEnd e = case cheapArityType e of
--                         AT lams div -> null lams && isDeadEndDiv div
-- See also exprBotStrictness_maybe, which uses cheapArityType
exprIsDeadEnd e
  = go 0 e
  where
    go :: Arity -> CoreExpr -> Bool
    -- (go n e) = True <=> expr applied to n value args is bottom
    go _ (Lit {})                = False
    go _ (Type {})               = False
    go _ (Coercion {})           = False
    go n (App e a) | isTypeArg a = go n e
                   | otherwise   = go (n+1) e
    go n (Tick _ e)              = go n e
    go n (Cast e _)              = go n e
    go n (Let _ e)               = go n e
    go n (Lam v e) | isTyVar v   = go n e
                   | otherwise   = False

    go _ (Case _ _ _ alts)       = null alts
       -- See Note [Empty case alternatives] in GHC.Core

    go n (Var v) | isDeadEndAppSig (idDmdSig v) n = True
                 | isEmptyTy (idType v)           = True
                 | otherwise                      = False

{- Note [Bottoming expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A bottoming expression is guaranteed to diverge, or raise an
exception.  We can test for it in two different ways, and exprIsDeadEnd
checks for both of these situations:

* Visibly-bottom computations.  For example
      (error Int "Hello")
  is visibly bottom.  The strictness analyser also finds out if
  a function diverges or raises an exception, and puts that info
  in its strictness signature.

* Empty types.  If a type is empty, its only inhabitant is bottom.
  For example:
      data T
      f :: T -> Bool
      f = \(x:t). case x of Bool {}
  Since T has no data constructors, the case alternatives are of course
  empty.  However note that 'x' is not bound to a visibly-bottom value;
  it's the *type* that tells us it's going to diverge.

A GADT may also be empty even though it has constructors:
        data T a where
          T1 :: a -> T Bool
          T2 :: T Int
        ...(case (x::T Char) of {})...
Here (T Char) is uninhabited.  A more realistic case is (Int ~ Bool),
which is likewise uninhabited.

Note [No free join points in arityType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we call arityType on this expression (EX1)
   \x . case x of True  -> \y. e
                  False -> $j 3
where $j is a join point.  It really makes no sense to talk of the arity
of this expression, because it has a free join point.  In particular, we
can't eta-expand the expression because we'd have to do the same thing to the
binding of $j, and we can't see that binding.

If we had (EX2)
   \x. join $j y = blah
       case x of True  -> \y. e
                 False -> $j 3
then it would make perfect sense: we can determine $j's ArityType, and
propagate it to the usage site as usual.

But how can we get (EX1)?  It doesn't make much sense, because $j can't
be a join point under the \x anyway.  So we make it a precondition of
arityType that the argument has no free join-point Ids.  (This is checked
with an assert in the Var case of arityType.)

Wrinkles

* We /do/ allow free join points when doing findRhsArity for join-point
  right-hand sides. See Note [Arity for recursive join bindings]
  point (5) in GHC.Core.Opt.Simplify.Utils.

* The invariant (no free join point in arityType) risks being
  invalidated by one very narrow special case: runRW#

   join $j y = blah
   runRW# (\s. case x of True  -> \y. e
                         False -> $j x)

  We have special magic in OccurAnal and Simplify to allow continuations to
  move into the body of a runRW# call.

  So we are careful never to attempt to eta-expand the (\s.blah) in the
  argument to runRW#, at least not when there is a literal lambda there,
  so that OccurAnal has seen it and allowed join points bound outside.
  See Note [No eta-expansion in runRW#] in GHC.Core.Opt.Simplify.Iteration.

Note [arityType for non-recursive let-bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For non-recursive let-bindings, we just get the arityType of the RHS,
and extend the environment.  That works nicely for things like this
(#18793):
  go = \ ds. case ds_a2CF of {
               []     -> id
               : y ys -> case y of { GHC.Types.I# x ->
                         let acc = go ys in
                         case x ># 42# of {
                            __DEFAULT -> acc
                            1# -> \x1. acc (negate x2)

Here we want to get a good arity for `acc`, based on the ArityType
of `go`.

All this is particularly important for join points. Consider this (#18328)

  f x = join j y = case y of
                      True -> \a. blah
                      False -> \b. blah
        in case x of
              A -> j True
              B -> \c. blah
              C -> j False

and suppose the join point is too big to inline.  Now, what is the
arity of f?  If we inlined the join point, we'd definitely say "arity
2" because we are prepared to push case-scrutinisation inside a
lambda. It's important that we extend the envt with j's ArityType, so
that we can use that information in the A/C branch of the case.

Note [arityType for recursive let-bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For /recursive/ bindings it's more difficult to call arityType
(as we do in Note [arityType for non-recursive let-bindings]),
because we don't have an ArityType to put in the envt for the
recursively bound Ids.  So we satisfy ourselves with whizzing up
an ArityType from the idArity of the function, via idArityType.

That is nearly equivalent to deleting the binder from the envt, at
which point we'll call idArityType at the occurrences.  But doing it
here means

  (a) we only call idArityType once, no matter how many
      occurrences, and

  (b) we can check (in the arityType (Var v) case) that
      we don't mention free join-point Ids. See
      Note [No free join points in arityType].

But see Note [Arity for recursive join bindings] in
GHC.Core.Opt.Simplify.Utils for dark corners.
-}

{-
%************************************************************************
%*                                                                      *
              The main eta-expander
%*                                                                      *
%************************************************************************

We go for:
   f = \x1..xn -> N  ==>   f = \x1..xn y1..ym -> N y1..ym
                                 (n >= 0)

where (in both cases)

        * The xi can include type variables

        * The yi are all value variables

        * N is a NORMAL FORM (i.e. no redexes anywhere)
          wanting a suitable number of extra args.

The biggest reason for doing this is for cases like

        f = \x -> case x of
                    True  -> \y -> e1
                    False -> \y -> e2

Here we want to get the lambdas together.  A good example is the nofib
program fibheaps, which gets 25% more allocation if you don't do this
eta-expansion.

We may have to sandwich some coerces between the lambdas
to make the types work.   exprEtaExpandArity looks through coerces
when computing arity; and etaExpand adds the coerces as necessary when
actually computing the expansion.

Note [No crap in eta-expanded code]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The eta expander is careful not to introduce "crap".  In particular,
given a CoreExpr satisfying the 'CpeRhs' invariant (in CorePrep), it
returns a CoreExpr satisfying the same invariant. See Note [Eta
expansion and the CorePrep invariants] in CorePrep.

This means the eta-expander has to do a bit of on-the-fly
simplification but it's not too hard.  The alternative, of relying on
a subsequent clean-up phase of the Simplifier to de-crapify the result,
means you can't really use it in CorePrep, which is painful.

Note [Eta expansion for join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The no-crap rule is very tiresome to guarantee when
we have join points. Consider eta-expanding
   let j :: Int -> Int -> Bool
       j x = e
   in b

The simple way is
  \(y::Int). (let j x = e in b) y

The no-crap way is
  \(y::Int). let j' :: Int -> Bool
                 j' x = e y
             in b[j'/j] y
where I have written b[j'/j] to stress that j's type has
changed.  Note that (of course!) we have to push the application
inside the RHS of the join as well as into the body.  AND if j
has an unfolding we have to push it into there too.  AND j might
be recursive...

So for now I'm abandoning the no-crap rule in this case, conscious that this
causes the ugly Wrinkle (EA1) of Note [Eta expansion of arguments in CorePrep].

(Moreover, I think that casts can make the no-crap rule fail too.)

Note [Eta expansion and SCCs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that SCCs are not treated specially by etaExpand.  If we have
        etaExpand 2 (\x -> scc "foo" e)
        = (\x y -> (scc "foo" e) y)
So the costs of evaluating 'e' (not 'e y') are attributed to "foo"

Note [Eta expansion and source notes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CorePrep puts floatable ticks outside of value applications, but not
type applications. As a result we might be trying to eta-expand an
expression like

  (src<...> v) @a

which we want to lead to code like

  \x -> src<...> v @a x

This means that we need to look through type applications and be ready
to re-add floats on the top.

Note [Eta expansion with ArityType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The etaExpandAT function takes an ArityType (not just an Arity) to
guide eta-expansion.  Why? Because we want to preserve one-shot info.
Consider
  foo = \x. case x of
              True  -> (\s{os}. blah) |> co
              False -> wubble
We'll get an ArityType for foo of \?1.T.

Then we want to eta-expand to
  foo = (\x. \eta{os}. (case x of ...as before...) eta) |> some_co

That 'eta' binder is fresh, and we really want it to have the
one-shot flag from the inner \s{os}.  By expanding with the
ArityType gotten from analysing the RHS, we achieve this neatly.

This makes a big difference to the one-shot monad trick;
see Note [The one-shot state monad trick] in GHC.Utils.Monad.
-}

-- | @etaExpand n e@ returns an expression with
-- the same meaning as @e@, but with arity @n@.
--
-- Given:
--
-- > e' = etaExpand n e
--
-- We should have that:
--
-- > ty = exprType e = exprType e'
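--
-- An informal example (binder names here are illustrative; the real
-- expansion uses fresh eta binders and may insert casts):
--
-- > etaExpand 2 f  ==>  \x y -> f x y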

etaExpand :: Arity -> CoreExpr -> CoreExpr
etaExpand n orig_expr
  = eta_expand in_scope (replicate n NoOneShotInfo) orig_expr
  where
    in_scope = {-#SCC "eta_expand:in-scopeX" #-}
               mkInScopeSet (exprFreeVars orig_expr)

etaExpandAT :: InScopeSet -> SafeArityType -> CoreExpr -> CoreExpr
-- See Note [Eta expansion with ArityType]
--
-- We pass in the InScopeSet from the simplifier to avoid recomputing
-- it here, which can be jolly expensive if the casts are big
-- In #18223 it took 10% of compile time just to do the exprFreeVars!
etaExpandAT in_scope at orig_expr
  = eta_expand in_scope (arityTypeOneShots at) orig_expr

-- etaExpand arity e = res
-- Then 'res' has at least 'arity' lambdas at the top
--    possibly with a cast wrapped around the outside
-- See Note [Eta expansion with ArityType]
--
-- etaExpand deals with for-alls. For example:
--              etaExpand 1 E
-- where  E :: forall a. a -> a
-- would return
--      (/\b. \y::a -> E b y)

eta_expand :: InScopeSet -> [OneShotInfo] -> CoreExpr -> CoreExpr
eta_expand in_scope one_shots (Cast expr co)
  = mkCast (eta_expand in_scope one_shots expr) co
    -- This mkCast is important, because eta_expand might return an
    -- expression with a cast at the outside; and tryCastWorkerWrapper
    -- assumes that we don't have nested casts. Makes a difference
    -- in compile-time for T18223

eta_expand in_scope one_shots orig_expr
  = go in_scope one_shots [] orig_expr
  where
      -- Strip off existing lambdas and casts before handing off to mkEtaWW
      -- This is mainly to avoid spending time cloning binders and substituting
      -- when there is actually nothing to do.  It's slightly awkward to deal
      -- with casts here, apart from the topmost one, and they are rare, so
      -- if we find one we just hand off to mkEtaWW anyway
      -- Note [Eta expansion and SCCs]
    go _ [] _ _ = orig_expr  -- Already has the specified arity; no-op

    go in_scope oss@(_:oss1) vs (Lam v body)
      | isTyVar v = go (in_scope `extendInScopeSet` v) oss  (v:vs) body
      | otherwise = go (in_scope `extendInScopeSet` v) oss1 (v:vs) body

    go in_scope oss rev_vs expr
      = -- pprTrace "ee" (vcat [ppr in_scope', ppr top_bndrs, ppr eis]) $
        retick $
        etaInfoAbs top_eis $
        etaInfoApp in_scope' sexpr eis
      where
          (in_scope', eis@(EI eta_bndrs mco))
                    = mkEtaWW oss (ppr orig_expr) in_scope (exprType expr)
          top_bndrs = reverse rev_vs
          top_eis   = EI (top_bndrs ++ eta_bndrs) (mkPiMCos top_bndrs mco)

          -- Find ticks behind type apps.
          -- See Note [Eta expansion and source notes]
          -- I don't really understand this code SLPJ May 21
          (expr', args)   = collectArgs expr
          (ticks, expr'') = stripTicksTop tickishFloatable expr'
          sexpr           = mkApps expr'' args
          retick expr     = foldr mkTick expr ticks

{- *********************************************************************
*                                                                      *
              The EtaInfo mechanism
          mkEtaWW, etaInfoAbs, etaInfoApp
*                                                                      *
********************************************************************* -}

{- Note [The EtaInfo mechanism]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have (e :: ty) and we want to eta-expand it to arity N.
This what eta_expand does.  We do it in two steps:

1.  mkEtaWW: from 'ty' and 'N' build a EtaInfo which describes
    the shape of the expansion necessary to expand to arity N.

2.  Build the term
       \ v1..vn.  e v1 .. vn
    where those abstractions and applications are described by
    the same EtaInfo.  Specifically we build the term

       etaInfoAbs etas (etaInfoApp in_scope e etas)

   where etas :: EtaInfo
         etaInfoAbs builds the lambdas
         etaInfoApp builds the applications

   Note that the /same/ EtaInfo drives both etaInfoAbs and etaInfoApp

To a first approximation EtaInfo is just [Var].  But casts complicate
the question.  If we have
   newtype N a = MkN (S -> a)
     axN :: N a  ~  S -> a
and
   e :: N (N Int)
then the eta-expansion should look like
   (\(x::S) (y::S) -> (e |> co) x y) |> sym co
where
  co :: N (N Int) ~ S -> S -> Int
  co = axN @(N Int) ; (S -> axN @Int)

We want to get one cast, at the top, to account for all those
nested newtypes. This is expressed by the EtaInfo type:

   data EtaInfo = EI [Var] MCoercionR

Precisely, here is the (EtaInfo Invariant):

  EI bs co :: EtaInfo

describes a particular eta-expansion, thus:

  Abstraction:  (\b1 b2 .. bn. []) |> sym co
  Application:  ([] |> co) b1 b2 .. bn

  e  :: T
  co :: T ~R (t1 -> t2 -> .. -> tn -> tr)
  e = (\b1 b2 ... bn. (e |> co) b1 b2 .. bn) |> sym co


Note [Check for reflexive casts in eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It turns out that the casts created by the above mechanism are often Refl.
When casts are very deeply nested (as happens in #18223), the repetition
of types can make the overall term very large.  So there is a big
payoff in cancelling out casts aggressively wherever possible.
(See also Note [No crap in eta-expanded code].)

This matters particularly in etaInfoApp, where we
* Do beta-reduction on the fly
* Use getArg_maybe to get a cast out of the way,
  so that we can do beta reduction
Together this makes a big difference.  Consider when e is
   case x of
      True  -> (\x -> e1) |> c1
      False -> (\p -> e2) |> c2

When we eta-expand this to arity 1, say, etaInfoAbs will wrap
a (\eta) around the outside and use etaInfoApp to apply each
alternative to 'eta'.  We want to beta-reduce all that junk
away.

#18223 was a dramatic example in which the intermediate term was
grotesquely huge, even though the next Simplifier iteration squashed
it.  Better to kill it at birth.

The crucial spots in etaInfoApp are:
* `checkReflexiveMCo` in the (Cast e co) case of `go`
* `checkReflexiveMCo` in `pushCoArg`
* Less important: checkReflexiveMCo in the final case of `go`
Collectively these make a factor-of-5 difference to the total
allocation of T18223, so take care if you change this stuff!

Example:
   newtype N = MkN (Y->Z)
   f :: X -> N
   f = \(x::X). ((\(y::Y). blah) |> fco)

where fco :: (Y->Z) ~ N

mkEtaWW makes an EtaInfo of (EI [(eta1:X), (eta2:Y)] eta_co)
  where
    eta_co :: (X->N) ~ (X->Y->Z)
    eta_co =  (<X> -> nco)
    nco :: N ~ (Y->Z)  -- Comes from topNormaliseNewType_maybe

Now, when we push that eta_co inward in etaInfoApp:
* In the (Cast e co) case, the 'fco' and 'nco' will meet, and
  should cancel.
* When we meet the (\y.e) we want no cast on the y.

-}

--------------
data EtaInfo = EI [Var] MCoercionR
     -- See Note [The EtaInfo mechanism]

instance Outputable EtaInfo where
  ppr (EI vs mco) = text "EI" <+> ppr vs <+> parens (ppr mco)


etaInfoApp :: InScopeSet -> CoreExpr -> EtaInfo -> CoreExpr
-- (etaInfoApp s e (EI bs mco) returns something equivalent to
--             ((substExpr s e) |> mco b1 .. bn)
-- See Note [The EtaInfo mechanism]
--
-- NB: With very deeply nested casts, this function can be expensive
--     In T18223, this function alone costs 15% of allocation, all
--     spent in the calls to substExprSC and substBindSC

etaInfoApp in_scope expr eis
  = go (mkEmptySubst in_scope) expr eis
  where
    go :: Subst -> CoreExpr -> EtaInfo -> CoreExpr
    -- 'go' pushes the eta-infos down into the branches of a case
    -- and the body of a let; and does beta-reduction if possible
    --   go subst fun (EI [b1,..,bn] co)  returns  (subst(fun) |> co) b1 .. bn
    go subst (Tick t e) eis
      = Tick (substTickish subst t) (go subst e eis)

    go subst (Cast e co) (EI bs mco)
      = go subst e (EI bs mco')
      where
        mco' = checkReflexiveMCo (Core.substCo subst co `mkTransMCoR` mco)
               -- See Note [Check for reflexive casts in eta expansion]

    go subst (Case e b ty alts) eis
      = Case (Core.substExprSC subst e) b1 ty' alts'
      where
        (subst1, b1) = Core.substBndr subst b
        alts' = map subst_alt alts
        ty'   = etaInfoAppTy (substTyUnchecked subst ty) eis
        subst_alt (Alt con bs rhs) = Alt con bs' (go subst2 rhs eis)
                 where
                  (subst2,bs') = Core.substBndrs subst1 bs

    go subst (Let b e) eis
      | not (isJoinBind b) -- See Note [Eta expansion for join points]
      = Let b' (go subst' e eis)
      where
        (subst', b') = Core.substBindSC subst b

    -- Beta-reduction if possible, pushing any intervening casts past
    -- the argument. See Note [The EtaInfo mechanism]
    go subst (Lam v e) (EI (b:bs) mco)
      | Just (arg, mco') <- pushMCoArg mco (varToCoreExpr b)
      = go (Core.extendSubst subst v arg) e (EI bs mco')

    -- Stop pushing down; just wrap the expression up
    -- See Note [Check for reflexive casts in eta expansion]
    go subst e (EI bs mco) = Core.substExprSC subst e
                             `mkCastMCo` checkReflexiveMCo mco
                             `mkVarApps` bs

--------------
etaInfoAppTy :: Type -> EtaInfo -> Type
-- If                    e :: ty
-- then   etaInfoApp e eis :: etaInfoAppTy ty eis
etaInfoAppTy ty (EI bs mco)
  = applyTypeToArgs ty1 (map varToCoreExpr bs)
  where
    ty1 = case mco of
             MRefl  -> ty
             MCo co -> coercionRKind co

--------------
etaInfoAbs :: EtaInfo -> CoreExpr -> CoreExpr
-- See Note [The EtaInfo mechanism]
etaInfoAbs (EI bs mco) expr = (mkLams bs expr) `mkCastMCo` mkSymMCo mco

--------------
-- | @mkEtaWW n _ fvs ty@ will compute the 'EtaInfo' necessary for eta-expanding
-- an expression @e :: ty@ to take @n@ value arguments, where @fvs@ are the
-- free variables of @e@.
--
-- Note that this function is entirely unconcerned about cost centres and other
-- semantically-irrelevant source annotations, so call sites must take care to
-- preserve that info. See Note [Eta expansion and SCCs].
mkEtaWW
  :: [OneShotInfo]
  -- ^ How many value arguments to eta-expand
  -> SDoc
  -- ^ The pretty-printed original expression, for warnings.
  -> InScopeSet
  -- ^ A super-set of the free vars of the expression to eta-expand.
  -> Type
  -> (InScopeSet, EtaInfo)
  -- ^ The variables in 'EtaInfo' are fresh w.r.t. the incoming 'InScopeSet'.
  -- The outgoing 'InScopeSet' extends the incoming 'InScopeSet' with the
  -- fresh variables in 'EtaInfo'.

mkEtaWW orig_oss ppr_orig_expr in_scope orig_ty
  = go 0 orig_oss empty_subst orig_ty
  where
    empty_subst = mkEmptySubst in_scope

    go :: Int                -- For fresh names
       -> [OneShotInfo]      -- Number of value args to expand to
       -> Subst -> Type      -- We are really looking at subst(ty)
       -> (InScopeSet, EtaInfo)
    -- (go [o1,..,on] subst ty) = (in_scope, EI [b1,..,bn] co)
    --    co :: subst(ty) ~ b1_ty -> ... -> bn_ty -> tr

    go _ [] subst _
       ----------- Done!  No more expansion needed
       = (getSubstInScope subst, EI [] MRefl)

    go n oss@(one_shot:oss1) subst ty
       ----------- Forall types  (forall a. ty)
       | Just (Bndr tcv vis, ty') <- splitForAllForAllTyBinder_maybe ty
       , (subst', tcv') <- Type.substVarBndr subst tcv
       , let oss' | isTyVar tcv = oss
                  | otherwise   = oss1
         -- A forall can bind a CoVar, in which case
         -- we consume one of the [OneShotInfo]
       , (in_scope, EI bs mco) <- go n oss' subst' ty'
       = (in_scope, EI (tcv' : bs) (mkEtaForAllMCo (Bndr tcv' vis) ty' mco))

       ----------- Function types  (t1 -> t2)
       | Just (_af, mult, arg_ty, res_ty) <- splitFunTy_maybe ty
       , typeHasFixedRuntimeRep arg_ty
          -- See Note [Representation polymorphism invariants] in GHC.Core
          -- See also test case typecheck/should_run/EtaExpandLevPoly

       , (subst', eta_id) <- freshEtaId n subst (Scaled mult arg_ty)
          -- Avoid free vars of the original expression

       , let eta_id' = eta_id `setIdOneShotInfo` one_shot
       , (in_scope, EI bs mco) <- go (n+1) oss1 subst' res_ty
       = (in_scope, EI (eta_id' : bs) (mkFunResMCo eta_id' mco))

       ----------- Newtypes
       -- Given this:
       --      newtype T = MkT ([T] -> Int)
       -- Consider eta-expanding this
       --      eta_expand 1 e T
       -- We want to get
       --      coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
       | Just (co, ty') <- topNormaliseNewType_maybe ty
       , -- co :: ty ~ ty'
         let co' = Type.substCo subst co
             -- Remember to apply the substitution to co (#16979)
             -- (or we could have applied to ty, but then
             --  we'd have had to zap it for the recursive call)
       , (in_scope, EI bs mco) <- go n oss subst ty'
         -- mco :: subst(ty') ~ b1_ty -> ... -> bn_ty -> tr
       = (in_scope, EI bs (mkTransMCoR co' mco))

       | otherwise       -- We have an expression of arity > 0,
                         -- but its type isn't a function, or a binder
                         -- does not have a fixed runtime representation
       = warnPprTrace True "mkEtaWW" ((ppr orig_oss <+> ppr orig_ty) $$ ppr_orig_expr)
         (getSubstInScope subst, EI [] MRefl)
        -- This *can* legitimately happen:
        -- e.g.  coerce Int (\x. x).  Essentially the programmer is
        -- playing fast and loose with types (Happy does this a lot).
        -- So we simply decline to eta-expand.  Otherwise we'd end up
        -- with an explicit lambda having a non-function type

mkEtaForAllMCo :: ForAllTyBinder -> Type -> MCoercion -> MCoercion
mkEtaForAllMCo (Bndr tcv vis) ty mco
  = case mco of
      MRefl | vis == coreTyLamForAllTyFlag -> MRefl
            | otherwise                    -> mk_fco (mkRepReflCo ty)
      MCo co                               -> mk_fco co
  where
    mk_fco co = MCo (mkForAllCo tcv vis coreTyLamForAllTyFlag
                                (mkNomReflCo (varType tcv)) co)
    -- coreTyLamForAllTyFlag: See Note [The EtaInfo mechanism], particularly
    -- the (EtaInfo Invariant).  (sym co) wraps a lambda that always has
    -- a ForAllTyFlag of coreTyLamForAllTyFlag; see Note [Required foralls in Core]
    -- in GHC.Core.TyCo.Rep

{-
************************************************************************
*                                                                      *
                Eta reduction
*                                                                      *
************************************************************************

Note [Eta reduction makes sense]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's eta reduction transforms
   \x y. <fun> x y  --->  <fun>
We discuss when this is /sound/ in Note [Eta reduction soundness].
But even assuming it is sound, when is it /desirable/?  That
is what we discuss here.

This test is made by `ok_fun` in tryEtaReduce.

1. We want to eta-reduce only if we get all the way to a trivial
   expression; we don't want to remove extra lambdas unless we are
   going to avoid allocating this thing altogether.

   Trivial means *including* casts and type lambdas:
     * `\x. f x |> co  -->  f |> (ty(x) -> co)` (provided `co` doesn't mention `x`)
     * `/\a. \x. f @(Maybe a) x -->  /\a. f @(Maybe a)`
   See Note [Do not eta reduce PAPs] for why we insist on a trivial head.

Of course, eta reduction is not always sound. See Note [Eta reduction soundness]
for when it is.

When there are multiple arguments, we might get multiple eta-redexes. Example:
   \x y. e x y
   ==> { reduce \y. (e x) y in context \x._ }
   \x. e x
   ==> { reduce \x. e x in context _ }
   e
And (1) implies that we never want to stop with `\x. e x`, because that is not a
trivial expression. So in practice, the implementation works by considering a
whole group of leading lambdas to reduce.
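
For instance (source-level sketches; `wrap` and `wrap1` are made-up names):
   wrap  f   = \x y -> f x y   -- the whole lambda group goes:  wrap f = f
   wrap1 f a = \y -> f a y     -- head (f a) is a PAP, not trivial, so it is
                               -- left alone; see Note [Do not eta reduce PAPs]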

These delicacies are why we don't simply use 'exprIsTrivial' and 'exprIsHNF'
in 'tryEtaReduce'. Alas.

Note [Eta reduction soundness]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's eta reduction transforms
   \x y. <fun> x y  --->  <fun>
For soundness, we obviously require that `x` and `y`
do not occur free. But what /other/ restrictions are there for
eta reduction to be sound?

We discuss separately what it means for eta reduction to be
/desirable/, in Note [Eta reduction makes sense].

Eta reduction is *not* a sound transformation in general, because it
may change termination behavior if *value* lambdas are involved:
  `bot`  /=  `\x. bot x`   (as can be observed by a simple `seq`)
The past has shown that oversight of this fact can not only lead to endless
loops or exceptions, but also straight out *segfaults*.

Nevertheless, we can give the following criteria for when it is sound to
perform eta reduction on an expression with n leading lambdas `\xs. e xs`
(checked in 'is_eta_reduction_sound' in 'tryEtaReduce', which focuses on the
case where `e` is trivial):

(A) It is sound to eta-reduce n arguments as long as n does not exceed the
    `exprArity` of `e`. (Needs Arity analysis.)
    This criterion exploits information about how `e` is *defined*.

    Example: If `e = \x. bot` then we know it won't diverge until it is called
    with one argument. Hence it is safe to eta-reduce `\x. e x` to `e`.
    By contrast, it would be *unsound* to eta-reduce 2 args, `\x y. e x y` to `e`:
    `e 42` diverges when `(\x y. e x y) 42` does not.

(S) It is sound to eta-reduce n arguments in an evaluation context in which all
    calls happen with at least n arguments. (Needs Strictness analysis.)
    NB: This treats evaluations like a call with 0 args.
    NB: This criterion exploits information about how `e` is *used*.

    Example: Given a function `g` like
      `g c = Just (c 1 2 + c 2 3)`
    it is safe to eta-reduce the arg in `g (\x y. e x y)` to `g e` without
    knowing *anything* about `e` (perhaps it's a parameter occ itself), simply
    because `g` always calls its parameter with 2 arguments.
    It is also safe to eta-reduce just one arg, e.g., `g (\x. e x)` to `g e`.
    By contrast, it would be *unsound* to eta-reduce 3 args in a call site
    like `g (\x y z. e x y z)` to `g e`, because that diverges when
    `e = \x y. bot`.

    Could we relax to "*At least one call in the same trace* is with n args"?
    No. Consider what happens for
      ``g2 c = c True `seq` c False 42``
    Here, `g2` will call `c` with 2 arguments (if there is a call at all).
    But it is unsound to eta-reduce the arg in `g2 (\x y. e x y)` to `g2 e`
    when `e = \x. if x then bot else id`, because the latter will diverge when
    the former would not. Fortunately, the strictness analyser will report
    "Not always called with two arguments" for `g2` and we won't eta-expand.

    See Note [Eta reduction based on evaluation context] for the implementation
    details. This criterion is tested extensively in T21261.

(R) Note [Eta reduction in recursive RHSs] tells us that we should not
    eta-reduce `f` in its own RHS and describes our fix.
    There we have `f = \x. f x` and we should not eta-reduce to `f=f`, which
    might change a terminating program (think @f `seq` e@) to a non-terminating
    one.

(E) (See fun_arity in tryEtaReduce.) As a perhaps special case on the
    boundary of (A) and (S), when we know that a fun binder `f` is in
    WHNF, we simply assume it has arity 1 and apply (A).  Example:
       g f = f `seq` \x. f x
    Here it's sound to eta-reduce `\x. f x` to `f`, because `f` can't be bottom
    after the `seq`. This turned up in #7542.

(T) If the binders are all type arguments, it's always safe to eta-reduce,
    regardless of the arity of f.
       /\a b. f @a @b  --> f

2. Type and dictionary abstraction. Regardless of whether 'f' is a value, it
   is always sound to reduce /type lambdas/, thus:
        (/\a -> f a)  -->   f
   Moreover, we always want to, because it makes RULEs apply more often:
      This RULE:    `forall g. foldr (build (/\a -> g a))`
      should match  `foldr (build (/\b -> ...something complex...))`
   and the simplest way to do so is eta-reduce `/\a -> g a` in the RULE to `g`.

   More debatably, we extend this to dictionary arguments too, because the type
   checker can insert these eta-expanded versions, with both type and dictionary
   lambdas; hence the slightly ad-hoc (all ok_lam bndrs).  That is, we eta-reduce
        \(d::Num a). f d   -->   f
   regardless of f's arity. It's not clear whether or not this is important, and
   it is not in general sound.  But that's the way it is right now.

And here are a few more technical criteria for when it is *not* sound to
eta-reduce that are specific to Core and GHC:

(J) We may not undersaturate join points.
    See Note [Invariants on join points] in GHC.Core, and #20599.

(B) We may not undersaturate functions with no binding.
    See Note [Eta expanding primops].

(W) We may not undersaturate StrictWorkerIds.
    See Note [CBV Function Ids] in GHC.Types.Id.Info.

Here is a list of historic accidents surrounding unsound eta-reduction:

* Consider
        f = \x.f x
        h y = case (case y of { True -> f `seq` True; False -> False }) of
                True -> ...; False -> ...
  If we (unsoundly) eta-reduce f to get f=f, the strictness analyser
  says f=bottom, and replaces the (f `seq` True) with just
  (f `cast` unsafe-co).
  [SG in 2022: I don't think worker/wrapper would do this today.]
  BUT, as things stand, 'f' got arity 1, and it *keeps* arity 1 (perhaps also
  wrongly). So CorePrep eta-expands the definition again, so that it does not
  terminate after all.
  Result: seg-fault because the boolean case actually gets a function value.
  See #1947.

* Never *reduce* arity. For example
      f = \xy. g x y
  Then if h has arity 1 we don't want to eta-reduce because then
  f's arity would decrease, and that is bad
  [SG in 2022: I don't understand this point. There is no `h`, perhaps that
   should have been `g`. Even then, this proposed eta-reduction is invalid by
   criterion (A), which might actually be the point this anecdote is trying to
   make. Perhaps the "no arity decrease" idea is also related to
   Note [Arity robustness]?]

Note [Do not eta reduce PAPs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
I considered eta-reducing if the result is a PAP:
   \x. f e1 e2 x  ==>   f e1 e2

This reduces clutter, sometimes a lot. See Note [Do not eta-expand PAPs]
in GHC.Core.Opt.Simplify.Utils, where we are careful not to eta-expand
a PAP.  If eta-expanding is bad, then eta-reducing is good!

Also the code generator likes eta-reduced PAPs; see GHC.CoreToStg.Prep
Note [No eta reduction needed in rhsToBody].

But note that we don't want to eta-reduce
     \x y.  f <expensive> x y
to
     f <expensive>
The former has arity 2, and repeats <expensive> for every call of the
function; the latter has arity 0, and shares <expensive>.  We don't want
to change behaviour.  Hence the call to exprIsCheap in ok_fun.

I noticed this when examining #18993 and, although it is delicate,
eta-reducing to a PAP happens to fix the regression in #18993.

HOWEVER, if we transform
   \x. f y x   ==>   f y
that might mean that f isn't saturated any more, and does not inline.
This led to some other regressions.

TL;DR currently we do /not/ eta reduce if the result is a PAP.

Note [Eta reduction with casted arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
    (\(x:t3). f (x |> g)) :: t3 -> t2
  where
    f :: t1 -> t2
    g :: t3 ~ t1
This should be eta-reduced to

    f |> (sym g -> t2)

So we need to accumulate a coercion, pushing it inward (past
variable arguments only) thus:
   f (x |> co_arg) |> co  -->  (f |> (sym co_arg -> co)) x
   f (x:t)         |> co  -->  (f |> (t -> co)) x
   f @ a           |> co  -->  (f |> (forall a.co)) @ a
   f @ (g:t1~t2)   |> co  -->  (f |> (t1~t2 => co)) @ (g:t1~t2)
These are the equations for ok_arg.

Note [Eta reduction with casted function]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Since we are pushing a coercion inwards, it is easy to accommodate
    (\xy. (f x |> g) y)
    (\xy. (f x y) |> g)

See the `(Cast e co)` equation for `go` in `tryEtaReduce`.  The
eta-expander pushes those casts outwards, so you might think we won't
ever see a cast here, but if we have
  \xy. (f x y |> g)
we will call tryEtaReduce [x,y] (f x y |> g), and we'd like that to
work.  This happens in GHC.Core.Opt.Simplify.Utils.mkLam, where
eta-expansion may be turned off (by sm_eta_expand).

Note [Eta reduction based on evaluation context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note [Eta reduction soundness], criterion (S) allows us to eta-reduce
`g (\x y. e x y)` to `g e` when we know that `g` always calls its parameter with
at least 2 arguments. So how do we read that off `g`'s demand signature?

Let's take the simple example of #21261, where `g` (actually, `f`) is defined as
  g c = c 1 2 + c 3 4
Then this is how the pieces are put together:

  * Demand analysis infers `<SC(S,C(1,L))>` for `g`'s demand signature

  * When the Simplifier next simplifies the argument in `g (\x y. e x y)`, it
    looks up the *evaluation context* of the argument in the form of the
    sub-demand `C(S,C(1,L))` and stores it in the 'SimplCont'.
    (Why does it drop the outer evaluation cardinality of the demand, `S`?
    Because it's irrelevant! When we simplify an expression, we do so under the
    assumption that it is currently under evaluation.)
    This sub-demand literally says "Whenever this expression is evaluated, it
    is called with at least two arguments, potentially multiple times".

  * Then the simplifier takes apart the lambda and simplifies the lambda group
    and then calls 'tryEtaReduce' when rebuilding the lambda, passing the
    evaluation context `C(S,C(1,L))` along. Then we simply peel off 2 call
    sub-demands `Cn` and see whether all of the n's (here: `S=C_1N` and
    `1=C_11`) were strict. And strict they are! Thus, it will eta-reduce
    `\x y. e x y` to `e`.
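
To make that peeling step concrete (a rough sketch; the precise return type of
peelManyCalls doesn't matter here): peeling 2 call layers off `C(S,C(1,L))`
yields the call cardinalities `S` and `1`, both strict, so the eta-reduction
of `\x y. e x y` goes ahead.  Peeling a 3rd layer would have to come out of
the residual sub-demand `L`, whose cardinality is not strict, so a
three-lambda group would be left alone.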

Note [Eta reduction in recursive RHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following recursive function:
  f = \x. ....g (\y. f y)....
The recursive call of f in its own RHS seems like a fine opportunity for
eta-reduction because f has arity 1. And often it is!

Alas, that is unsound in general if the eta-reduction happens in a tail context.
Making the arity visible in the RHS allows us to eta-reduce
  f = \x -> f x
to
  f = f
which means we optimise terminating programs like (f `seq` ()) into
non-terminating ones. Nor is this problem just for tail calls.  Consider
  f = id (\x -> f x)
where we have (for some reason) not yet inlined `id`.  We must not eta-reduce to
  f = id f
because that will then simplify to `f = f` as before.

An immediate idea might be to look at whether the called function is a local
loopbreaker and refrain from eta-reducing. But that doesn't work for mutually
recursive functions like in #21652:
  f = g
  g* x = f x
Here, g* is the loopbreaker but f isn't.

What can we do?

Fix 1: Zap `idArity` when analysing recursive RHSs and re-attach the info when
    entering the let body.
    Has the disadvantage that other transformations which make use of arity
    (such as dropping of `seq`s when arity > 0) will no longer work in the RHS.
    Plus it requires non-trivial refactorings to both the simple optimiser (in
    the way `subst_opt_bndr` is used) as well as the Simplifier (in the way
    `simplRecBndrs` and `simplRecJoinBndrs` are used), modifying the SimplEnv's
    substitution twice in the process. A very complicated stop-gap.

Fix 2: Pass the set of enclosing recursive binders to `tryEtaReduce`; these are
    the ones we should not eta-reduce. All call sites must maintain this set.
    Example:
      rec { f1 = ....rec { g = ... (\x. g x)...(\y. f2 y)... }...
          ; f2 = ...f1... }
    when eta-reducing those inner lambdas, we need to know that we are in the
    rec group for {f1, f2, g}.
    This is very much like the solution in Note [Speculative evaluation] in
    GHC.CoreToStg.Prep.
    It is a bit tiresome to maintain this info, because it means another field
    in SimplEnv and SimpleOptEnv.

We implement Fix (2) because it isn't as complicated to maintain as (1).
Plus, it is the correct fix to begin with. After all, the arity is correct,
but doing the transformation isn't. The moving parts are:
  * A field `scRecIds` in `SimplEnv` tracks the enclosing recursive binders
  * We extend the `scRecIds` set in `GHC.Core.Opt.Simplify.simplRecBind`
  * We consult the set in `is_eta_reduction_sound` in `tryEtaReduce`
The situation is very similar to Note [Speculative evaluation] which has the
same fix.
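
Schematically (hypothetical binders, not code from this module):
  rec { f = ... (\x. f x) ...
      ; g = ... (\y. f y) ... }
While simplifying either RHS we have scRecIds = {f, g}, so tryEtaReduce sees
that the head 'f' of both eta-redexes is in the enclosing rec group and
declines to reduce them, even though f's arity would otherwise allow it.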
-}

-- | `tryEtaReduce [x,y,z] e sd` returns `Just e'` if `\x y z -> e` is evaluated
-- according to `sd` and can soundly and gainfully be eta-reduced to `e'`.
-- See Note [Eta reduction soundness] and Note [Eta reduction makes sense]
-- for when that is the case.
tryEtaReduce :: UnVarSet -> [Var] -> CoreExpr -> SubDemand -> Maybe CoreExpr
-- Return an expression equal to (\bndrs. body)
tryEtaReduce rec_ids bndrs body eval_sd
  = go (reverse bndrs) body (mkRepReflCo (exprType body))
  where
    incoming_arity = count isId bndrs -- See Note [Eta reduction makes sense], point (2)

    go :: [Var]            -- Binders, innermost first, types [a3,a2,a1]
       -> CoreExpr         -- Of type tr
       -> Coercion         -- Of type tr ~ ts
       -> Maybe CoreExpr   -- Of type a1 -> a2 -> a3 -> ts
    -- See Note [Eta reduction with casted arguments]
    -- for why we have an accumulating coercion
    --
    -- Invariant: (go bs body co) returns an expression
    --            equivalent to (\(reverse bs). (body |> co))

    -- See Note [Eta reduction with casted function]
    go bs (Cast e co1) co2
      = go bs e (co1 `mkTransCo` co2)

    go bs (Tick t e) co
      | tickishFloatable t
      = fmap (Tick t) $ go bs e co
      -- Float app ticks: \x -> Tick t (e x) ==> Tick t e

    go (b : bs) (App fun arg) co
      | Just (co', ticks) <- ok_arg b arg co (exprType fun)
      = fmap (flip (foldr mkTick) ticks) $ go bs fun co'
            -- Float arg ticks: \x -> e (Tick t x) ==> Tick t e

    go remaining_bndrs fun co
      | all isTyVar remaining_bndrs
            -- If all the remaining_bndrs are tyvars, then the etad_exp
            --    will be trivial, which is what we want.
            -- e.g. We might have  /\a \b. f [a] b, and we want to
            --      eta-reduce to  /\a. f [a]
            -- We don't want to give up on this one: see #20040
            -- See Note [Eta reduction makes sense], point (1)
      , remaining_bndrs `ltLength` bndrs
            -- Only reply Just if /something/ has happened
      , ok_fun fun
      , let used_vars     = exprFreeVars fun `unionVarSet` tyCoVarsOfCo co
            reduced_bndrs = mkVarSet (dropList remaining_bndrs bndrs)
            -- reduced_bndrs are the ones we are eta-reducing away
      , used_vars `disjointVarSet` reduced_bndrs
          -- Check for any of the reduced_bndrs (about to be dropped)
          -- free in the result, including the accumulated coercion.
          -- See Note [Eta reduction makes sense], intro and point (1)
          -- NB: don't compute used_vars from exprFreeVars (mkCast fun co)
          --     because the latter may be ill formed if the guard fails (#21801)
      = Just (mkLams (reverse remaining_bndrs) (mkCast fun co))

    go _remaining_bndrs _fun  _  = -- pprTrace "tER fail" (ppr _fun $$ ppr _remaining_bndrs) $
                                   Nothing

    ---------------
    -- See Note [Eta reduction makes sense], point (1)
    ok_fun (App fun (Type {})) = ok_fun fun
    ok_fun (Cast fun _)        = ok_fun fun
    ok_fun (Tick _ expr)       = ok_fun expr
    ok_fun (Var fun_id)        = is_eta_reduction_sound fun_id
    ok_fun _fun                = False

    ---------------
    -- See Note [Eta reduction soundness], this is THE place to check soundness!
    is_eta_reduction_sound fun
      | fun `elemUnVarSet` rec_ids          -- Criterion (R)
      = False -- Don't eta-reduce 'fun' in its own recursive RHSs

      | cantEtaReduceFun fun                -- Criteria (J), (W), (B)
      = False -- Function can't be eta reduced to arity 0
              -- without violating invariants of Core and GHC

      | otherwise
      = -- Check that eta-reduction won't make the program stricter...
        fun_arity fun >= incoming_arity          -- Criterion (A) and (E)
        || all_calls_with_arity incoming_arity   -- Criterion (S)
        || all ok_lam bndrs                      -- Criterion (T)

    all_calls_with_arity n = isStrict (fst $ peelManyCalls n eval_sd)
       -- See Note [Eta reduction based on evaluation context]

    ---------------
    fun_arity fun
       | arity > 0                           = arity
       | isEvaldUnfolding (idUnfolding fun)  = 1
           -- See Note [Eta reduction soundness], criterion (E)
       | otherwise                           = 0
       where
         arity = idArity fun

    ---------------
    ok_lam v = isTyVar v || isEvVar v
    -- See Note [Eta reduction makes sense], point (2)

    ---------------
    ok_arg :: Var              -- Of type bndr_t
           -> CoreExpr         -- Of type arg_t
           -> Coercion         -- Of kind (t1~t2)
           -> Type             -- Type (arg_t -> t1) of the function
                               --      to which the argument is supplied
           -> Maybe (Coercion  -- Of type (arg_t -> t1 ~  bndr_t -> t2)
                               --   (and similarly for tyvars, coercion args)
                    , [CoreTickish])
    -- See Note [Eta reduction with casted arguments]
    ok_arg bndr (Type arg_ty) co fun_ty
       | Just tv <- getTyVar_maybe arg_ty
       , bndr == tv  = case splitForAllForAllTyBinder_maybe fun_ty of
           Just (Bndr _ vis, _) -> Just (fco, [])
             where !fco = mkForAllCo tv vis coreTyLamForAllTyFlag kco co
                   -- The lambda we are eta-reducing always has visibility
                   -- 'coreTyLamForAllTyFlag' which may or may not match
                   -- the visibility on the inner function (#24014)
                   kco = mkNomReflCo (tyVarKind tv)
           Nothing -> pprPanic "tryEtaReduce: type arg to non-forall type"
                               (text "fun:" <+> ppr bndr
                                $$ text "arg:" <+> ppr arg_ty
                                $$ text "fun_ty:" <+> ppr fun_ty)
    ok_arg bndr (Var v) co fun_ty
       | bndr == v
       , let mult = idMult bndr
       , Just (_af, fun_mult, _, _) <- splitFunTy_maybe fun_ty
       , mult `eqType` fun_mult -- There is no change in multiplicity, otherwise we must abort
       = Just (mkFunResCo Representational bndr co, [])
    ok_arg bndr (Cast e co_arg) co fun_ty
       | (ticks, Var v) <- stripTicksTop tickishFloatable e
       , Just (_, fun_mult, _, _) <- splitFunTy_maybe fun_ty
       , bndr == v
       , fun_mult `eqType` idMult bndr
       = Just (mkFunCoNoFTF Representational (multToCo fun_mult) (mkSymCo co_arg) co, ticks)
       -- The simplifier combines multiple casts into one,
       -- so we can have a simple-minded pattern match here
    ok_arg bndr (Tick t arg) co fun_ty
       | tickishFloatable t, Just (co', ticks) <- ok_arg bndr arg co fun_ty
       = Just (co', t:ticks)

    ok_arg _ _ _ _ = Nothing

-- | Can we eta-reduce the given function
-- See Note [Eta reduction soundness], criteria (B), (J), and (W).
cantEtaReduceFun :: Id -> Bool
cantEtaReduceFun fun
  =    hasNoBinding fun -- (B)
       -- Don't undersaturate functions with no binding.

    || isJoinId fun     -- (J)
       -- Don't undersaturate join points.
       -- See Note [Invariants on join points] in GHC.Core, and #20599

    || isJust (idCbvMarks_maybe fun) -- (W)
       -- Don't undersaturate StrictWorkerIds.
       -- See Note [CBV Function Ids] in GHC.Types.Id.Info.


{- *********************************************************************
*                                                                      *
              The "push rules"
*                                                                      *
************************************************************************

Here we implement the "push rules" from FC papers:

* The push-argument rules, where we can move a coercion past an argument.
  We have
      (fun |> co) arg
  and we want to transform it to
    (fun arg') |> co'
  for some suitable co' and transformed arg'.

* The PushK rule for data constructors.  We have
       (K e1 .. en) |> co
  and we want to transform to
       (K e1' .. en')
  by pushing the coercion into the arguments
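
As a concrete instance of the push-argument rule for a value argument
(schematic, using the naming of pushCoValArg below): if
      co :: (s1 -> s2) ~R (t1 -> t2)
decomposes into co1 :: s1 ~ t1 and co2 :: s2 ~ t2, then
      (fun |> co) arg   ~~>   (fun (arg |> sym co1)) |> co2
That is, the cast on the function becomes a cast on the argument plus a
cast on the result.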
-}

pushCoArgs :: CoercionR -> [CoreArg] -> Maybe ([CoreArg], MCoercion)
pushCoArgs co []         = return ([], MCo co)
pushCoArgs co (arg:args) = do { (arg',  m_co1) <- pushCoArg  co  arg
                              ; case m_co1 of
                                  MCo co1 -> do { (args', m_co2) <- pushCoArgs co1 args
                                                 ; return (arg':args', m_co2) }
                                  MRefl   -> return (arg':args, MRefl) }

pushMCoArg :: MCoercionR -> CoreArg -> Maybe (CoreArg, MCoercion)
pushMCoArg MRefl    arg = Just (arg, MRefl)
pushMCoArg (MCo co) arg = pushCoArg co arg

pushCoArg :: CoercionR -> CoreArg -> Maybe (CoreArg, MCoercion)
-- We have (fun |> co) arg, and we want to transform it to
--         (fun arg) |> co
-- This may fail, e.g. if (fun :: N) where N is a newtype
-- C.f. simplCast in GHC.Core.Opt.Simplify
-- 'co' is always Representational
pushCoArg co arg
  | Type ty <- arg
  = do { (ty', m_co') <- pushCoTyArg co ty
       ; return (Type ty', m_co') }
  | otherwise
  = do { (arg_mco, m_co') <- pushCoValArg co
       ; let arg_mco' = checkReflexiveMCo arg_mco
             -- checkReflexiveMCo: see Note [Check for reflexive casts in eta expansion]
             -- The coercion is very often (arg_co -> res_co), but without
             -- the argument coercion actually being ReflCo
       ; return (arg `mkCastMCo` arg_mco', m_co') }

pushCoTyArg :: CoercionR -> Type -> Maybe (Type, MCoercionR)
-- We have (fun |> co) @ty
-- Push the coercion through to return
--         (fun @ty') |> co'
-- 'co' is always Representational
-- If the returned MCoercion is MRefl, then the coercion would have been reflexive;
-- it's faster not to compute it, though.
pushCoTyArg co ty
  -- The following is inefficient - don't do `eqType` here, the coercion
  -- optimizer will take care of it. See #14737.
  -- -- | tyL `eqType` tyR
  -- -- = Just (ty, Nothing)

  | isReflCo co
  = Just (ty, MRefl)

  | isForAllTy_ty tyL
  = assertPpr (isForAllTy_ty tyR) (ppr co $$ ppr ty) $
    Just (ty `mkCastTy` co1, MCo co2)

  | otherwise
  = Nothing
  where
    Pair tyL tyR = coercionKind co
       -- co :: tyL ~R tyR
       -- tyL = forall (a1 :: k1). ty1
       -- tyR = forall (a2 :: k2). ty2

    co1 = mkSymCo (mkSelCo SelForAll co)
       -- co1 :: k2 ~N k1
       -- Note that SelCo extracts a Nominal equality between the
       -- kinds of the types related by a coercion between forall-types.
       -- See the SelCo case in GHC.Core.Lint.

    co2 = mkInstCo co (mkGReflLeftCo Nominal ty co1)
        -- co2 :: ty1[ (ty|>co1)/a1 ] ~R ty2[ ty/a2 ]
        -- Arg of mkInstCo is always nominal, hence Nominal

-- | If @pushCoValArg co = Just (co_arg, co_res)@, then
--
-- > (\x.body) |> co  =  (\y. let { x = y |> co_arg } in body) |> co_res
--
-- or, equivalently
--
-- > (fun |> co) arg  =  (fun (arg |> co_arg)) |> co_res
--
-- If the LHS is well-typed, then so is the RHS. In particular, the argument
-- @arg |> co_arg@ is guaranteed to have a fixed 'RuntimeRep', in the sense of
-- Note [Fixed RuntimeRep] in GHC.Tc.Utils.Concrete.
pushCoValArg :: CoercionR -> Maybe (MCoercionR, MCoercionR)
pushCoValArg co
  -- The following is inefficient - don't do `eqType` here, the coercion
  -- optimizer will take care of it. See #14737.
  -- -- | tyL `eqType` tyR
  -- -- = Just (mkRepReflCo arg, Nothing)

  | isReflCo co
  = Just (MRefl, MRefl)

  | isFunTy tyL
  , (_, co1, co2) <- decomposeFunCo co
      -- If   co  :: (tyL1 -> tyL2) ~ (tyR1 -> tyR2)
      -- then co1 :: tyL1 ~ tyR1
      --      co2 :: tyL2 ~ tyR2

  , typeHasFixedRuntimeRep new_arg_ty
    -- We can't push the coercion inside if it would give rise to
    -- a representation-polymorphic argument.

  = assertPpr (isFunTy tyL && isFunTy tyR)
     (vcat [ text "co:" <+> ppr co
           , text "old_arg_ty:" <+> ppr old_arg_ty
           , text "new_arg_ty:" <+> ppr new_arg_ty ]) $
    Just (coToMCo (mkSymCo co1), coToMCo co2)
    -- Critically, coToMCo checks for ReflCo; the whole coercion may not
    -- be reflexive, but either of its components might be
    -- We could use isReflexiveCo, but it's not clear if the benefit
    -- is worth the cost, and it makes no difference in #18223

  | otherwise
  = Nothing
  where
    old_arg_ty = funArgTy tyR
    new_arg_ty = funArgTy tyL
    Pair tyL tyR = coercionKind co

pushCoercionIntoLambda
    :: HasDebugCallStack => InScopeSet -> Var -> CoreExpr -> CoercionR -> Maybe (Var, CoreExpr)
-- This implements the Push rule from the paper on coercions
--    (\x. e) |> co
-- ===>
--    (\x'. e |> co')
pushCoercionIntoLambda in_scope x e co
    | assert (not (isTyVar x) && not (isCoVar x)) True
    , Pair s1s2 t1t2 <- coercionKind co
    , Just {}              <- splitFunTy_maybe s1s2
    , Just (_, w1, t1,_t2) <- splitFunTy_maybe t1t2
    , (_, co1, co2)  <- decomposeFunCo co
    , typeHasFixedRuntimeRep t1
      -- We can't push the coercion into the lambda if it would create
      -- a representation-polymorphic binder.
    = let
          -- Should we optimize the coercions here?
          -- Otherwise they might not match too well
          x' = x `setIdType` t1 `setIdMult` w1
          in_scope' = in_scope `extendInScopeSet` x'
          subst = extendIdSubst (mkEmptySubst in_scope')
                                x
                                (mkCast (Var x') (mkSymCo co1))
            -- We substitute x' for x, except we need to preserve types.
            -- The types are as follows:
            --   x :: s1,  x' :: t1,  co1 :: s1 ~# t1,
            -- so we extend the substitution with x |-> (x' |> sym co1).
      in Just (x', substExpr subst e `mkCast` co2)
    | otherwise
    = Nothing

pushCoDataCon :: DataCon -> [CoreExpr] -> MCoercion
              -> Maybe (DataCon
                       , [Type]      -- Universal type args
                       , [CoreExpr]) -- All other args incl existentials
-- Implement the KPush reduction rule as described in "Down with kinds"
-- The transformation applies iff we have
--      (C e1 ... en) `cast` co
-- where co :: (T t1 .. tn) ~ to_ty
-- The left-hand one must be a T, because exprIsConApp returned True
-- but the right-hand one might not be.  (Though it usually will.)
pushCoDataCon dc dc_args MRefl    = Just $! (push_dc_refl dc dc_args)
pushCoDataCon dc dc_args (MCo co) = push_dc_gen  dc dc_args co (coercionKind co)

push_dc_refl :: DataCon -> [CoreExpr] -> (DataCon, [Type], [CoreExpr])
push_dc_refl dc dc_args
  = (dc, map exprToType univ_ty_args, rest_args)
  where
    !(univ_ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) dc_args

push_dc_gen :: DataCon -> [CoreExpr] -> Coercion -> Pair Type
            -> Maybe (DataCon, [Type], [CoreExpr])
push_dc_gen dc dc_args co (Pair from_ty to_ty)
  | from_ty `eqType` to_ty  -- try cheap test first
  = Just $! (push_dc_refl dc dc_args)

  | Just (to_tc, to_tc_arg_tys) <- splitTyConApp_maybe to_ty
  , to_tc == dataConTyCon dc
        -- These two tests can fail; we might see
        --      (C x y) `cast` (g :: T a ~ S [a]),
        -- where S is a type function.  In fact, exprIsConApp
        -- will probably not be called in such circumstances,
        -- but there's nothing wrong with it

  = let
        tc_arity       = tyConArity to_tc
        dc_univ_tyvars = dataConUnivTyVars dc
        dc_ex_tcvars   = dataConExTyCoVars dc
        arg_tys        = dataConRepArgTys dc

        non_univ_args  = dropList dc_univ_tyvars dc_args
        (ex_args, val_args) = splitAtList dc_ex_tcvars non_univ_args

        -- Make the "Psi" from the paper
        omegas = decomposeCo tc_arity co (tyConRolesRepresentational to_tc)
        (psi_subst, to_ex_arg_tys)
          = liftCoSubstWithEx Representational
                              dc_univ_tyvars
                              omegas
                              dc_ex_tcvars
                              (map exprToType ex_args)

          -- Cast the value arguments (which include dictionaries)
        new_val_args = zipWith cast_arg (map scaledThing arg_tys) val_args
        cast_arg arg_ty arg = mkCast arg (psi_subst arg_ty)

        to_ex_args = map Type to_ex_arg_tys

        dump_doc = vcat [ppr dc,      ppr dc_univ_tyvars, ppr dc_ex_tcvars,
                         ppr arg_tys, ppr dc_args,
                         ppr ex_args, ppr val_args, ppr co, ppr from_ty, ppr to_ty, ppr to_tc
                         , ppr $ mkTyConApp to_tc (map exprToType $ takeList dc_univ_tyvars dc_args) ]
    in
    assertPpr (eqType from_ty (mkTyConApp to_tc (map exprToType $ takeList dc_univ_tyvars dc_args))) dump_doc $
    assertPpr (equalLength val_args arg_tys) dump_doc $
    Just (dc, to_tc_arg_tys, to_ex_args ++ new_val_args)

  | otherwise
  = Nothing


collectBindersPushingCo :: CoreExpr -> ([Var], CoreExpr)
-- Collect lambda binders, pushing coercions inside if possible
-- E.g.   (\x.e) |> g         g :: <Int> -> blah
--        = (\x. e |> SelCo (SelFun SelRes) g)
--
-- That is,
--
-- collectBindersPushingCo ((\x.e) |> g) === ([x], e |> SelCo (SelFun SelRes) g)
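--
-- A further illustrative sketch (assuming each decomposed argument and
-- multiplicity coercion is Refl, and modulo mkSelCo's simplifications;
-- see Note [collectBindersPushingCo]):
--
-- collectBindersPushingCo ((\x y.e) |> g)    g :: <Int> -> <Bool> -> blah
--   === ([x,y], e |> SelCo (SelFun SelRes) (SelCo (SelFun SelRes) g))
--
-- If one of those component coercions is not Refl we stop at that binder
-- and leave the remaining lambda under the cast.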
collectBindersPushingCo e
  = go [] e
  where
    -- Peel off lambdas until we hit a cast.
    go :: [Var] -> CoreExpr -> ([Var], CoreExpr)
    -- The accumulator is in reverse order
    go bs (Lam b e)   = go (b:bs) e
    go bs (Cast e co) = go_c bs e co
    go bs e           = (reverse bs, e)

    -- We are in a cast; peel off casts until we hit a lambda.
    go_c :: [Var] -> CoreExpr -> CoercionR -> ([Var], CoreExpr)
    -- (go_c bs e c) is same as (go bs e (e |> c))
    go_c bs (Cast e co1) co2 = go_c bs e (co1 `mkTransCo` co2)
    go_c bs (Lam b e)    co  = go_lam bs b e co
    go_c bs e            co  = (reverse bs, mkCast e co)

    -- We are in a lambda under a cast; peel off lambdas and build a
    -- new coercion for the body.
    go_lam :: [Var] -> Var -> CoreExpr -> CoercionR -> ([Var], CoreExpr)
    -- (go_lam bs b e c) is same as (go_c bs (\b.e) c)
    go_lam bs b e co
      | isTyVar b
      , let Pair tyL tyR = coercionKind co
      , assert (isForAllTy_ty tyL) $
        isForAllTy_ty tyR
      , isReflCo (mkSelCo SelForAll co)  -- See Note [collectBindersPushingCo]
      = go_c (b:bs) e (mkInstCo co (mkNomReflCo (mkTyVarTy b)))

      | isCoVar b
      , let Pair tyL tyR = coercionKind co
      , assert (isForAllTy_co tyL) $
        isForAllTy_co tyR
      , isReflCo (mkSelCo SelForAll co)  -- See Note [collectBindersPushingCo]
      , let cov = mkCoVarCo b
      = go_c (b:bs) e (mkInstCo co (mkNomReflCo (mkCoercionTy cov)))

      | isId b
      , let Pair tyL tyR = coercionKind co
      , assert (isFunTy tyL) $ isFunTy tyR
      , (co_mult, co_arg, co_res) <- decomposeFunCo co
      , isReflCo co_mult -- See Note [collectBindersPushingCo]
      , isReflCo co_arg  -- See Note [collectBindersPushingCo]
      = go_c (b:bs) e co_res

      | otherwise = (reverse bs, mkCast (Lam b e) co)

{-

Note [collectBindersPushingCo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We just look for coercions of form
   <type> % w -> blah
(and similarly for foralls) to keep this function simple.  We could do
more elaborate stuff, but it'd involve substitution etc.
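
For instance (an illustrative sketch): pushing past a value binder
succeeds for a cast like
   (\(x::Int). e) |> (<Int> % w -> blah)
because the argument and multiplicity components are syntactically
reflexive, and `blah` simply continues into the body.  But for an
opaque coercion (say a coercion variable g of the same kind),
decomposeFunCo yields selection coercions that are not syntactically
Refl, so we stop and leave the cast wrapped around the lambda.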

-}

{- *********************************************************************
*                                                                      *
                Join points
*                                                                      *
********************************************************************* -}

-------------------
-- | Split an expression into the given number of binders and a body,
-- eta-expanding if necessary. Counts value *and* type binders.
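--
-- For instance (an illustrative sketch, assuming @f x :: Bool -> Int@
-- and with the fresh binder name invented):
--
-- > etaExpandToJoinPoint 2 (\x. f x)  ==  ([x, eta], f x eta)
--
-- The existing lambda supplies the first binder; the second comes from
-- eta-expanding the body via etaBodyForJoinPoint.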
etaExpandToJoinPoint :: JoinArity -> CoreExpr -> ([CoreBndr], CoreExpr)
etaExpandToJoinPoint join_arity expr
  = go join_arity [] expr
  where
    go 0 rev_bs e         = (reverse rev_bs, e)
    go n rev_bs (Lam b e) = go (n-1) (b : rev_bs) e
    go n rev_bs e         = case etaBodyForJoinPoint n e of
                              (bs, e') -> (reverse rev_bs ++ bs, e')

etaExpandToJoinPointRule :: JoinArity -> CoreRule -> CoreRule
etaExpandToJoinPointRule _ rule@(BuiltinRule {})
  = warnPprTrace True "Can't eta-expand built-in rule:" (ppr rule)
      -- How did a local binding get a built-in rule anyway? Probably a plugin.
    rule
etaExpandToJoinPointRule join_arity rule@(Rule { ru_bndrs = bndrs, ru_rhs = rhs
                                               , ru_args  = args })
  | need_args == 0
  = rule
  | need_args < 0
  = pprPanic "etaExpandToJoinPointRule" (ppr join_arity $$ ppr rule)
  | otherwise
  = rule { ru_bndrs = bndrs ++ new_bndrs
         , ru_args  = args ++ new_args
         , ru_rhs   = new_rhs }
  -- new_rhs really ought to be occ-analysed (see GHC.Core Note
  -- [OccInfo in unfoldings and rules]), but it makes a module loop to
  -- do so; it doesn't happen often; and it doesn't really matter if
  -- the outer binders have bogus occurrence info; and new_rhs won't
  -- have dead code if rhs didn't.

  where
    need_args = join_arity - length args
    (new_bndrs, new_rhs) = etaBodyForJoinPoint need_args rhs
    new_args = varsToCoreExprs new_bndrs

-- Adds as many binders as asked for; assumes expr is not a lambda
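-- For instance (an illustrative sketch, assuming body :: forall a. a -> Int
-- and with invented binder names):
--   etaBodyForJoinPoint 2 body  ==  ([a, eta], body @a eta)
-- where both binders are fresh with respect to body's free variables.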
etaBodyForJoinPoint :: Int -> CoreExpr -> ([CoreBndr], CoreExpr)
etaBodyForJoinPoint need_args body
  = go need_args body_ty (mkEmptySubst in_scope) [] body
  where
    go 0 _  _     rev_bs e
      = (reverse rev_bs, e)
    go n ty subst rev_bs e
      | Just (tv, res_ty) <- splitForAllTyCoVar_maybe ty
      , let (subst', tv') = substVarBndr subst tv
      = go (n-1) res_ty subst' (tv' : rev_bs) (e `App` varToCoreExpr tv')
        -- The varToCoreExpr is important: `tv` might be a coercion variable

      | Just (_, mult, arg_ty, res_ty) <- splitFunTy_maybe ty
      , let (subst', b) = freshEtaId n subst (Scaled mult arg_ty)
      = go (n-1) res_ty subst' (b : rev_bs) (e `App` varToCoreExpr b)
        -- The varToCoreExpr is important: `b` might be a coercion variable

      | otherwise
      = pprPanic "etaBodyForJoinPoint" $ int need_args $$
                                         ppr body $$ ppr (exprType body)

    body_ty = exprType body
    in_scope = mkInScopeSet (exprFreeVars body `unionVarSet` tyCoVarsOfType body_ty)
    -- in_scope is a bit tricky.
    -- - We are wrapping `body` in some value lambdas, so must not shadow
    --   any free vars of `body`
    -- - We are wrapping `body` in some type lambdas, so must not shadow any
    --   tyvars in body_ty.  Example: body is just a variable
    --            (g :: forall (a::k). T k a -> Int)
    --   We must not shadow that `k` when adding the /\a. So treat the free vars
    --   of body_ty as in-scope.  Showed up in #23026.

--------------
freshEtaId :: Int -> Subst -> Scaled Type -> (Subst, Id)
-- Make a fresh Id, with specified type (after applying substitution)
-- It should be "fresh" in the sense that it's not in the in-scope set
-- of the TvSubstEnv; and it should itself then be added to the in-scope
-- set of the TvSubstEnv
--
-- The Int is just a reasonable starting point for generating a unique;
-- it does not necessarily have to be unique itself.
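--
-- For instance (an illustrative sketch): freshEtaId 1 subst (Scaled m intTy)
-- produces an Id along the lines of (eta :: Int), built with
-- mkBuiltinUnique 1 and then pushed through uniqAway so it avoids the
-- substitution's in-scope set, paired with a substitution whose
-- in-scope set has been extended with that Id.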
freshEtaId n subst ty
      = (subst', eta_id')
      where
        Scaled mult' ty' = Type.substScaledTyUnchecked subst ty
        eta_id' = uniqAway (getSubstInScope subst) $
                  mkSysLocalOrCoVar (fsLit "eta") (mkBuiltinUnique n) mult' ty'
                  -- "OrCoVar" since this can be used to eta-expand
                  -- coercion abstractions
        subst'  = extendSubstInScope subst eta_id'