code
stringlengths
5
1.03M
repo_name
stringlengths
5
90
path
stringlengths
4
158
license
stringclasses
15 values
size
int64
5
1.03M
n_ast_errors
int64
0
53.9k
ast_max_depth
int64
2
4.17k
n_whitespaces
int64
0
365k
n_ast_nodes
int64
3
317k
n_ast_terminals
int64
1
171k
n_ast_nonterminals
int64
1
146k
loc
int64
-1
37.3k
cycloplexity
int64
-1
1.31k
{- (c) The AQUA Project, Glasgow University, 1993-1998 \section[Simplify]{The main module of the simplifier} -} {-# LANGUAGE CPP #-} module Simplify ( simplTopBinds, simplExpr ) where #include "HsVersions.h" import DynFlags import SimplMonad import Type hiding ( substTy, extendTvSubst, substTyVar ) import SimplEnv import SimplUtils import FamInstEnv ( FamInstEnv ) import Literal ( litIsLifted ) --, mkMachInt ) -- temporalily commented out. See #8326 import Id import MkId ( seqId, voidPrimId ) import MkCore ( mkImpossibleExpr, castBottomExpr ) import IdInfo import Name ( mkSystemVarName, isExternalName ) import Coercion hiding ( substCo, substTy, substCoVar, extendTvSubst ) import OptCoercion ( optCoercion ) import FamInstEnv ( topNormaliseType_maybe ) import DataCon ( DataCon, dataConWorkId, dataConRepStrictness , isMarkedStrict ) --, dataConTyCon, dataConTag, fIRST_TAG ) --import TyCon ( isEnumerationTyCon ) -- temporalily commented out. See #8326 import CoreMonad ( Tick(..), SimplifierMode(..) ) import CoreSyn import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd ) import PprCore ( pprCoreExpr ) import CoreUnfold import CoreUtils import CoreArity --import PrimOp ( tagToEnumKey ) -- temporalily commented out. See #8326 import Rules ( lookupRule, getRules ) import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporalily commented out. See #8326 import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) ) import MonadUtils ( foldlM, mapAccumLM, liftIO ) import Maybes ( orElse ) --import Unique ( hasKey ) -- temporalily commented out. See #8326 import Control.Monad import Data.List ( mapAccumL ) import Outputable import FastString import Pair import Util import ErrUtils {- The guts of the simplifier is in this module, but the driver loop for the simplifier is in SimplCore.hs. 
----------------------------------------- *** IMPORTANT NOTE *** ----------------------------------------- The simplifier used to guarantee that the output had no shadowing, but it does not do so any more. (Actually, it never did!) The reason is documented with simplifyArgs. ----------------------------------------- *** IMPORTANT NOTE *** ----------------------------------------- Many parts of the simplifier return a bunch of "floats" as well as an expression. This is wrapped as a datatype SimplUtils.FloatsWith. All "floats" are let-binds, not case-binds, but some non-rec lets may be unlifted (with RHS ok-for-speculation). ----------------------------------------- ORGANISATION OF FUNCTIONS ----------------------------------------- simplTopBinds - simplify all top-level binders - for NonRec, call simplRecOrTopPair - for Rec, call simplRecBind ------------------------------ simplExpr (applied lambda) ==> simplNonRecBind simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind simplExpr (Let (Rec ...) ..) 
==> simplify binders; simplRecBind ------------------------------ simplRecBind [binders already simplified] - use simplRecOrTopPair on each pair in turn simplRecOrTopPair [binder already simplified] Used for: recursive bindings (top level and nested) top-level non-recursive bindings Returns: - check for PreInlineUnconditionally - simplLazyBind simplNonRecBind Used for: non-top-level non-recursive bindings beta reductions (which amount to the same thing) Because it can deal with strict args, it takes a "thing-inside" and returns an expression - check for PreInlineUnconditionally - simplify binder, including its IdInfo - if strict binding simplStrictArg mkAtomicArgs completeNonRecX else simplLazyBind addFloats simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder] Used for: binding case-binder and constr args in a known-constructor case - check for PreInlineUnconditionally - simplify binder - completeNonRecX ------------------------------ simplLazyBind: [binder already simplified, RHS not] Used for: recursive bindings (top level and nested) top-level non-recursive bindings non-top-level, but *lazy* non-recursive bindings [must not be strict or unboxed] Returns floats + an augmented environment, not an expression - substituteIdInfo and add result to in-scope [so that rules are available in rec rhs] - simplify rhs - mkAtomicArgs - float if exposes constructor or PAP - completeBind completeNonRecX: [binder and rhs both simplified] - if the thing needs case binding (unlifted and not ok-for-spec) build a Case else completeBind addFloats completeBind: [given a simplified RHS] [used for both rec and non-rec bindings, top level and not] - try PostInlineUnconditionally - add unfolding [this is the only place we add an unfolding] - add arity Right hand sides and arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In many ways we want to treat (a) the right hand side of a let(rec), and (b) a function argument in the same way. But not always!
In particular, we would like to leave these arguments exactly as they are, so they will match a RULE more easily. f (g x, h x) g (+ x) It's harder to make the rule match if we ANF-ise the constructor, or eta-expand the PAP: f (let { a = g x; b = h x } in (a,b)) g (\y. + x y) On the other hand if we see the let-defns p = (g x, h x) q = + x then we *do* want to ANF-ise and eta-expand, so that p and q can be safely inlined. Even floating lets out is a bit dubious. For let RHS's we float lets out if that exposes a value, so that the value can be inlined more vigorously. For example r = let x = e in (x,x) Here, if we float the let out we'll expose a nice constructor. We did experiments that showed this to be a generally good thing. But it was a bad thing to float lets out unconditionally, because that meant they got allocated more often. For function arguments, there's less reason to expose a constructor (it won't get inlined). Just possibly it might make a rule match, but I'm pretty skeptical. So for the moment we don't float lets out of function arguments either. Eta expansion ~~~~~~~~~~~~~~ For eta expansion, we want to catch things like case e of (a,b) -> \x -> case a of (p,q) -> \y -> r If the \x was on the RHS of a let, we'd eta expand to bring the two lambdas together. And in general that's a good thing to do. Perhaps we should eta expand wherever we find a (value) lambda? Then the eta expansion at a let RHS can concentrate solely on the PAP case. ************************************************************************ * * \subsection{Bindings} * * ************************************************************************ -} simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv simplTopBinds env0 binds0 = do { -- Put all the top-level binders into scope at the start -- so that if a transformation rule has unexpectedly brought -- anything into scope, then we don't get a complaint about that. -- It's rather as if the top-level binders were imported. 
-- See note [Glomming] in OccurAnal. ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0) ; env2 <- simpl_binds env1 binds0 ; freeTick SimplifierDone ; return env2 } where -- We need to track the zapped top-level binders, because -- they should have their fragile IdInfo zapped (notably occurrence info) -- That's why we run down binds and bndrs' simultaneously. -- simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv simpl_binds env [] = return env simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind ; simpl_binds env' binds } simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs simpl_bind env (NonRec b r) = simplRecOrTopPair env' TopLevel NonRecursive b b' r where (env', b') = addBndrRules env b (lookupRecBndr env b) {- ************************************************************************ * * \subsection{Lazy bindings} * * ************************************************************************ simplRecBind is used for * recursive bindings only -} simplRecBind :: SimplEnv -> TopLevelFlag -> [(InId, InExpr)] -> SimplM SimplEnv simplRecBind env0 top_lvl pairs0 = do { let (env_with_info, triples) = mapAccumL add_rules env0 pairs0 ; env1 <- go (zapFloats env_with_info) triples ; return (env0 `addRecFloats` env1) } -- addFloats adds the floats from env1, -- _and_ updates env0 with the in-scope set from env1 where add_rules :: SimplEnv -> (InBndr,InExpr) -> (SimplEnv, (InBndr, OutBndr, InExpr)) -- Add the (substituted) rules to the binder add_rules env (bndr, rhs) = (env', (bndr, bndr', rhs)) where (env', bndr') = addBndrRules env bndr (lookupRecBndr env bndr) go env [] = return env go env ((old_bndr, new_bndr, rhs) : pairs) = do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs ; go env' pairs } {- simplOrTopPair is used for * recursive bindings (whether top level or not) * top-level non-recursive bindings It assumes the binder has already been simplified, but not its IdInfo. 
-} simplRecOrTopPair :: SimplEnv -> TopLevelFlag -> RecFlag -> InId -> OutBndr -> InExpr -- Binder and rhs -> SimplM SimplEnv -- Returns an env that includes the binding simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs = do { dflags <- getDynFlags ; trace_bind dflags $ if preInlineUnconditionally dflags env top_lvl old_bndr rhs -- Check for unconditional inline then do tick (PreInlineUnconditionally old_bndr) return (extendIdSubst env old_bndr (mkContEx env rhs)) else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env } where trace_bind dflags thing_inside | not (dopt Opt_D_verbose_core2core dflags) = thing_inside | otherwise = pprTrace "SimplBind" (ppr old_bndr) thing_inside -- trace_bind emits a trace for each top-level binding, which -- helps to locate the tracing for inlining and rule firing {- simplLazyBind is used for * [simplRecOrTopPair] recursive bindings (whether top level or not) * [simplRecOrTopPair] top-level non-recursive bindings * [simplNonRecE] non-top-level *lazy* non-recursive bindings Nota bene: 1. It assumes that the binder is *already* simplified, and is in scope, and its IdInfo too, except unfolding 2. It assumes that the binder type is lifted. 3. It does not check for pre-inline-unconditionally; that should have been done already. 
-} simplLazyBind :: SimplEnv -> TopLevelFlag -> RecFlag -> InId -> OutId -- Binder, both pre-and post simpl -- The OutId has IdInfo, except arity, unfolding -> InExpr -> SimplEnv -- The RHS and its environment -> SimplM SimplEnv -- Precondition: rhs obeys the let/app invariant simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se = -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $ do { let rhs_env = rhs_se `setInScope` env (tvs, body) = case collectTyBinders rhs of (tvs, body) | not_lam body -> (tvs,body) | otherwise -> ([], rhs) not_lam (Lam _ _) = False not_lam (Tick t e) | not (tickishFloatable t) = not_lam e -- eta-reduction could float not_lam _ = True -- Do not do the "abstract tyyvar" thing if there's -- a lambda inside, because it defeats eta-reduction -- f = /\a. \x. g a x -- should eta-reduce. ; (body_env, tvs') <- simplBinders rhs_env tvs -- See Note [Floating and type abstraction] in SimplUtils -- Simplify the RHS ; let rhs_cont = mkRhsStop (substTy body_env (exprType body)) ; (body_env1, body1) <- simplExprF body_env body rhs_cont -- ANF-ise a constructor or PAP rhs ; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1 ; (env', rhs') <- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2) then -- No floating, revert to body1 do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont ; return (env, rhs') } else if null tvs then -- Simple floating do { tick LetFloatFromLet ; return (addFloats env body_env2, body2) } else -- Do type-abstraction first do { tick LetFloatFromLet ; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2 ; rhs' <- mkLam tvs' body3 rhs_cont ; env' <- foldlM (addPolyBind top_lvl) env poly_binds ; return (env', rhs') } ; completeBind env' top_lvl bndr bndr1 rhs' } {- A specialised variant of simplNonRec used when the RHS is already simplified, notably in knownCon. It uses case-binding where necessary. 
-} simplNonRecX :: SimplEnv -> InId -- Old binder -> OutExpr -- Simplified RHS -> SimplM SimplEnv -- Precondition: rhs satisfies the let/app invariant simplNonRecX env bndr new_rhs | isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p } = return env -- Here c is dead, and we avoid creating -- the binding c = (a,b) | Coercion co <- new_rhs = return (extendCvSubst env bndr co) | otherwise = do { (env', bndr') <- simplBinder env bndr ; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs } -- simplNonRecX is only used for NotTopLevel things completeNonRecX :: TopLevelFlag -> SimplEnv -> Bool -> InId -- Old binder -> OutId -- New binder -> OutExpr -- Simplified RHS -> SimplM SimplEnv -- Precondition: rhs satisfies the let/app invariant -- See Note [CoreSyn let/app invariant] in CoreSyn completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs = do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs ; (env2, rhs2) <- if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1 then do { tick LetFloatFromLet ; return (addFloats env env1, rhs1) } -- Add the floats to the main env else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS ; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 } {- {- No, no, no! Do not try preInlineUnconditionally in completeNonRecX Doing so risks exponential behaviour, because new_rhs has been simplified once already In the cases described by the folowing commment, postInlineUnconditionally will catch many of the relevant cases. -- This happens; for example, the case_bndr during case of -- known constructor: case (a,b) of x { (p,q) -> ... } -- Here x isn't mentioned in the RHS, so we don't want to -- create the (dead) let-binding let x = (a,b) in ... -- -- Similarly, single occurrences can be inlined vigourously -- e.g. case (f x, g y) of (a,b) -> .... -- If a,b occur once we can avoid constructing the let binding for them. 
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks -- Consider case I# (quotInt# x y) of -- I# v -> let w = J# v in ... -- If we gaily inline (quotInt# x y) for v, we end up building an -- extra thunk: -- let w = J# (quotInt# x y) in ... -- because quotInt# can fail. | preInlineUnconditionally env NotTopLevel bndr new_rhs = thing_inside (extendIdSubst env bndr (DoneEx new_rhs)) -} ---------------------------------- prepareRhs takes a putative RHS, checks whether it's a PAP or constructor application and, if so, converts it to ANF, so that the resulting thing can be inlined more easily. Thus x = (f a, g b) becomes t1 = f a t2 = g b x = (t1,t2) We also want to deal well cases like this v = (f e1 `cast` co) e2 Here we want to make e1,e2 trivial and get x1 = e1; x2 = e2; v = (f x1 `cast` co) v2 That's what the 'go' loop in prepareRhs does -} prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Adds new floats to the env iff that allows us to return a good RHS prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions] | Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type , not (isUnLiftedType ty1) -- see Note [Float coercions (unlifted)] = do { (env', rhs') <- makeTrivialWithInfo top_lvl env sanitised_info rhs ; return (env', Cast rhs' co) } where sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info `setDemandInfo` demandInfo info info = idInfo id prepareRhs top_lvl env0 _ rhs0 = do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0 ; return (env1, rhs1) } where go n_val_args env (Cast rhs co) = do { (is_exp, env', rhs') <- go n_val_args env rhs ; return (is_exp, env', Cast rhs' co) } go n_val_args env (App fun (Type ty)) = do { (is_exp, env', rhs') <- go n_val_args env fun ; return (is_exp, env', App rhs' (Type ty)) } go n_val_args env (App fun arg) = do { (is_exp, env', fun') <- go (n_val_args+1) env fun ; case is_exp of True -> do { (env'', arg') <- 
makeTrivial top_lvl env' arg ; return (True, env'', App fun' arg') } False -> return (False, env, App fun arg) } go n_val_args env (Var fun) = return (is_exp, env, Var fun) where is_exp = isExpandableApp fun n_val_args -- The fun a constructor or PAP -- See Note [CONLIKE pragma] in BasicTypes -- The definition of is_exp should match that in -- OccurAnal.occAnalApp go n_val_args env (Tick t rhs) -- We want to be able to float bindings past this -- tick. Non-scoping ticks don't care. | tickishScoped t == NoScope = do { (is_exp, env', rhs') <- go n_val_args env rhs ; return (is_exp, env', Tick t rhs') } -- On the other hand, for scoping ticks we need to be able to -- copy them on the floats, which in turn is only allowed if -- we can obtain non-counting ticks. | not (tickishCounts t) || tickishCanSplit t = do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs ; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr) floats' = seFloats $ env `addFloats` mapFloats env' tickIt ; return (is_exp, env' { seFloats = floats' }, Tick t rhs') } go _ env other = return (False, env, other) {- Note [Float coercions] ~~~~~~~~~~~~~~~~~~~~~~ When we find the binding x = e `cast` co we'd like to transform it to x' = e x = x `cast` co -- A trivial binding There's a chance that e will be a constructor application or function, or something like that, so moving the coerion to the usage site may well cancel the coersions and lead to further optimisation. Example: data family T a :: * data instance T Int = T Int foo :: Int -> Int -> Int foo m n = ... where x = T m go 0 = 0 go n = case x of { T m -> go (n-m) } -- This case should optimise Note [Preserve strictness when floating coercions] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the Note [Float coercions] transformation, keep the strictness info. 
Eg f = e `cast` co -- f has strictness SSL When we transform to f' = e -- f' also has strictness SSL f = f' `cast` co -- f still has strictness SSL Its not wrong to drop it on the floor, but better to keep it. Note [Float coercions (unlifted)] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BUT don't do [Float coercions] if 'e' has an unlifted type. This *can* happen: foo :: Int = (error (# Int,Int #) "urk") `cast` CoUnsafe (# Int,Int #) Int If do the makeTrivial thing to the error call, we'll get foo = case error (# Int,Int #) "urk" of v -> v `cast` ... But 'v' isn't in scope! These strange casts can happen as a result of case-of-case bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of (# p,q #) -> p+q -} makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec) makeTrivialArg env (ValArg e) = do { (env', e') <- makeTrivial NotTopLevel env e ; return (env', ValArg e') } makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg makeTrivial :: TopLevelFlag -> SimplEnv -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Binds the expression to a variable, if it's not trivial, returning the variable makeTrivial top_lvl env expr = makeTrivialWithInfo top_lvl env vanillaIdInfo expr makeTrivialWithInfo :: TopLevelFlag -> SimplEnv -> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Propagate strictness and demand info to the new binder -- Note [Preserve strictness when floating coercions] -- Returned SimplEnv has same substitution as incoming one makeTrivialWithInfo top_lvl env info expr | exprIsTrivial expr -- Already trivial || not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise -- See Note [Cannot trivialise] = return (env, expr) | otherwise -- See Note [Take care] below = do { uniq <- getUniqueM ; let name = mkSystemVarName uniq (fsLit "a") var = mkLocalIdWithInfo name expr_ty info ; env' <- completeNonRecX top_lvl env False var var expr ; expr' <- simplVar env' var ; return (env', expr') } -- The simplVar is needed becase we're constructing a new binding -- 
a = rhs -- And if rhs is of form (rhs1 |> co), then we might get -- a1 = rhs1 -- a = a1 |> co -- and now a's RHS is trivial and can be substituted out, and that -- is what completeNonRecX will do -- To put it another way, it's as if we'd simplified -- let var = e in var where expr_ty = exprType expr bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool -- True iff we can have a binding of this expression at this level -- Precondition: the type is the type of the expression bindingOk top_lvl _ expr_ty | isTopLevel top_lvl = not (isUnLiftedType expr_ty) | otherwise = True {- Note [Cannot trivialise] ~~~~~~~~~~~~~~~~~~~~~~~~ Consider tih f :: Int -> Addr# foo :: Bar foo = Bar (f 3) Then we can't ANF-ise foo, even though we'd like to, because we can't make a top-level binding for the Addr# (f 3). And if so we don't want to turn it into foo = let x = f 3 in Bar x because we'll just end up inlining x back, and that makes the simplifier loop. Better not to ANF-ise it at all. A case in point is literal strings (a MachStr is not regarded as trivial): foo = Ptr "blob"# We don't want to ANF-ise this. ************************************************************************ * * \subsection{Completing a lazy binding} * * ************************************************************************ completeBind * deals only with Ids, not TyVars * takes an already-simplified binder and RHS * is used for both recursive and non-recursive bindings * is used for both top-level and non-top-level bindings It does the following: - tries discarding a dead binding - tries PostInlineUnconditionally - add unfolding [this is the only place we add an unfolding] - add arity It does *not* attempt to do let-to-case. Why? Because it is used for - top-level bindings (when let-to-case is impossible) - many situations where the "rhs" is known to be a WHNF (so let-to-case is inappropriate). 
Nor does it do the atomic-argument thing -} completeBind :: SimplEnv -> TopLevelFlag -- Flag stuck into unfolding -> InId -- Old binder -> OutId -> OutExpr -- New binder and RHS -> SimplM SimplEnv -- completeBind may choose to do its work -- * by extending the substitution (e.g. let x = y in ...) -- * or by adding to the floats in the envt -- -- Precondition: rhs obeys the let/app invariant completeBind env top_lvl old_bndr new_bndr new_rhs | isCoVar old_bndr = case new_rhs of Coercion co -> return (extendCvSubst env old_bndr co) _ -> return (addNonRec env new_bndr new_rhs) | otherwise = ASSERT( isId new_bndr ) do { let old_info = idInfo old_bndr old_unf = unfoldingInfo old_info occ_info = occInfo old_info -- Do eta-expansion on the RHS of the binding -- See Note [Eta-expanding at let bindings] in SimplUtils ; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs -- Simplify the unfolding ; new_unfolding <- simplUnfolding env top_lvl old_bndr final_rhs old_unf ; dflags <- getDynFlags ; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info final_rhs new_unfolding -- Inline and discard the binding then do { tick (PostInlineUnconditionally old_bndr) ; return (extendIdSubst env old_bndr (DoneEx final_rhs)) } -- Use the substitution to make quite, quite sure that the -- substitution will happen, since we are going to discard the binding else do { let info1 = idInfo new_bndr `setArityInfo` new_arity -- Unfolding info: Note [Setting the new unfolding] info2 = info1 `setUnfoldingInfo` new_unfolding -- Demand info: Note [Setting the demand info] -- -- We also have to nuke demand info if for some reason -- eta-expansion *reduces* the arity of the binding to less -- than that of the strictness sig. This can happen: see Note [Arity decrease]. 
info3 | isEvaldUnfolding new_unfolding || (case strictnessInfo info2 of StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty) = zapDemandInfo info2 `orElse` info2 | otherwise = info2 final_id = new_bndr `setIdInfo` info3 ; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $ return (addNonRec env final_id final_rhs) } } -- The addNonRec adds it to the in-scope set too ------------------------------ addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv -- Add a new binding to the environment, complete with its unfolding -- but *do not* do postInlineUnconditionally, because we have already -- processed some of the scope of the binding -- We still want the unfolding though. Consider -- let -- x = /\a. let y = ... in Just y -- in body -- Then we float the y-binding out (via abstractFloats and addPolyBind) -- but 'x' may well then be inlined in 'body' in which case we'd like the -- opportunity to inline 'y' too. -- -- INVARIANT: the arity is correct on the incoming binders addPolyBind top_lvl env (NonRec poly_id rhs) = do { unfolding <- simplUnfolding env top_lvl poly_id rhs noUnfolding -- Assumes that poly_id did not have an INLINE prag -- which is perhaps wrong. ToDo: think about this ; let final_id = setIdInfo poly_id $ idInfo poly_id `setUnfoldingInfo` unfolding ; return (addNonRec env final_id rhs) } addPolyBind _ env bind@(Rec _) = return (extendFloats env bind) -- Hack: letrecs are more awkward, so we extend "by steam" -- without adding unfoldings etc. 
At worst this leads to -- more simplifier iterations ------------------------------ simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> OutExpr -> Unfolding -> SimplM Unfolding -- Note [Setting the new unfolding] simplUnfolding env top_lvl id new_rhs unf = case unf of DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args } -> do { (env', bndrs') <- simplBinders rule_env bndrs ; args' <- mapM (simplExpr env') args ; return (mkDFunUnfolding bndrs' con args') } CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide } | isStableSource src -> do { expr' <- simplExpr rule_env expr ; case guide of UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things -> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok , ug_boring_ok = inlineBoringOk expr' } -- Refresh the boring-ok flag, in case expr' -- has got small. This happens, notably in the inlinings -- for dfuns for single-method classes; see -- Note [Single-method classes] in TcInstDcls. -- A test case is Trac #4138 in return (mkCoreUnfolding src is_top_lvl expr' guide') -- See Note [Top-level flag on inline rules] in CoreUnfold _other -- Happens for INLINABLE things -> bottoming `seq` -- See Note [Force bottoming field] do { dflags <- getDynFlags ; return (mkUnfolding dflags src is_top_lvl bottoming expr') } } -- If the guidance is UnfIfGoodArgs, this is an INLINABLE -- unfolding, and we need to make sure the guidance is kept up -- to date with respect to any changes in the unfolding. _other -> bottoming `seq` -- See Note [Force bottoming field] do { dflags <- getDynFlags ; return (mkUnfolding dflags InlineRhs is_top_lvl bottoming new_rhs) } -- We make an unfolding *even for loop-breakers*. -- Reason: (a) It might be useful to know that they are WHNF -- (b) In TidyPgm we currently assume that, if we want to -- expose the unfolding then indeed we *have* an unfolding -- to expose. (We could instead use the RHS, but currently -- we don't.) 
The simple thing is always to have one. where bottoming = isBottomingId id is_top_lvl = isTopLevel top_lvl act = idInlineActivation id rule_env = updMode (updModeForStableUnfoldings act) env -- See Note [Simplifying inside stable unfoldings] in SimplUtils {- Note [Force bottoming field] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to force bottoming, or the new unfolding holds on to the old unfolding (which is part of the id). Note [Arity decrease] ~~~~~~~~~~~~~~~~~~~~~ Generally speaking the arity of a binding should not decrease. But it *can* legitimately happen because of RULES. Eg f = g Int where g has arity 2, will have arity 2. But if there's a rewrite rule g Int --> h where h has arity 1, then f's arity will decrease. Here's a real-life example, which is in the output of Specialise: Rec { $dm {Arity 2} = \d.\x. op d {-# RULES forall d. $dm Int d = $s$dm #-} dInt = MkD .... opInt ... opInt {Arity 1} = $dm dInt $s$dm {Arity 0} = \x. op dInt } Here opInt has arity 1; but when we apply the rule its arity drops to 0. That's why Specialise goes to a little trouble to pin the right arity on specialised functions too. Note [Setting the new unfolding] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * If there's an INLINE pragma, we simplify the RHS gently. Maybe we should do nothing at all, but simplifying gently might get rid of more crap. * If not, we make an unfolding from the new RHS. But *only* for non-loop-breakers. Making loop breakers not have an unfolding at all means that we can avoid tests in exprIsConApp, for example. This is important: if exprIsConApp says 'yes' for a recursive thing, then we can get into an infinite loop If there's an stable unfolding on a loop breaker (which happens for INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the user did say 'INLINE'. May need to revisit this choice. Note [Setting the demand info] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the unfolding is a value, the demand info may go pear-shaped, so we nuke it. 
Example: let x = (a,b) in case x of (p,q) -> h p q x Here x is certainly demanded. But after we've nuked the case, we'll get just let x = (a,b) in h a b x and now x is not demanded (I'm assuming h is lazy) This really happens. Similarly let f = \x -> e in ...f..f... After inlining f at some of its call sites the original binding may (for example) be no longer strictly demanded. The solution here is a bit ad hoc... ************************************************************************ * * \subsection[Simplify-simplExpr]{The main function: simplExpr} * * ************************************************************************ The reason for this OutExprStuff stuff is that we want to float *after* simplifying a RHS, not before. If we do so naively we get quadratic behaviour as things float out. To see why it's important to do it after, consider this (real) example: let t = f x in fst t ==> let t = let a = e1 b = e2 in (a,b) in fst t ==> let a = e1 b = e2 t = (a,b) in a -- Can't inline a this round, cos it appears twice ==> e1 Each of the ==> steps is a round of simplification. We'd save a whole round if we float first. This can cascade. Consider let f = g d in \x -> ...f... ==> let f = let d1 = ..d.. in \y -> e in \x -> ...f... ==> let d1 = ..d.. in \x -> ...(\y ->e)... Only in this second round can the \y be applied, and it might do the same again. 
-}

-- Simplify an expression to completion, starting from an empty (boring)
-- continuation.  The expected output type is obtained by applying the
-- environment's substitution to the expression's type.
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
  where
    expr_out_ty :: OutType
    expr_out_ty = substTy env (exprType expr)

simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
        -- Simplify an expression, given a continuation.
        -- Any floats generated while simplifying are wrapped around the
        -- result (wrapFloats) rather than returned in the environment.
simplExprC env expr cont
  = -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
    do  { (env', expr') <- simplExprF (zapFloats env) expr cont
        ;       -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
                -- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
                -- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
          return (wrapFloats env' expr') }

--------------------------------------------------
-- Like simplExprC, but the floats are returned in the SimplEnv
-- rather than wrapped around the returned expression.
simplExprF :: SimplEnv -> InExpr -> SimplCont
           -> SimplM (SimplEnv, OutExpr)

simplExprF env e cont
  = {- pprTrace "simplExprF" (vcat
      [ ppr e
      , text "cont =" <+> ppr cont
      , text "inscope =" <+> ppr (seInScope env)
      , text "tvsubst =" <+> ppr (seTvSubst env)
      , text "idsubst =" <+> ppr (seIdSubst env)
      , text "cvsubst =" <+> ppr (seCvSubst env)
      {- , ppr (seFloats env) -}
      ]) $ -}
    simplExprF1 env e cont

-- The worker for simplExprF: dispatch on the top-level constructor
-- of the expression.
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v)        cont = simplIdF env v cont
simplExprF1 env (Lit lit)      cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr)  cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co)  cont = simplCoercionF env co cont

simplExprF1 env (Type ty) cont
  = ASSERT( contIsRhsOrArg cont )
    rebuild env (Type (substTy env ty)) cont

simplExprF1 env (App fun arg) cont
  = simplExprF env fun $
    case arg of
      Type ty -> ApplyToTy  { sc_arg_ty  = substTy env ty
                            , sc_hole_ty = substTy env (exprType fun)
                            , sc_cont    = cont }
      _       -> ApplyToVal { sc_arg = arg, sc_env = env
                            , sc_dup = NoDup, sc_cont = cont }

simplExprF1 env expr@(Lam {}) cont
  = simplLam env zapped_bndrs body cont
        -- The main issue here is under-saturated lambdas
        --   (\x1. \x2. e) arg1
        -- Here x1 might have "occurs-once" occ-info, because occ-info
        -- is computed assuming that a group of lambdas is applied
        -- all at once.  If there are too few args, we must zap the
        -- occ-info, UNLESS the remaining binders are one-shot
  where
    (bndrs, body) = collectBinders expr
    zapped_bndrs | need_to_zap = map zap bndrs
                 | otherwise   = bndrs

    need_to_zap = any zappable_bndr (drop n_args bndrs)
    n_args = countArgs cont
        -- NB: countArgs counts all the args (incl type args)
        -- and likewise drop counts all binders (incl type lambdas)

    zappable_bndr b = isId b && not (isOneShotBndr b)
    zap b | isTyVar b = b
          | otherwise = zapLamIdInfo b

simplExprF1 env (Case scrut bndr _ alts) cont
  = simplExprF env scrut (Select NoDup bndr alts env cont)

simplExprF1 env (Let (Rec pairs) body) cont
  = do  { env' <- simplRecBndrs env (map fst pairs)
                -- NB: bndrs' don't have unfoldings or rules
                -- We add them as we go down

        ; env'' <- simplRecBind env' NotTopLevel pairs
        ; simplExprF env'' body cont }

simplExprF1 env (Let (NonRec bndr rhs) body) cont
  = simplNonRecE env bndr (rhs, env) ([], body) cont

---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
        -- Kept monadic just so we can do the seqType
simplType env ty
  = -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
    seqType new_ty `seq` return new_ty
  where
    new_ty = substTy env ty

---------------------------------
-- Simplify a coercion appearing in expression position, then hand
-- the result to the continuation.
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
               -> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
  = do { co' <- simplCoercion env co
       ; rebuild env (Coercion co') cont }

-- Optimise a coercion under the environment's coercion substitution;
-- the result is forced (seqCo) before being returned.
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
  = let opt_co = optCoercion (getCvSubst env) co
    in seqCo opt_co `seq` return opt_co

-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
  -- A scoped tick turns into a continuation, so that we can spot
  -- (scc t (\x . e)) in simplLam and eliminate the scc.  If we didn't do
  -- it this way, then it would take two passes of the simplifier to
  -- reduce ((scc t (\x . e)) e').
  -- NB, don't do this with counting ticks, because if the expr is
  -- bottom, then rebuildCall will discard the continuation.

-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
--    scc<f>  case expensive of p -> e
-- becomes
--    case expensive of p -> scc<f> e
--
-- So I'm disabling this for now.  It just means we will do more
-- simplifier iterations than necessary in some cases.

--  | tickishScoped tickish && not (tickishCounts tickish)
--  = simplExprF env expr (TickIt tickish cont)

  -- For unscoped or soft-scoped ticks, we are allowed to float in new
  -- cost, so we simply push the continuation inside the tick.  This
  -- has the effect of moving the tick to the outside of a case or
  -- application context, allowing the normal case and application
  -- optimisations to fire.
  | tickish `tickishScopesLike` SoftScope
  = do { (env', expr') <- simplExprF env expr cont
       ; return (env', mkTick tickish expr')
       }

  -- Push tick inside if the context looks like this will allow us to
  -- do a case-of-case - see Note [case-of-scc-of-case]
  | Select {} <- cont, Just expr' <- push_tick_inside
  = simplExprF env expr' cont

  -- We don't want to move the tick, but we might still want to allow
  -- floats to pass through with appropriate wrapping (or not, see
  -- wrap_floats below)
  --- | not (tickishCounts tickish) || tickishCanSplit tickish
  -- = wrap_floats

  | otherwise
  = no_floating_past_tick

  where

    -- Try to push tick inside a case, see Note [case-of-scc-of-case].
    push_tick_inside =
      case expr0 of
        Case scrut bndr ty alts
               -> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
        _other -> Nothing
     where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
           movable t      = not (tickishCounts t) ||
                            t `tickishScopesLike` NoScope ||
                            tickishCanSplit t
           tickScrut e    = foldr mkTick e ticks
           -- Alternatives get annotated with all ticks that scope in some way,
           -- but we don't want to count entries.
           tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
           ts_scope         = map mkNoCount $
                              filter (not . (`tickishScopesLike` NoScope)) ticks

    -- Leave the tick in place: simplify the expression under the part
    -- of the continuation that may be pushed past the tick (splitCont),
    -- then rebuild with the tick wrapped around the result.
    no_floating_past_tick =
      do { let (inc,outc) = splitCont cont
         ; (env', expr') <- simplExprF (zapFloats env) expr inc
         ; let tickish' = simplTickish env tickish
         ; (env'', expr'') <- rebuild (zapFloats env')
                                      (wrapFloats env' expr')
                                      (TickIt tickish' outc)
         ; return (addFloats env env'', expr'')
         }

    -- Alternative version that wraps outgoing floats with the tick.  This
    -- results in ticks being duplicated, as we don't make any attempt to
    -- eliminate the tick if we re-inline the binding (because the tick
    -- semantics allows unrestricted inlining of HNFs), so I'm not doing
    -- this any more.  FloatOut will catch any real opportunities for
    -- floating.
    --
    -- wrap_floats =
    --   do { let (inc,outc) = splitCont cont
    --      ; (env', expr') <- simplExprF (zapFloats env) expr inc
    --      ; let tickish' = simplTickish env tickish
    --      ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
    --                                  mkTick (mkNoCount tickish') rhs)
    --             -- when wrapping a float with mkTick, we better zap the Id's
    --             -- strictness info and arity, because it might be wrong now.
    --      ; let env'' = addFloats env (mapFloats env' wrap_float)
    --      ; rebuild env'' expr' (TickIt tickish' outc)
    --      }

    -- Apply the environment's substitution to the Ids captured by a
    -- breakpoint; other ticks carry no Ids and pass through unchanged.
    simplTickish env tickish
      | Breakpoint n ids <- tickish
          = Breakpoint n (map (getDoneId . substId env) ids)
      | otherwise = tickish

    -- Push type application and coercion inside a tick
    splitCont :: SimplCont -> (SimplCont, SimplCont)
    splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
      where (inc,outc) = splitCont tail
    splitCont (CastIt co c) = (CastIt co inc, outc)
      where (inc,outc) = splitCont c
    splitCont other = (mkBoringStop (contHoleType other), other)

    getDoneId (DoneId id) = id
    getDoneId (DoneEx e)  = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
    getDoneId other = pprPanic "getDoneId" (ppr other)

-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way.  For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
--        case scctick<code_string.r1>
--             case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
--             of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
--                      (ww1_s13f, ww2_s13g, ww3_s13h)
--                    }
--        of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
--        tick<code_string.f1>
--        (ww_s12Y,
--         ww1_s12Z,
--         PTTrees.PT
--           @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
--        }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening).  But the scctick is in the way - we need to push it
-- inside to expose the case-of-case.  So we perform this
-- transformation on the inner case:
--
--   scctick c (case e of { p1 -> e1; ...; pn -> en })
--    ==>
--   case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case.  We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
*                                                                      *
\subsection{The main rebuilder}
*                                                                      *
************************************************************************
-}

rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
  = case cont of
      Stop {}        -> return (env, expr)
      TickIt t cont  -> rebuild env (mkTick t expr) cont
      CastIt co cont -> rebuild env (mkCast expr co) cont
                     -- NB: mkCast implements the (Coercion co |> g) optimisation

      Select _ bndr alts se cont   -> rebuildCase (se `setFloats` env) expr bndr alts cont
      StrictArg info _ cont        -> rebuildCall env (info `addValArgTo` expr) cont
      StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
                                               -- expr satisfies let/app since it started life
                                               -- in a call to simplNonRecE
                                         ; simplLam env' bs body cont }

      ApplyToTy  { sc_arg_ty = ty, sc_cont = cont}
        -> rebuild env (App expr (Type ty)) cont

      ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
        -- See Note [Avoid redundant simplification]
        | isSimplified dup_flag -> rebuild env (App expr arg) cont
        | otherwise             -> do { arg' <- simplExpr (se `setInScope` env) arg
                                      ; rebuild env (App expr arg') cont }

{-
************************************************************************
*                                                                      *
\subsection{Lambdas}
*                                                                      *
************************************************************************
-}

-- Simplify (body |> co0), pushing the coercion into the continuation
-- so that cast-cancellation and the Push/PushT rules can fire.
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
  = do  { co1 <- simplCoercion env co0
        ; -- pprTrace "simplCast" (ppr co1) $
          simplExprF env body (addCoerce co1 cont0) }
  where
       addCoerce co cont = add_coerce co (coercionKind co) cont

       add_coerce _co (Pair s1 k1) cont     -- co :: ty~ty
         | s1 `eqType` k1 = cont            -- is a no-op

       add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
         | (Pair _l1 t1) <- coercionKind co2
                --      e |> (g1 :: S1~L) |> (g2 :: L~T1)
                -- ==>
                --      e,                       if S1=T1
                --      e |> (g1 . g2 :: S1~T1)  otherwise
                --
                -- For example, in the initial form of a worker
                -- we may find  (coerce T (coerce S (\x.e))) y
                -- and we'd like it to simplify to e[y/x] in one round
                -- of simplification
         , s1 `eqType` t1  = cont            -- The coerces cancel out
         | otherwise       = CastIt (mkTransCo co1 co2) cont

       add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
                -- (f |> g) ty  --->   (f ty) |> (g @ ty)
                -- This implements the PushT rule from the paper
         | Just (tyvar,_) <- splitForAllTy_maybe s1s2
         = ASSERT( isTyVar tyvar )
           cont { sc_cont = addCoerce new_cast tail }
         where
           new_cast = mkInstCo co arg_ty

       add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                                  , sc_dup = dup, sc_cont = cont })
         | isFunTy s1s2   -- This implements the Push rule from the paper
         , isFunTy t1t2   -- Check t1t2 to ensure 'arg' is a value arg
                --      (e |> (g :: s1s2 ~ t1->t2)) f
                -- ===>
                --      (e (f |> (arg g :: t1~s1))
                --      |> (res g :: s2->t2)
                --
                -- t1t2 must be a function type, t1->t2, because it's applied
                -- to something but s1s2 might conceivably not be
                --
                -- When we build the ApplyTo we can't mix the out-types
                -- with the InExpr in the argument, so we simply substitute
                -- to make it all consistent.  It's a bit messy.
                -- But it isn't a common case.
                --
                -- Example of use: Trac #995
         = ApplyToVal { sc_arg  = mkCast arg' (mkSymCo co1)
                      , sc_env  = zapSubstEnv arg_se
                      , sc_dup  = dup
                      , sc_cont = addCoerce co2 cont }
         where
           -- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
           -- t2 ~ s2 with left and right on the curried form:
           --    (->) t1 t2 ~ (->) s1 s2
           [co1, co2] = decomposeCo 2 co
           arg'       = substExpr (text "move-cast") arg_se' arg
           arg_se'    = arg_se `setInScope` env

       add_coerce co _ cont = CastIt co cont

{-
************************************************************************
*                                                                      *
\subsection{Lambdas}
*                                                                      *
************************************************************************

Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
   $j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for lets
us optimise e better.  However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
          let x = blah in
          let b{Unf=Just x} = y
          in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}

simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
         -> SimplM (SimplEnv, OutExpr)

simplLam env [] body cont = simplExprF env body cont

        -- Beta reduction

simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }

simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                           , sc_cont = cont })
  = do  { tick (BetaReduction bndr)
        ; simplNonRecE env (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
  where
    zap_unfolding bndr  -- See Note [Zap unfolding when beta-reducing]
      | isId bndr, isStableUnfolding (realIdUnfolding bndr)
      = setIdUnfolding bndr NoUnfolding
      | otherwise = bndr

      -- discard a non-counting tick on a lambda.  This may change the
      -- cost attribution slightly (moving the allocation of the
      -- lambda elsewhere), but we don't care: optimisation changes
      -- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
  | not (tickishCounts tickish)
  = simplLam env bndrs body cont

        -- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
  = do  { (env', bndrs') <- simplLamBndrs env bndrs
        ; body' <- simplExpr env' body
        ; new_lam <- mkLam bndrs' body' cont
        ; rebuild env' new_lam cont }

------------------
simplNonRecE :: SimplEnv
             -> InBndr                  -- The binder
             -> (InExpr, SimplEnv)      -- Rhs of binding (or arg of lambda)
             -> ([InBndr], InExpr)      -- Body of the let/lambda
                                        --      \xs.e
             -> SimplCont
             -> SimplM (SimplEnv, OutExpr)

-- simplNonRecE is used for
--  * non-top-level non-recursive lets in expressions
--  * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
--               Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why?  Because of the binder-occ-info-zapping done before
--       the call to simplLam in simplExprF (Lam ...)

        -- First deal with type applications and type lets
        --   (/\a. e) (Type ty)   and   (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
  = ASSERT( isTyVar bndr )
    do  { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
        ; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }

simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
  = do dflags <- getDynFlags
       case () of
         _
          | preInlineUnconditionally dflags env NotTopLevel bndr rhs
           -> do { tick (PreInlineUnconditionally bndr)
                 ; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
                   simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }

          | isStrictId bndr          -- Includes coercions
           -> simplExprF (rhs_se `setFloats` env) rhs
                         (StrictBind bndr bndrs body env cont)

          | otherwise
           -> ASSERT( not (isTyVar bndr) )
              do { (env1, bndr1) <- simplNonRecBndr env bndr
                 ; let (env2, bndr2) = addBndrRules env1 bndr bndr1
                 ; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
                 ; simplLam env3 bndrs body cont }

{-
************************************************************************
*                                                                      *
                     Variables
*                                                                      *
************************************************************************
-}

simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
  | isTyVar var = return (Type (substTyVar env var))
  | isCoVar var = return (Coercion (substCoVar env var))
  | otherwise
  = case substId env var of
        DoneId var1          -> return (Var var1)
        DoneEx e             -> return e
        ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e

-- Simplify an occurrence of an Id in expression position, given a
-- continuation; dispatches on the result of the substitution lookup.
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
  = case substId env var of
        DoneEx e             -> simplExprF (zapSubstEnv env) e cont
        ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
        DoneId var1          -> completeCall env var1 cont
                -- Note [zapSubstEnv]
                -- The template is already simplified, so don't re-substitute.
                -- This is VITAL.  Consider
                --      let x = e in
                --      let y = \z -> ...x... in
                --      \ x -> ...y...
                -- We'll clone the inner \x, adding x->x' in the id_subst
                -- Then when we inline y, we must *not* replace x by x' in
                -- the inlined copy!!

---------------------------------------------------------
--      Dealing with a call site
--
-- completeCall: the variable has no substitution pending, so this is a
-- genuine call.  First try inlining (callSiteInline); failing that,
-- gather argument info and fall through to rebuildCall, which handles
-- strictness and rewrite rules.

completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
  = do  {   ------------- Try inlining ----------------
          dflags <- getDynFlags
        ; let  (lone_variable, arg_infos, call_cont) = contArgs cont
               n_val_args       = length arg_infos
               interesting_cont = interestingCallContext call_cont
               unfolding        = activeUnfolding env var
               maybe_inline     = callSiteInline dflags var unfolding
                                                 lone_variable arg_infos interesting_cont
        ; case maybe_inline of {
            Just expr      -- There is an inlining!
              ->  do { checkedTick (UnfoldingDone var)
                     ; dump_inline dflags expr cont
                     ; simplExprF (zapSubstEnv env) expr cont }

            ; Nothing -> do               -- No inlining!
        { rule_base <- getSimplRules
        ; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
        ; rebuildCall env info cont
    }}}
  where
    -- Emit -ddump-inlinings output; verbosity depends on the flags set.
    dump_inline dflags unfolding cont
      | not (dopt Opt_D_dump_inlinings dflags) = return ()
      | not (dopt Opt_D_verbose_core2core dflags)
      = when (isExternalName (idName var)) $
            liftIO $ printOutputForUser dflags alwaysQualify $
                sep [text "Inlining done:", nest 4 (ppr var)]
      | otherwise
      = liftIO $ printOutputForUser dflags alwaysQualify $
           sep [text "Inlining done: " <> ppr var,
                nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
                              text "Cont: " <+> ppr cont])]

rebuildCall :: SimplEnv
            -> ArgInfo
            -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
  -- When we run out of strictness args, it means
  -- that the call is definitely bottom; see SimplUtils.mkArgInfo
  -- Then we want to discard the entire strict continuation.  E.g.
  --    * case (error "hello") of { ... }
  --    * (error "Hello") arg
  --    * f (error "Hello") where f is strict
  --    etc
  -- Then, especially in the first of these cases, we'd like to discard
  -- the continuation, leaving just the bottoming expression.  But the
  -- type might not be right, so we may have to add a coerce.
  | not (contIsTrivial cont)     -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
  where                                       -- again and again!
    res     = argInfoExpr fun rev_args
    cont_ty = contResultType cont

rebuildCall env info (CastIt co cont)
  = rebuildCall env (addCastTo info co) cont

rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = rebuildCall env (info `addTyArgTo` arg_ty) cont

rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
                              , ai_strs = str:strs, ai_discs = disc:discs })
            (ApplyToVal { sc_arg = arg, sc_env = arg_se
                        , sc_dup = dup_flag, sc_cont = cont })
  | isSimplified dup_flag     -- See Note [Avoid redundant simplification]
  = rebuildCall env (addValArgTo info' arg) cont

  | str                 -- Strict argument
  = -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
    simplExprF (arg_se `setFloats` env) arg
               (StrictArg info' cci cont)
                -- Note [Shadowing]

  | otherwise                           -- Lazy argument
        -- DO NOT float anything outside, hence simplExprC
        -- There is no benefit (unlike in a let-binding), and we'd
        -- have to be very careful about bogus strictness through
        -- floating a demanded let.
  = do  { arg' <- simplExprC (arg_se `setInScope` env) arg
                             (mkLazyArgStop (funArgTy fun_ty) cci)
        ; rebuildCall env (addValArgTo info' arg') cont }
  where
    info' = info { ai_strs = strs, ai_discs = discs }
    cci | encl_rules = RuleArgCtxt
        | disc > 0   = DiscArgCtxt  -- Be keener here
        | otherwise  = BoringCtxt   -- Nothing interesting

rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
  | null rules
  = rebuild env (argInfoExpr fun rev_args) cont      -- No rules, common case

  | otherwise
  = do {  -- We've accumulated a simplified call in <fun,rev_args>
          -- so try rewrite rules; see Note [RULEs apply to simplified arguments]
          -- See also Note [Rules for recursive functions]
        ; let env' = zapSubstEnv env  -- See Note [zapSubstEnv];
                                      -- and NB that 'rev_args' are all fully simplified
        ; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
        ; case mb_rule of {
             Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'

                 -- Rules don't match
           ; Nothing -> rebuild env (argInfoExpr fun rev_args) cont      -- No rules
    } }

{- Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass.  Consider
   {-# RULES g (h x) = k x
             f (k x) = x #-}
   ...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match.  This
makes a particularly big difference when superclass selectors are involved:
        op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.

Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments.  An important example is that of
        (>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing.
So we mark them as Simplified to avoid re-simplifying them. Note [Shadowing] ~~~~~~~~~~~~~~~~ This part of the simplifier may break the no-shadowing invariant Consider f (...(\a -> e)...) (case y of (a,b) -> e') where f is strict in its second arg If we simplify the innermost one first we get (...(\a -> e)...) Simplifying the second arg makes us float the case out, so we end up with case y of (a,b) -> f (...(\a -> e)...) e' So the output does not have the no-shadowing invariant. However, there is no danger of getting name-capture, because when the first arg was simplified we used an in-scope set that at least mentioned all the variables free in its static environment, and that is enough. We can't just do innermost first, or we'd end up with a dual problem: case x of (a,b) -> f e (...(\a -> e')...) I spent hours trying to recover the no-shadowing invariant, but I just could not think of an elegant way to do it. The simplifier is already knee-deep in continuations. We have to keep the right in-scope set around; AND we have to get the effect that finding (error "foo") in a strict arg position will discard the entire application and replace it with (error "foo"). Getting all this at once is TOO HARD! 
************************************************************************
*                                                                      *
                Rewrite rules
*                                                                      *
************************************************************************
-}

-- Try to fire a rewrite rule for the function applied to the given
-- (already-simplified) arguments.  On success, returns the rule RHS
-- and the continuation with the consumed arguments dropped.
tryRules :: SimplEnv -> [CoreRule]
         -> Id -> [ArgSpec] -> SimplCont
         -> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it

tryRules env rules fn args call_cont
  | null rules
  = return Nothing
{- Disabled until we fix #8326
  | fn `hasKey` tagToEnumKey   -- See Note [Optimising tagToEnum#]
  , [_type_arg, val_arg] <- args
  , Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
  , isDeadBinder bndr
  = do { dflags <- getDynFlags
       ; let enum_to_tag :: CoreAlt -> CoreAlt
                -- Takes   K -> e  into   tagK# -> e
                -- where tagK# is the tag of constructor K
             enum_to_tag (DataAlt con, [], rhs)
               = ASSERT( isEnumerationTyCon (dataConTyCon con) )
                 (LitAlt tag, [], rhs)
              where
                 tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
             enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)

             new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
             new_bndr = setIdType bndr intPrimTy
                 -- The binder is dead, but should have the right type
       ; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
  | otherwise
  = do { dflags <- getDynFlags
       ; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
                         fn (argInfoAppArgs args) rules of {
           Nothing ->
             return Nothing ;   -- No rule matches
           Just (rule, rule_rhs) ->
             do { checkedTick (RuleFired (ru_name rule))
                ; let cont' = pushSimplifiedArgs env
                                                 (drop (ruleArity rule) args)
                                                 call_cont
                      -- (ruleArity rule) says how many args the rule consumed
                ; dump dflags rule rule_rhs
                ; return (Just (rule_rhs, cont')) }}}
  where
    -- Emit dump output when rule-firing dump flags are set.
    dump dflags rule rule_rhs
      | dopt Opt_D_dump_rule_rewrites dflags
      = log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
          [ text "Rule:" <+> ftext (ru_name rule)
          , text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
          , text "After: " <+> pprCoreExpr rule_rhs
          , text "Cont:  " <+> ppr call_cont ]

      | dopt Opt_D_dump_rule_firings dflags
      = log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
          ftext (ru_name rule)

      | otherwise
      = return ()

    log_rule dflags flag hdr details
      = liftIO . dumpSDoc dflags alwaysQualify flag "" $
                   sep [text hdr, nest 4 details]

{- Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:

  data Foo = A | B | C

Then we want to transform

   case tagToEnum# x of   ==>    case x of
     A -> e1                       DEFAULT -> e1
     B -> e2                       1#      -> e2
     C -> e3                       2#      -> e3

thereby getting rid of the tagToEnum# altogether.  If there was a DEFAULT
alternative we retain it (remember it comes first).  If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT.  Otherwise Lint complains that the new case is not exhaustive.
See #8317.

Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
     RULE:           f (g x) y = x+y
     Eqn:            f a     y = a-y

But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
     foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence


************************************************************************
*                                                                      *
                Rebuilding a case expression
*                                                                      *
************************************************************************

Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:

        case x# of      ===>   let y# = x# in e
          y# -> e

(when x#, y# are of primitive type, of course).  We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match.  For example:

   case x of
     0#      -> ...
     DEFAULT -> ...(case x of
                         0#      -> ...
                         DEFAULT -> ...) ...

Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case.  This
really only shows up in eliminating error-checking code.

Note that SimplUtils.mkCase combines identical RHSs.  So

        case e of       ===> case e of DEFAULT -> r
           True  -> r
           False -> r

Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
        case ==# a# b# of { True -> x; False -> x }
to just
        x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)

Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:

    case e of r       ===>   let r = e in ...r...
      _ -> ...r...

        (a) 'e' is already evaluated (it may be so if e is a variable)
            Specifically we check (exprIsHNF e).  In this case
            we can just allocate the WHNF directly with a let.
or
        (b) 'x' is not used at all and e is ok-for-speculation
             The ok-for-spec bit checks that we don't lose any
             exceptions or divergence.

             NB: it'd be *sound* to switch from case to let if the
             scrutinee was not yet WHNF but was guaranteed to
             converge; but sticking with case means we won't build a
             thunk

or
        (c) 'x' is used strictly in the body, and 'e' is a variable
            Then we can just substitute 'e' for 'x' in the body.
            See Note [Eliminating redundant seqs]

For (b), the "not used at all" test is important.
Consider
   case (case a ># b of { True -> (p,q); False -> (q,p) }) of
     r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
   let r = case a ># b of { True -> (p,q); False -> (q,p) }
   in blah
because that builds an unnecessary thunk.

Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
   case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
   (...x...)

Note that this can change the error behaviour.  For example, we might
transform
    case x of { _ -> error "bad" }
    --> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").

Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation.  If you want to fix the evaluation order, use
'pseq'.  See Trac #8900 for an example where the loss of this
transformation bit us in practice.

See also Note [Empty case alternatives] in CoreSyn.

Just for reference, the original code (added Jan 13) looked like this:
     || case_bndr_evald_next rhs

    case_bndr_evald_next :: CoreExpr -> Bool
      -- See Note [Case binder next]
    case_bndr_evald_next (Var v)         = v == case_bndr
    case_bndr_evald_next (Cast e _)      = case_bndr_evald_next e
    case_bndr_evald_next (App e _)       = case_bndr_evald_next e
    case_bndr_evald_next (Case e _ _ _)  = case_bndr_evald_next e
    case_bndr_evald_next _               = False

(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)


Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
   case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
        .....(a +# b)....
If we have
   case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.  This really isn't a big deal I think.  The let can be


Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:       test :: Integer -> IO ()
                test = print

Turns out that this compiles to:
    Print.test
      = \ eta :: Integer
          eta1 :: Void# ->
          case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
          case hPutStr stdout
                       (PrelNum.jtos eta ($w[] @ Char))
                       eta1
          of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s  }}

Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:

f x y = if x < 0 then jtos x
          else if y==0 then "" else jtos x

At a particular call site we have (f v 1).  So we inline to get

        if v < 0 then jtos x
        else if 1==0 then "" else jtos x

Now simplify the 1==0 conditional:

        if v<0 then jtos v else jtos v

Now common-up the two branches of the case:

        case (v<0) of DEFAULT -> jtos v

Why don't we drop the case?  Because it's strict in v.  It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}

---------------------------------------------------------
--      Eliminate the case if possible

rebuildCase, reallyRebuildCase
   :: SimplEnv
   -> OutExpr          -- Scrutinee
   -> InId             -- Case binder
   -> [InAlt]          -- Alternatives (increasing order)
   -> SimplCont
   -> SimplM (SimplEnv, OutExpr)

--------------------------------------------------
--      1. Eliminate the case if there's a known constructor
--------------------------------------------------

rebuildCase env scrut case_bndr alts cont
  | Lit lit <- scrut    -- No need for same treatment as constructors
                        -- because literals are inlined more vigorously
  , not (litIsLifted lit)
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (LitAlt lit) alts of
            Nothing           -> missingAlt env case_bndr alts cont
            Just (_, bs, rhs) -> simple_rhs bs rhs }

  | Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
        -- Works when the scrutinee is a variable with a known unfolding
        -- as well as when it's an explicit constructor application
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (DataAlt con) alts of
            Nothing                 -> missingAlt env case_bndr alts cont
            Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
            Just (_, bs, rhs)       -> knownCon env scrut con ty_args other_args
                                                case_bndr bs rhs cont
        }
  where
    simple_rhs bs rhs = ASSERT( null bs )
                        do { env' <- simplNonRecX env case_bndr scrut
                               -- scrut is a constructor application,
                               -- hence satisfies let/app invariant
                           ; simplExprF env' rhs cont }


--------------------------------------------------
--      2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------

rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
  -- See if we can get rid of the case altogether
  -- See Note [Case elimination]
  -- mkCase made sure that if all the alternatives are equal,
  -- then there is now only one (DEFAULT) rhs

 -- 2a.  Dropping the case altogether, if
 --      a) it binds nothing (so it's really just a 'seq')
 --      b) evaluating the scrutinee has no side effects
 | is_plain_seq
 , exprOkForSideEffects scrut
          -- The entire case is dead, so we can drop it
          -- if the scrutinee converges without having imperative
          --   side effects or raising a Haskell exception
          -- See Note [PrimOp can_fail and has_side_effects] in PrimOp
 = simplExprF env rhs cont

 -- 2b.  Turn the case into a let, if
 --      a) it binds only the case-binder
 --      b) unlifted case: the scrutinee is ok-for-speculation
 --           lifted case: the scrutinee is in HNF (or will later be demanded)
 | all_dead_bndrs
 , if is_unlifted
   then exprOkForSpeculation scrut  -- See Note [Case elimination: unlifted case]
   else exprIsHNF scrut             -- See Note [Case elimination: lifted case]
     || scrut_is_demanded_var scrut
 = do { tick (CaseElim case_bndr)
      ; env' <- simplNonRecX env case_bndr scrut
      ; simplExprF env' rhs cont }

 -- 2c. Try the seq rules if
 --      a) it binds only the case binder
 --      b) a rule for seq applies
 -- See Note [User-defined RULES for seq] in MkId
 | is_plain_seq
 = do { let rhs'     = substExpr (text "rebuild-case") env rhs
            env'     = zapSubstEnv env
            scrut_ty = substTy env (idType case_bndr)
            out_args = [ TyArg { as_arg_ty  = scrut_ty
                               , as_hole_ty = seq_id_ty }
                       , TyArg { as_arg_ty  = exprType rhs'
                               , as_hole_ty = applyTy seq_id_ty scrut_ty }
                       , ValArg scrut, ValArg rhs']
                      -- Lazily evaluated, so we don't do most of this

      ; rule_base <- getSimplRules
      ; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args cont
      ; case mb_rule of
          Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
          Nothing                -> reallyRebuildCase env scrut case_bndr alts cont }
 where
   is_unlifted    = isUnLiftedType (idType case_bndr)
   all_dead_bndrs = all isDeadBinder bndrs       -- bndrs are [InId]
   is_plain_seq   = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
   seq_id_ty      = idType seqId

   scrut_is_demanded_var :: CoreExpr -> Bool
            -- See Note [Eliminating redundant seqs]
   scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
   scrut_is_demanded_var (Var _)    = isStrictDmd (idDemandInfo case_bndr)
   scrut_is_demanded_var _          = False


rebuildCase env scrut case_bndr alts cont
  = reallyRebuildCase env scrut case_bndr alts cont

--------------------------------------------------
--      3. Catch-all case
--------------------------------------------------

reallyRebuildCase env scrut case_bndr alts cont
  = do  {       -- Prepare the continuation;
                -- The new subst_env is in place
          (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont

        -- Simplify the alternatives
        ; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont

        ; dflags <- getDynFlags
        ; let alts_ty' = contResultType dup_cont
        ; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'

        -- Notice that rebuild gets the in-scope set from env', not alt_env
        -- (which in any case is only built in simplAlts)
        -- The case binder does *not* scope over the whole returned case-expression
        ; rebuild env' case_expr nodup_cont }

{-
simplCaseBinder checks whether the scrutinee is a variable, v.  If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.

Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].

Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
        case <any> of x { (a,b) ->
        case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is
eliminated.  The point is that we bring into the envt a binding
        let x = (a,b)
after the outer case, and that makes (a,b) alive.  At least we do unless
the case binder is guaranteed dead.

Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives.  Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee] Note [Improving seq] ~~~~~~~~~~~~~~~~~~~ Consider type family F :: * -> * type instance F Int = Int ... case e of x { DEFAULT -> rhs } ... where x::F Int. Then we'd like to rewrite (F Int) to Int, getting case e `cast` co of x'::Int I# x# -> let x = x' `cast` sym co in rhs so that 'rhs' can take advantage of the form of x'. Notice that Note [Case of cast] (in OccurAnal) may then apply to the result. Nota Bene: We only do the [Improving seq] transformation if the case binder 'x' is actually used in the rhs; that is, if the case is *not* a *pure* seq. a) There is no point in adding the cast to a pure seq. b) There is a good reason not to: doing so would interfere with seq rules (Note [Built-in RULES for seq] in MkId). In particular, this [Improving seq] thing *adds* a cast while [Built-in RULES for seq] *removes* one, so they just flip-flop. You might worry about case v of x { __DEFAULT -> ... case (v `cast` co) of y { I# -> ... }} This is a pure seq (since x is unused), so [Improving seq] won't happen. But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get case v of x { __DEFAULT -> ... case (x `cast` co) of y { I# -> ... }} Now the outer case is not a pure seq, so [Improving seq] will happen, and then the inner case will disappear. The need for [Improving seq] showed up in Roman's experiments. Example: foo :: F Int -> Int -> Int foo t n = t `seq` bar n where bar 0 = 0 bar n = bar (n - case t of TI i -> i) Here we'd like to avoid repeated evaluating t inside the loop, by taking advantage of the `seq`. At one point I did transformation in LiberateCase, but it's more robust here. (Otherwise, there's a danger that we'll simply drop the 'seq' altogether, before LiberateCase gets to see it.) 
-} simplAlts :: SimplEnv -> OutExpr -> InId -- Case binder -> [InAlt] -- Non-empty -> SimplCont -> SimplM (OutExpr, OutId, [OutAlt]) -- Includes the continuation -- Like simplExpr, this just returns the simplified alternatives; -- it does not return an environment -- The returned alternatives can be empty, none are possible simplAlts env scrut case_bndr alts cont' = do { let env0 = zapFloats env ; (env1, case_bndr1) <- simplBinder env0 case_bndr ; fam_envs <- getFamEnvs ; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut case_bndr case_bndr1 alts ; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts -- NB: it's possible that the returned in_alts is empty: this is handled -- by the caller (rebuildCase) in the missingAlt function ; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts ; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $ return (scrut', case_bndr', alts') } ------------------------------------ improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv -> OutExpr -> InId -> OutId -> [InAlt] -> SimplM (SimplEnv, OutExpr, OutId) -- Note [Improving seq] improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)] | not (isDeadBinder case_bndr) -- Not a pure seq! 
See Note [Improving seq] , Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1) = do { case_bndr2 <- newId (fsLit "nt") ty2 ; let rhs = DoneEx (Var case_bndr2 `Cast` mkSymCo co) env2 = extendIdSubst env case_bndr rhs ; return (env2, scrut `Cast` co, case_bndr2) } improveSeq _ env scrut _ case_bndr1 _ = return (env, scrut, case_bndr1) ------------------------------------ simplAlt :: SimplEnv -> Maybe OutExpr -- The scrutinee -> [AltCon] -- These constructors can't be present when -- matching the DEFAULT alternative -> OutId -- The case binder -> SimplCont -> InAlt -> SimplM OutAlt simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs) = ASSERT( null bndrs ) do { let env' = addBinderUnfolding env case_bndr' (mkOtherCon imposs_deflt_cons) -- Record the constructors that the case-binder *can't* be. ; rhs' <- simplExprC env' rhs cont' ; return (DEFAULT, [], rhs') } simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs) = ASSERT( null bndrs ) do { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit) ; rhs' <- simplExprC env' rhs cont' ; return (LitAlt lit, [], rhs') } simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs) = do { -- Deal with the pattern-bound variables -- Mark the ones that are in ! positions in the -- data constructor as certainly-evaluated. -- NB: simplLamBinders preserves this eval info ; let vs_with_evals = add_evals (dataConRepStrictness con) ; (env', vs') <- simplLamBndrs env vs_with_evals -- Bind the case-binder to (con args) ; let inst_tys' = tyConAppArgs (idType case_bndr') con_app :: OutExpr con_app = mkConApp2 con inst_tys' vs' ; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app ; rhs' <- simplExprC env'' rhs cont' ; return (DataAlt con, vs', rhs') } where -- add_evals records the evaluated-ness of the bound variables of -- a case pattern. This is *important*. 
Consider -- data T = T !Int !Int -- -- case x of { T a b -> T (a+1) b } -- -- We really must record that b is already evaluated so that we don't -- go and re-evaluate it when constructing the result. -- See Note [Data-con worker strictness] in MkId.hs add_evals the_strs = go vs the_strs where go [] [] = [] go (v:vs') strs | isTyVar v = v : go vs' strs go (v:vs') (str:strs) | isMarkedStrict str = evald_v : go vs' strs | otherwise = zapped_v : go vs' strs where zapped_v = zapIdOccInfo v -- See Note [Case alternative occ info] evald_v = zapped_v `setIdUnfolding` evaldUnfolding go _ _ = pprPanic "cat_evals" (ppr con $$ ppr vs $$ ppr the_strs) addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv addAltUnfoldings env scrut case_bndr con_app = do { dflags <- getDynFlags ; let con_app_unf = mkSimpleUnfolding dflags con_app env1 = addBinderUnfolding env case_bndr con_app_unf -- See Note [Add unfolding for scrutinee] env2 = case scrut of Just (Var v) -> addBinderUnfolding env1 v con_app_unf Just (Cast (Var v) co) -> addBinderUnfolding env1 v $ mkSimpleUnfolding dflags (Cast con_app (mkSymCo co)) _ -> env1 ; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app]) ; return env2 } addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv addBinderUnfolding env bndr unf | debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf = WARN( not (eqType (idType bndr) (exprType tmpl)), ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) ) modifyInScope env (bndr `setIdUnfolding` unf) | otherwise = modifyInScope env (bndr `setIdUnfolding` unf) zapBndrOccInfo :: Bool -> Id -> Id -- Consider case e of b { (a,b) -> ... 
} -- Then if we bind b to (a,b) in "...", and b is not dead, -- then we must zap the deadness info on a,b zapBndrOccInfo keep_occ_info pat_id | keep_occ_info = pat_id | otherwise = zapIdOccInfo pat_id {- Note [Add unfolding for scrutinee] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In general it's unlikely that a variable scrutinee will appear in the case alternatives case x of { ...x unlikely to appear... } because the binder-swap in OccAnal has got rid of all such occcurrences See Note [Binder swap] in OccAnal. BUT it is still VERY IMPORTANT to add a suitable unfolding for a variable scrutinee, in simplAlt. Here's why case x of y (a,b) -> case b of c I# v -> ...(f y)... There is no occurrence of 'b' in the (...(f y)...). But y gets the unfolding (a,b), and *that* mentions b. If f has a RULE RULE f (p, I# q) = ... we want that rule to match, so we must extend the in-scope env with a suitable unfolding for 'y'. It's *essential* for rule matching; but it's also good for case-elimintation -- suppose that 'f' was inlined and did multi-level case analysis, then we'd solve it in one simplifier sweep instead of two. Exactly the same issue arises in SpecConstr; see Note [Add scrutinee to ValueEnv too] in SpecConstr HOWEVER, given case x of y { Just a -> r1; Nothing -> r2 } we do not want to add the unfolding x -> y to 'x', which might seem cool, since 'y' itself has different unfoldings in r1 and r2. Reason: if we did that, we'd have to zap y's deadness info and that is a very useful piece of information. So instead we add the unfolding x -> Just a, and x -> Nothing in the respective RHSs. ************************************************************************ * * \subsection{Known constructor} * * ************************************************************************ We are a bit careful with occurrence info. Here's an example (\x* -> case x of (a*, b) -> f a) (h v, e) where the * means "occurs once". 
This effectively becomes case (h v, e) of (a*, b) -> f a) and then let a* = h v; b = e in f a and then f (h v) All this should happen in one sweep. -} knownCon :: SimplEnv -> OutExpr -- The scrutinee -> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces) -> InId -> [InBndr] -> InExpr -- The alternative -> SimplCont -> SimplM (SimplEnv, OutExpr) knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont = do { env' <- bind_args env bs dc_args ; env'' <- bind_case_bndr env' ; simplExprF env'' rhs cont } where zap_occ = zapBndrOccInfo (isDeadBinder bndr) -- bndr is an InId -- Ugh! bind_args env' [] _ = return env' bind_args env' (b:bs') (Type ty : args) = ASSERT( isTyVar b ) bind_args (extendTvSubst env' b ty) bs' args bind_args env' (b:bs') (arg : args) = ASSERT( isId b ) do { let b' = zap_occ b -- Note that the binder might be "dead", because it doesn't -- occur in the RHS; and simplNonRecX may therefore discard -- it via postInlineUnconditionally. -- Nevertheless we must keep it if the case-binder is alive, -- because it may be used in the con_app. See Note [knownCon occ info] ; env'' <- simplNonRecX env' b' arg -- arg satisfies let/app invariant ; bind_args env'' bs' args } bind_args _ _ _ = pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$ text "scrut:" <+> ppr scrut -- It's useful to bind bndr to scrut, rather than to a fresh -- binding x = Con arg1 .. 
argn -- because very often the scrut is a variable, so we avoid -- creating, and then subsequently eliminating, a let-binding -- BUT, if scrut is a not a variable, we must be careful -- about duplicating the arg redexes; in that case, make -- a new con-app from the args bind_case_bndr env | isDeadBinder bndr = return env | exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut)) | otherwise = do { dc_args <- mapM (simplVar env) bs -- dc_ty_args are aready OutTypes, -- but bs are InBndrs ; let con_app = Var (dataConWorkId dc) `mkTyApps` dc_ty_args `mkApps` dc_args ; simplNonRecX env bndr con_app } ------------------- missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr) -- This isn't strictly an error, although it is unusual. -- It's possible that the simplifer might "see" that -- an inner case has no accessible alternatives before -- it "sees" that the entire branch of an outer case is -- inaccessible. So we simply put an error case here instead. 
missingAlt env case_bndr _ cont = WARN( True, ptext (sLit "missingAlt") <+> ppr case_bndr ) return (env, mkImpossibleExpr (contResultType cont)) {- ************************************************************************ * * \subsection{Duplicating continuations} * * ************************************************************************ -} prepareCaseCont :: SimplEnv -> [InAlt] -> SimplCont -> SimplM (SimplEnv, SimplCont, -- Dupable part SimplCont) -- Non-dupable part -- We are considering -- K[case _ of { p1 -> r1; ...; pn -> rn }] -- where K is some enclosing continuation for the case -- Goal: split K into two pieces Kdup,Knodup so that -- a) Kdup can be duplicated -- b) Knodup[Kdup[e]] = K[e] -- The idea is that we'll transform thus: -- Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] } -- -- We may also return some extra bindings in SimplEnv (that scope over -- the entire continuation) -- -- When case-of-case is off, just make the entire continuation non-dupable prepareCaseCont env alts cont | not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont) | not (many_alts alts) = return (env, cont, mkBoringStop (contResultType cont)) | otherwise = mkDupableCont env cont where many_alts :: [InAlt] -> Bool -- True iff strictly > 1 non-bottom alternative many_alts [] = False -- See Note [Bottom alternatives] many_alts [_] = False many_alts (alt:alts) | is_bot_alt alt = many_alts alts | otherwise = not (all is_bot_alt alts) is_bot_alt (_,_,rhs) = exprIsBottom rhs {- Note [Bottom alternatives] ~~~~~~~~~~~~~~~~~~~~~~~~~~ When we have case (case x of { A -> error .. ; B -> e; C -> error ..) of alts then we can just duplicate those alts because the A and C cases will disappear immediately. This is more direct than creating join points and inlining them away; and in some cases we would not even create the join points (see Note [Single-alternative case]) and we would keep the case-of-case which is silly. See Trac #4930. 
-} mkDupableCont :: SimplEnv -> SimplCont -> SimplM (SimplEnv, SimplCont, SimplCont) mkDupableCont env cont | contIsDupable cont = return (env, cont, mkBoringStop (contResultType cont)) mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn mkDupableCont env (CastIt ty cont) = do { (env', dup, nodup) <- mkDupableCont env cont ; return (env', CastIt ty dup, nodup) } -- Duplicating ticks for now, not sure if this is good or not mkDupableCont env cont@(TickIt{}) = return (env, mkBoringStop (contHoleType cont), cont) mkDupableCont env cont@(StrictBind {}) = return (env, mkBoringStop (contHoleType cont), cont) -- See Note [Duplicating StrictBind] mkDupableCont env (StrictArg info cci cont) -- See Note [Duplicating StrictArg] = do { (env', dup, nodup) <- mkDupableCont env cont ; (env'', args') <- mapAccumLM makeTrivialArg env' (ai_args info) ; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) } mkDupableCont env cont@(ApplyToTy { sc_cont = tail }) = do { (env', dup_cont, nodup_cont) <- mkDupableCont env tail ; return (env', cont { sc_cont = dup_cont }, nodup_cont ) } mkDupableCont env (ApplyToVal { sc_arg = arg, sc_env = se, sc_cont = cont }) = -- e.g. [...hole...] (...arg...) -- ==> -- let a = ...arg... -- in [...hole...] 
a do { (env', dup_cont, nodup_cont) <- mkDupableCont env cont ; arg' <- simplExpr (se `setInScope` env') arg ; (env'', arg'') <- makeTrivial NotTopLevel env' arg' ; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = zapSubstEnv env'' , sc_dup = OkToDup, sc_cont = dup_cont } ; return (env'', app_cont, nodup_cont) } mkDupableCont env cont@(Select _ case_bndr [(_, bs, _rhs)] _ _) -- See Note [Single-alternative case] -- | not (exprIsDupable rhs && contIsDupable case_cont) -- | not (isDeadBinder case_bndr) | all isDeadBinder bs -- InIds && not (isUnLiftedType (idType case_bndr)) -- Note [Single-alternative-unlifted] = return (env, mkBoringStop (contHoleType cont), cont) mkDupableCont env (Select _ case_bndr alts se cont) = -- e.g. (case [...hole...] of { pi -> ei }) -- ===> -- let ji = \xij -> ei -- in case [...hole...] of { pi -> ji xij } do { tick (CaseOfCase case_bndr) ; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont -- NB: We call prepareCaseCont here. If there is only one -- alternative, then dup_cont may be big, but that's ok -- because we push it into the single alternative, and then -- use mkDupableAlt to turn that simplified alternative into -- a join point if it's too big to duplicate. -- And this is important: see Note [Fusing case continuations] ; let alt_env = se `setInScope` env' ; (alt_env', case_bndr') <- simplBinder alt_env case_bndr ; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts -- Safe to say that there are no handled-cons for the DEFAULT case -- NB: simplBinder does not zap deadness occ-info, so -- a dead case_bndr' will still advertise its deadness -- This is really important because in -- case e of b { (# p,q #) -> ... } -- b is always dead, and indeed we are not allowed to bind b to (# p,q #), -- which might happen if e was an explicit unboxed pair and b wasn't marked dead. -- In the new alts we build, we have the new case binder, so it must retain -- its deadness. 
-- NB: we don't use alt_env further; it has the substEnv for -- the alternatives, and we don't want that ; (env'', alts'') <- mkDupableAlts env' case_bndr' alts' ; return (env'', -- Note [Duplicated env] Select OkToDup case_bndr' alts'' (zapSubstEnv env'') (mkBoringStop (contHoleType nodup_cont)), nodup_cont) } mkDupableAlts :: SimplEnv -> OutId -> [InAlt] -> SimplM (SimplEnv, [InAlt]) -- Absorbs the continuation into the new alternatives mkDupableAlts env case_bndr' the_alts = go env the_alts where go env0 [] = return (env0, []) go env0 (alt:alts) = do { (env1, alt') <- mkDupableAlt env0 case_bndr' alt ; (env2, alts') <- go env1 alts ; return (env2, alt' : alts' ) } mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr) -> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr)) mkDupableAlt env case_bndr (con, bndrs', rhs') = do dflags <- getDynFlags if exprIsDupable dflags rhs' -- Note [Small alternative rhs] then return (env, (con, bndrs', rhs')) else do { let rhs_ty' = exprType rhs' scrut_ty = idType case_bndr case_bndr_w_unf = case con of DEFAULT -> case_bndr DataAlt dc -> setIdUnfolding case_bndr unf where -- See Note [Case binders and join points] unf = mkInlineUnfolding Nothing rhs rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs' LitAlt {} -> WARN( True, ptext (sLit "mkDupableAlt") <+> ppr case_bndr <+> ppr con ) case_bndr -- The case binder is alive but trivial, so why has -- it not been substituted away? 
used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs' | otherwise = bndrs' ++ [case_bndr_w_unf] abstract_over bndr | isTyVar bndr = True -- Abstract over all type variables just in case | otherwise = not (isDeadBinder bndr) -- The deadness info on the new Ids is preserved by simplBinders ; (final_bndrs', final_args) -- Note [Join point abstraction] <- if (any isId used_bndrs') then return (used_bndrs', varsToCoreExprs used_bndrs') else do { rw_id <- newId (fsLit "w") voidPrimTy ; return ([setOneShotLambda rw_id], [Var voidPrimId]) } ; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty') -- Note [Funky mkPiTypes] ; let -- We make the lambdas into one-shot-lambdas. The -- join point is sure to be applied at most once, and doing so -- prevents the body of the join point being floated out by -- the full laziness pass really_final_bndrs = map one_shot final_bndrs' one_shot v | isId v = setOneShotLambda v | otherwise = v join_rhs = mkLams really_final_bndrs rhs' join_arity = exprArity join_rhs join_call = mkApps (Var join_bndr) final_args ; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs) ; return (env', (con, bndrs', join_call)) } -- See Note [Duplicated env] {- Note [Fusing case continuations] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It's important to fuse two successive case continuations when the first has one alternative. That's why we call prepareCaseCont here. 
Consider this, which arises from thunk splitting (see Note [Thunk splitting] in WorkWrap): let x* = case (case v of {pn -> rn}) of I# a -> I# a in body The simplifier will find (Var v) with continuation Select (pn -> rn) ( Select [I# a -> I# a] ( StrictBind body Stop So we'll call mkDupableCont on Select [I# a -> I# a] (StrictBind body Stop) There is just one alternative in the first Select, so we want to simplify the rhs (I# a) with continuation (StricgtBind body Stop) Supposing that body is big, we end up with let $j a = <let x = I# a in body> in case v of { pn -> case rn of I# a -> $j a } This is just what we want because the rn produces a box that the case rn cancels with. See Trac #4957 a fuller example. Note [Case binders and join points] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this case (case .. ) of c { I# c# -> ....c.... If we make a join point with c but not c# we get $j = \c -> ....c.... But if later inlining scrutines the c, thus $j = \c -> ... case c of { I# y -> ... } ... we won't see that 'c' has already been scrutinised. This actually happens in the 'tabulate' function in wave4main, and makes a significant difference to allocation. An alternative plan is this: $j = \c# -> let c = I# c# in ...c.... but that is bad if 'c' is *not* later scrutinised. So instead we do both: we pass 'c' and 'c#' , and record in c's inlining (a stable unfolding) that it's really I# c#, thus $j = \c# -> \c[=I# c#] -> ...c.... Absence analysis may later discard 'c'. NB: take great care when doing strictness analysis; see Note [Lamba-bound unfoldings] in DmdAnal. Also note that we can still end up passing stuff that isn't used. Before strictness analysis we have let $j x y c{=(x,y)} = (h c, ...) in ... After strictness analysis we see that h is strict, we end up with let $j x y c{=(x,y)} = ($wh x y, ...) and c is unused. 
Note [Duplicated env] ~~~~~~~~~~~~~~~~~~~~~ Some of the alternatives are simplified, but have not been turned into a join point So they *must* have an zapped subst-env. So we can't use completeNonRecX to bind the join point, because it might to do PostInlineUnconditionally, and we'd lose that when zapping the subst-env. We could have a per-alt subst-env, but zapping it (as we do in mkDupableCont, the Select case) is safe, and at worst delays the join-point inlining. Note [Small alternative rhs] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is worth checking for a small RHS because otherwise we get extra let bindings that may cause an extra iteration of the simplifier to inline back in place. Quite often the rhs is just a variable or constructor. The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra iterations because the version with the let bindings looked big, and so wasn't inlined, but after the join points had been inlined it looked smaller, and so was inlined. NB: we have to check the size of rhs', not rhs. Duplicating a small InAlt might invalidate occurrence information However, if it *is* dupable, we return the *un* simplified alternative, because otherwise we'd need to pair it up with an empty subst-env.... but we only have one env shared between all the alts. (Remember we must zap the subst-env before re-simplifying something). Rather than do this we simply agree to re-simplify the original (small) thing later. Note [Funky mkPiTypes] ~~~~~~~~~~~~~~~~~~~~~~ Notice the funky mkPiTypes. If the contructor has existentials it's possible that the join point will be abstracted over type variables as well as term variables. Example: Suppose we have data T = forall t. C [t] Then faced with case (case e of ...) of C t xs::[t] -> rhs We get the join point let j :: forall t. [t] -> ... j = /\t \xs::[t] -> rhs in case (case e of ...) 
of C t xs::[t] -> j t xs Note [Join point abstraction] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Join points always have at least one value argument, for several reasons * If we try to lift a primitive-typed something out for let-binding-purposes, we will *caseify* it (!), with potentially-disastrous strictness results. So instead we turn it into a function: \v -> e where v::Void#. The value passed to this function is void, which generates (almost) no code. * CPR. We used to say "&& isUnLiftedType rhs_ty'" here, but now we make the join point into a function whenever used_bndrs' is empty. This makes the join-point more CPR friendly. Consider: let j = if .. then I# 3 else I# 4 in case .. of { A -> j; B -> j; C -> ... } Now CPR doesn't w/w j because it's a thunk, so that means that the enclosing function can't w/w either, which is a lose. Here's the example that happened in practice: kgmod :: Int -> Int -> Int kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0 then 78 else 5 * Let-no-escape. We want a join point to turn into a let-no-escape so that it is implemented as a jump, and one of the conditions for LNE is that it's not updatable. In CoreToStg, see Note [What is a non-escaping let] * Floating. Since a join point will be entered once, no sharing is gained by floating out, but something might be lost by doing so because it might be allocated. I have seen a case alternative like this: True -> \v -> ... It's a bit silly to add the realWorld dummy arg in this case, making $j = \s v -> ... True -> $j s (the \v alone is enough to make CPR happy) but I think it's rare There's a slight infelicity here: we pass the overall case_bndr to all the join points if it's used in *any* RHS, because we don't know its usage in each RHS separately Note [Duplicating StrictArg] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The original plan had (where E is a big argument) e.g. f E [..hole..] ==> let $j = \a -> f E a in $j [..hole..] But this is terrible! 
Here's an example: && E (case x of { T -> F; F -> T }) Now, && is strict so we end up simplifying the case with an ArgOf continuation. If we let-bind it, we get let $j = \v -> && E v in simplExpr (case x of { T -> F; F -> T }) (ArgOf (\r -> $j r) And after simplifying more we get let $j = \v -> && E v in case x of { T -> $j F; F -> $j T } Which is a Very Bad Thing What we do now is this f E [..hole..] ==> let a = E in f a [..hole..] Now if the thing in the hole is a case expression (which is when we'll call mkDupableCont), we'll push the function call into the branches, which is what we want. Now RULES for f may fire, and call-pattern specialisation. Here's an example from Trac #3116 go (n+1) (case l of 1 -> bs' _ -> Chunk p fpc (o+1) (l-1) bs') If we can push the call for 'go' inside the case, we get call-pattern specialisation for 'go', which is *crucial* for this program. Here is the (&&) example: && E (case x of { T -> F; F -> T }) ==> let a = E in case x of { T -> && a F; F -> && a T } Much better! Notice that * Arguments to f *after* the strict one are handled by the ApplyToVal case of mkDupableCont. Eg f [..hole..] E * We can only do the let-binding of E because the function part of a StrictArg continuation is an explicit syntax tree. In earlier versions we represented it as a function (CoreExpr -> CoreEpxr) which we couldn't take apart. Do *not* duplicate StrictBind and StritArg continuations. We gain nothing by propagating them into the expressions, and we do lose a lot. The desire not to duplicate is the entire reason that mkDupableCont returns a pair of continuations. Note [Duplicating StrictBind] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Unlike StrictArg, there doesn't seem anything to gain from duplicating a StrictBind continuation, so we don't. Note [Single-alternative cases] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This case is just like the ArgOf case. Here's an example: data T a = MkT !a ...(MkT (abs x))... 
Then we get case (case x of I# x' -> case x' <# 0# of True -> I# (negate# x') False -> I# x') of y { DEFAULT -> MkT y Because the (case x) has only one alternative, we'll transform to case x of I# x' -> case (case x' <# 0# of True -> I# (negate# x') False -> I# x') of y { DEFAULT -> MkT y But now we do *NOT* want to make a join point etc, giving case x of I# x' -> let $j = \y -> MkT y in case x' <# 0# of True -> $j (I# (negate# x')) False -> $j (I# x') In this case the $j will inline again, but suppose there was a big strict computation enclosing the orginal call to MkT. Then, it won't "see" the MkT any more, because it's big and won't get duplicated. And, what is worse, nothing was gained by the case-of-case transform. So, in circumstances like these, we don't want to build join points and push the outer case into the branches of the inner one. Instead, don't duplicate the continuation. When should we use this strategy? We should not use it on *every* single-alternative case: e.g. case (case ....) of (a,b) -> (# a,b #) Here we must push the outer case into the inner one! Other choices: * Match [(DEFAULT,_,_)], but in the common case of Int, the alternative-filling-in code turned the outer case into case (...) of y { I# _ -> MkT y } * Match on single alternative plus (not (isDeadBinder case_bndr)) Rationale: pushing the case inwards won't eliminate the construction. But there's a risk of case (...) of y { (a,b) -> let z=(a,b) in ... } Now y looks dead, but it'll come alive again. Still, this seems like the best option at the moment. * Match on single alternative plus (all (isDeadBinder bndrs)) Rationale: this is essentially seq. * Match when the rhs is *not* duplicable, and hence would lead to a join point. This catches the disaster-case above. We can test the *un-simplified* rhs, which is fine. It might get bigger or smaller after simplification; if it gets smaller, this case might fire next time round. 
NB also that we must test contIsDupable case_cont *too, because case_cont might be big! HOWEVER: I found that this version doesn't work well, because we can get let x = case (...) of { small } in ...case x... When x is inlined into its full context, we find that it was a bad idea to have pushed the outer case inside the (...) case. Note [Single-alternative-unlifted] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Here's another single-alternative where we really want to do case-of-case: data Mk1 = Mk1 Int# | Mk2 Int# M1.f = \r [x_s74 y_s6X] case case y_s6X of tpl_s7m { M1.Mk1 ipv_s70 -> ipv_s70; M1.Mk2 ipv_s72 -> ipv_s72; } of wild_s7c { __DEFAULT -> case case x_s74 of tpl_s7n { M1.Mk1 ipv_s77 -> ipv_s77; M1.Mk2 ipv_s79 -> ipv_s79; } of wild1_s7b { __DEFAULT -> ==# [wild1_s7b wild_s7c]; }; }; So the outer case is doing *nothing at all*, other than serving as a join-point. In this case we really want to do case-of-case and decide whether to use a real join point or just duplicate the continuation: let $j s7c = case x of Mk1 ipv77 -> (==) s7c ipv77 Mk1 ipv79 -> (==) s7c ipv79 in case y of Mk1 ipv70 -> $j ipv70 Mk2 ipv72 -> $j ipv72 Hence: check whether the case binder's type is unlifted, because then the outer case is *not* a seq. -}
gcampax/ghc
compiler/simplCore/Simplify.hs
bsd-3-clause
118,609
18
25
36,268
14,239
7,535
6,704
-1
-1
#!/usr/bin/env runhaskell import Distribution.PackageDescription import Distribution.Simple import Distribution.Simple.LocalBuildInfo main :: IO () main = defaultMain
jeffwheeler/pointedlist
Setup.hs
bsd-3-clause
168
0
6
16
33
19
14
5
1
{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} module Data.StreamLike where import Data.ListLike as LL import Data.Word import Data.Bits import Data.Functor import Data.ByteString as B import Control.Comonad -- | Stream type. s is a stream like list or ByteString; c is a type of -- stream's element; i is stream size type. class StreamLike s c where head :: s -> c tail :: s -> s -- take :: Int -> s -> s -- drop :: Int -> s -> s -- splitAt :: Int -> s -> (s,s) -- null :: s -> Bool -- empty :: s -- length :: s -> Int -- span :: (c->Bool) -> s -> (s,s) -- toList :: s -> [c] -- conv :: (Integral a, Integral b) => a -> b -- conv = fromInteger . toInteger data Container s c a = Container (a -> c -> a) s a -- instance (LL.ListLike s c) => Functor (Container a s c) where -- fmap f (Container a b) = Container a (f b) -- class Listable s c where -- foobar :: s -> c -- instance (LL.ListLike s c) => Listable (Container x s) c where instance (LL.ListLike s c) => StreamLike (Container s c a) c where head (Container f s x) = LL.head s tail (Container f s x) = Container f (LL.tail s) (f x (LL.head s)) -- take (Container a b) = LL.take -- drop (Container a b) = LL.drop -- splitAt (Container a b) = LL.splitAt -- null (Container a b) = LL.null -- empty (Container a b) = LL.empty -- length (Container a b) = LL.length -- span (Container a b) = LL.span -- toList (Container a b) = LL.toList -- instance StreamLike ByteString Word8 Int where -- head = B.head -- tail = B.tail -- take = B.take -- drop = B.drop -- splitAt n = B.splitAt n -- null = B.null -- empty = B.empty -- length = B.length -- span = B.span -- toList = B.unpack
ierton/yteratee
src/Data/StreamLike.hs
bsd-3-clause
1,841
0
11
486
240
148
92
17
0
{-# LANGUAGE MultiParamTypeClasses #-} -- | The Scale module implements scales. module Music.Diatonic.Scale ( Scale, Scl(..), majorScale, minorScale, majorPentatonicScale, minorPentatonicScale, minorHarmonicScale, minorMelodicScale, tetrachord ) where import Music.Diatonic data Scale = Diatonic Quality Note | Pentatonic Quality Note | Harmonic Note | Melodic Note deriving (Eq) class Scl a where scale :: a -> Scale instance Nte Scale where noteMap f (Diatonic Major n) = majorScale . f $ n noteMap f (Diatonic Minor n) = minorScale . f $ n noteMap f (Pentatonic Major n) = majorPentatonicScale . f $ n noteMap f (Pentatonic Minor n) = minorPentatonicScale . f $ n noteMap f (Harmonic n) = minorHarmonicScale . f $ n noteMap f (Melodic n) = minorMelodicScale . f $ n notePlus f s1 s2 = f (tonic s1) (tonic s2) instance Nts Scale where notes (Diatonic Major n) = init $ tc1 ++ tc2 where tc1 = tetrachord n tc2 = tetrachord (Maj2nd `above` last tc1) notes (Diatonic Minor n) = zipWith ($) [id, id, lower, id, id, lower, lower] (notes . majorScale $ n) notes (Pentatonic Major n) = concat . zipWith ($) [return, return, return, const [], return, return, const []] $ (notes . majorScale $ n) notes (Pentatonic Minor n) = concat . zipWith ($) [return, const [], return, return, return, const [], return] $ (notes . minorScale $ n) notes (Harmonic n) = zipWith ($) [id, id, id, id, id, id, raise] $ (notes . minorScale $ n) notes (Melodic n) = zipWith ($) [id, id, id, id, id, raise, raise] $ (notes . 
minorScale $ n) instance Qual Scale where quality (Diatonic q _) = q quality (Pentatonic q _) = q quality (Harmonic _) = Minor quality (Melodic _) = Minor instance Show Scale where show s@(Harmonic n) = (show n) ++ "m (harmonic)" show s@(Melodic n) = (show n) ++ "m (melodic)" show s@(Pentatonic q n) = (show n) ++ (if quality s == Minor then "m" else "") ++ " (pentatonic)" show s@(Diatonic q n) = (show n) ++ (if quality s == Minor then "m" else "") instance Deg Scale Note where first (Diatonic q t) = t first (Pentatonic q t) = t first (Harmonic t) = t first (Melodic t) = t degrees s = map (\n -> (notePlus degree (first s) n, n)) ns where ns = notes s instance Equiv Scale where equiv s1 s2 = enote && etype where enote = notePlus equiv s1 s2 etype = (toC $# s1) == (toC $# s2) toC = const C -- | Creates a 'Major' diatonic 'Scale' using the given 'Note' as the tonic. majorScale :: Note -> Scale majorScale = Diatonic Major -- | Creates a 'Minor' diatonic 'Scale' using the given 'Note' as the tonic. minorScale :: Note -> Scale minorScale = Diatonic Minor -- | Creates a 'Major' pentatonic 'Scale' using the given 'Note' as the tonic. majorPentatonicScale :: Note -> Scale majorPentatonicScale = Pentatonic Major -- | Creates a 'Minor' pentatonic 'Scale' using the given 'Note' as the tonic. minorPentatonicScale :: Note -> Scale minorPentatonicScale = Pentatonic Minor -- | Creates a 'Minor' harmonic 'Scale' using the given 'Note' as the tonic. minorHarmonicScale :: Note -> Scale minorHarmonicScale = Harmonic -- | Creates a 'Minor' melodic 'Scale' using the given 'Note' as the tonic. minorMelodicScale :: Note -> Scale minorMelodicScale = Melodic -- | Returns a tetrachord using the given 'Note' as the starting note. -- -- > tetrachord G == [G,A,B,C] tetrachord :: Note -> [Note] tetrachord n = scanl (\n i -> i `above` n) n [Maj2nd, Maj2nd, Min2nd]
xpika/music-diatonic
Music/Diatonic/Scale.hs
bsd-3-clause
3,559
0
12
796
1,240
671
569
66
1
----------------------------------------------------------------------------- -- | -- Module : Network.HTTP.HandleStream -- Copyright : (c) 2008- Sigbjorn Finne -- License : BSD -- -- Maintainer : Sigbjorn Finne <sigbjorn.finne@gmail.com> -- Stability : experimental -- Portability : non-portable (not tested) -- -- A 'HandleStream'-based version of "Network.HTTP" interface. -- -- For more detailed information about what the individual exports do, please consult -- the documentation for "Network.HTTP". /Notice/ however that the functions here do -- not perform any kind of normalization prior to transmission (or receipt); you are -- responsible for doing any such yourself, or, if you prefer, just switch to using -- "Network.HTTP" function instead. -- ----------------------------------------------------------------------------- module Network.HTTP.HandleStream ( simpleHTTP -- :: Request ty -> IO (Result (Response ty)) , simpleHTTP_ -- :: HStream ty => HandleStream ty -> Request ty -> IO (Result (Response ty)) , sendHTTP -- :: HStream ty => HandleStream ty -> Request ty -> IO (Result (Response ty)) , sendHTTP_notify -- :: HStream ty => HandleStream ty -> Request ty -> IO () -> IO (Result (Response ty)) , receiveHTTP -- :: HStream ty => HandleStream ty -> IO (Result (Request ty)) , respondHTTP -- :: HStream ty => HandleStream ty -> Response ty -> IO () , simpleHTTP_debug -- :: FilePath -> Request DebugString -> IO (Response DebugString) ) where ----------------------------------------------------------------- ------------------ Imports -------------------------------------- ----------------------------------------------------------------- import Network.BufferType import Network.Stream ( fmapE, Result ) import Network.StreamDebugger ( debugByteStream ) import Network.TCP (HStream(..), HandleStream ) import Network.HTTP.Base import Network.HTTP.Headers import Network.HTTP.Utils ( trim, readsOne ) import Data.Char (toLower) import Data.Maybe (fromMaybe) import 
Control.Monad (when) ----------------------------------------------------------------- ------------------ Misc ----------------------------------------- ----------------------------------------------------------------- -- | @simpleHTTP@ transmits a resource across a non-persistent connection. simpleHTTP :: HStream ty => Request ty -> IO (Result (Response ty)) simpleHTTP r = do auth <- getAuth r c <- openStream (host auth) (fromMaybe 80 (port auth)) simpleHTTP_ c r -- | @simpleHTTP_debug debugFile req@ behaves like 'simpleHTTP', but logs -- the HTTP operation via the debug file @debugFile@. simpleHTTP_debug :: HStream ty => FilePath -> Request ty -> IO (Result (Response ty)) simpleHTTP_debug httpLogFile r = do auth <- getAuth r c0 <- openStream (host auth) (fromMaybe 80 (port auth)) c <- debugByteStream httpLogFile c0 simpleHTTP_ c r -- | Like 'simpleHTTP', but acting on an already opened stream. simpleHTTP_ :: HStream ty => HandleStream ty -> Request ty -> IO (Result (Response ty)) simpleHTTP_ s r = sendHTTP s r -- | @sendHTTP hStream httpRequest@ transmits @httpRequest@ over -- @hStream@, but does not alter the status of the connection, nor request it to be -- closed upon receiving the response. sendHTTP :: HStream ty => HandleStream ty -> Request ty -> IO (Result (Response ty)) sendHTTP conn rq = sendHTTP_notify conn rq (return ()) -- | @sendHTTP_notify hStream httpRequest action@ behaves like 'sendHTTP', but -- lets you supply an IO @action@ to execute once the request has been successfully -- transmitted over the connection. Useful when you want to set up tracing of -- request transmission and its performance. 
sendHTTP_notify :: HStream ty => HandleStream ty -> Request ty -> IO () -> IO (Result (Response ty)) sendHTTP_notify conn rq onSendComplete = do when providedClose $ (closeOnEnd conn True) catchIO (sendMain conn rq onSendComplete) (\e -> do { close conn; ioError e }) where providedClose = findConnClose (rqHeaders rq) -- From RFC 2616, section 8.2.3: -- 'Because of the presence of older implementations, the protocol allows -- ambiguous situations in which a client may send "Expect: 100- -- continue" without receiving either a 417 (Expectation Failed) status -- or a 100 (Continue) status. Therefore, when a client sends this -- header field to an origin server (possibly via a proxy) from which it -- has never seen a 100 (Continue) status, the client SHOULD NOT wait -- for an indefinite period before sending the request body.' -- -- Since we would wait forever, I have disabled use of 100-continue for now. sendMain :: HStream ty => HandleStream ty -> Request ty -> (IO ()) -> IO (Result (Response ty)) sendMain conn rqst onSendComplete = do --let str = if null (rqBody rqst) -- then show rqst -- else show (insertHeader HdrExpect "100-continue" rqst) writeBlock conn (buf_fromStr bufferOps $ show rqst) -- write body immediately, don't wait for 100 CONTINUE writeBlock conn (rqBody rqst) onSendComplete rsp <- getResponseHead conn switchResponse conn True False rsp rqst -- Hmmm, this could go bad if we keep getting "100 Continue" -- responses... Except this should never happen according -- to the RFC. switchResponse :: HStream ty => HandleStream ty -> Bool {- allow retry? -} -> Bool {- is body sent? -} -> Result ResponseData -> Request ty -> IO (Result (Response ty)) switchResponse _ _ _ (Left e) _ = return (Left e) -- retry on connreset? -- if we attempt to use the same socket then there is an excellent -- chance that the socket is not in a completely closed state. 
switchResponse conn allow_retry bdy_sent (Right (cd,rn,hdrs)) rqst = case matchResponse (rqMethod rqst) cd of Continue | not bdy_sent -> do {- Time to send the body -} writeBlock conn (rqBody rqst) >>= either (return . Left) (\ _ -> do rsp <- getResponseHead conn switchResponse conn allow_retry True rsp rqst) | otherwise -> do {- keep waiting -} rsp <- getResponseHead conn switchResponse conn allow_retry bdy_sent rsp rqst Retry -> do {- Request with "Expect" header failed. Trouble is the request contains Expects other than "100-Continue" -} writeBlock conn ((buf_append bufferOps) (buf_fromStr bufferOps (show rqst)) (rqBody rqst)) rsp <- getResponseHead conn switchResponse conn False bdy_sent rsp rqst Done -> do when (findConnClose hdrs) (closeOnEnd conn True) return (Right $ Response cd rn hdrs (buf_empty bufferOps)) DieHorribly str -> do close conn return (responseParseError "Invalid response:" str) ExpectEntity -> do r <- fmapE (\ (ftrs,bdy) -> Right (Response cd rn (hdrs++ftrs) bdy)) $ maybe (maybe (hopefulTransfer bo (readLine conn) []) (\ x -> readsOne (linearTransfer (readBlock conn)) (return$responseParseError "unrecognized content-length value" x) x) cl) (ifChunked (chunkedTransfer bo (readLine conn) (readBlock conn)) (uglyDeathTransfer "sendHTTP")) tc case r of Left{} -> do close conn return r Right (Response _ _ hs _) -> do when (findConnClose hs) (closeOnEnd conn True) return r where tc = lookupHeader HdrTransferEncoding hdrs cl = lookupHeader HdrContentLength hdrs bo = bufferOps -- reads and parses headers getResponseHead :: HStream ty => HandleStream ty -> IO (Result ResponseData) getResponseHead conn = fmapE (\es -> parseResponseHead (map (buf_toStr bufferOps) es)) (readTillEmpty1 bufferOps (readLine conn)) -- | @receiveHTTP hStream@ reads a 'Request' from the 'HandleStream' @hStream@ receiveHTTP :: HStream bufTy => HandleStream bufTy -> IO (Result (Request bufTy)) receiveHTTP conn = getRequestHead >>= either (return . 
Left) processRequest where -- reads and parses headers getRequestHead :: IO (Result RequestData) getRequestHead = do fmapE (\es -> parseRequestHead (map (buf_toStr bufferOps) es)) (readTillEmpty1 bufferOps (readLine conn)) processRequest (rm,uri,hdrs) = fmapE (\ (ftrs,bdy) -> Right (Request uri rm (hdrs++ftrs) bdy)) $ maybe (maybe (return (Right ([], buf_empty bo))) -- hopefulTransfer "" (\ x -> readsOne (linearTransfer (readBlock conn)) (return$responseParseError "unrecognized Content-Length value" x) x) cl) (ifChunked (chunkedTransfer bo (readLine conn) (readBlock conn)) (uglyDeathTransfer "receiveHTTP")) tc where -- FIXME : Also handle 100-continue. tc = lookupHeader HdrTransferEncoding hdrs cl = lookupHeader HdrContentLength hdrs bo = bufferOps -- | @respondHTTP hStream httpResponse@ transmits an HTTP 'Response' over -- the 'HandleStream' @hStream@. It could be used to implement simple web -- server interactions, performing the dual role to 'sendHTTP'. respondHTTP :: HStream ty => HandleStream ty -> Response ty -> IO () respondHTTP conn rsp = do writeBlock conn (buf_fromStr bufferOps $ show rsp) -- write body immediately, don't wait for 100 CONTINUE writeBlock conn (rspBody rsp) return () ------------------------------------------------------------------------------ headerName :: String -> String headerName x = map toLower (trim x) ifChunked :: a -> a -> String -> a ifChunked a b s = case headerName s of "chunked" -> a _ -> b
astro/HTTPbis
Network/HTTP/HandleStream.hs
bsd-3-clause
10,063
65
20
2,431
2,044
1,044
1,000
144
6
module Sexy.Instances.Plus.Double where import Sexy.Classes (Plus(..)) import Sexy.Data (Double) import qualified Prelude as P instance Plus Double where (+) = (P.+)
DanBurton/sexy
src/Sexy/Instances/Plus/Double.hs
bsd-3-clause
169
0
6
24
57
38
19
6
0
----------------------------------------------------------------------------- -- -- Module : GameOfLife.Ui.Text -- Copyright : 2016 Author name here -- License : BSD3 -- -- Maintainer : bnazariy@gmail.com -- Stability : -- Portability : -- -- | -- ----------------------------------------------------------------------------- module GameOfLife.Ui.Text ( showGridEffective, runGameContiniously ) where import GameOfLife import Data.List(intercalate, elemIndex) import Data.List.Split(splitOn) import System.Console.ANSI import Control.Monad(forM_, when) import Data.Maybe(fromMaybe) import Control.Concurrent(threadDelay) -- Redraws only lines with cells . showGridEffective :: RenderFunc showGridEffective _ [] = return () showGridEffective True g = return () showGridEffective False g = do -- All indexes of lines with True ( cell ). let lns = filter (\x -> x /= -1) $ map (\(i,_) -> fromMaybe (-1) i ) $ -- All lines with true. filter (\(_, x) -> id `any` x) $ -- List elements with indexes. map (\x -> (elemIndex x g, x)) g forM_ lns ( \x -> do setCursorPosition x 0 clearLine forM_ (render x) (\ch -> do when (ch == '@') $ setSGR [SetColor Foreground Vivid Red] putChar ch setSGR [Reset] ) return ()) where render x = [if x' then '@' else ' ' | x' <- g !! x] -- Run game generation after generation. -- Takes GameOptions delay and render function. -- Render function takes Grid and bool argument -- If argument is true than function should clear screen. -- If false it should draw new generation. 
runGameContiniously :: GameFunc runGameContiniously opts delay renderF = do let gen = nextGeneration $ grid opts renderF True gen renderF False gen threadDelay ((100 * 60) * delay) runGameContiniously (createOpts gen 10) delay renderF return () runGame :: GameOptions -> IO () runGame opts = putStr $ intercalate "\n" $ map showGrid (take (runs opts) $ iterate nextGeneration (grid opts)) showGrid :: Grid -> String showGrid [] = "" showGrid g = let w = length (head g) h = length g in intercalate "\n" [ [ if (g !! y) !! x then '@' else '-' | x <- [0 .. w - 1] ] | y <- [0 .. h - 1] ] ++ "\n"
AM636E/HaskellGameOfLife
src/GameOfLife/Ui/Text.hs
bsd-3-clause
2,502
0
20
750
682
361
321
49
2
import NLP.DictParser main :: IO () main = getContents >>= \c -> print (parseString c)
mwotton/dictparser
src/main.hs
bsd-3-clause
99
1
9
27
42
20
22
3
1
module Main where import Data.Graph.Inductive.Graph import Data.Graph.Inductive.Tree (Gr) import System.Exit import Flow source = 1 sink = 2 node = 3 graph :: Gr String Int graph = insEdge (node, sink, 1) . insEdge (source, node, 3) . insEdge (source, sink, 10) $ insNodes [(source, "source"), (sink, "sink"), (node, "a")] empty noSolution :: (Show a, Show b, DynGraph g) => g a b -> IO () noSolution graph = do putStrLn "There is no solution for:" prettyPrint graph exitFailure printProblem :: DynGraph g => AFlow g -> IO () printProblem (Flow cap flow) = do putStrLn "-- Problem" prettyPrint cap putStrLn "\n" putStrLn "-- Solution" prettyPrint flow main :: IO () main = do let prob = initialFlow graph source putStrLn "\n#\n# Initial Flow\n#\n" printProblem prob putStrLn "\n#\n# Solved Problem\n#\n" maybe (noSolution graph) printProblem $ maximalFlow graph source sink
thsutton/mf
src/Main.hs
bsd-3-clause
911
0
10
176
346
174
172
30
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE Strict #-} module Layers.Pool where import Network import Util import Static import Data.Singletons.TypeLits import Data.Array.Repa import Data.Serialize data Pool = Pool instance Serialize Pool where put _ = return () get = return Pool instance Creatable Pool where seeded _ = Pool instance Updatable Pool where type Gradient Pool = () instance ( KnownNat h, KnownNat (Halve h), KnownNat w, KnownNat (Halve w), KnownNat bat, KnownNat d ) => Layer (ZZ ::. bat ::. d ::. h ::. w) Pool where type LOutput (ZZ ::. bat ::. d ::. h ::. w) Pool = (ZZ ::. bat ::. d ::. Halve h ::. Halve w) runForward _ x = sComputeP$ sTraverse x f where f lx (b:.y:.x) = maximum [ lx$ b :. 2*y :. 2*x , lx$ b :. 2*y+1 :. 2*x , lx$ b :. 2*y :. 2*x+1 , lx$ b :. 2*y+1 :. 2*x+1 ] runBackwards _ (SArray x) (SArray y) (SArray dy) = do dx <- sComputeP$ sFromFunction f return ((), dx) where halve (b:.y:.x) = b:. y `div` 2 :. x `div` 2 f pos | x ! pos == y ! halve pos = dy ! halve pos | otherwise = 0
jonascarpay/convoluted
src/Layers/Pool.hs
bsd-3-clause
1,383
0
16
442
543
282
261
-1
-1
-- | Quick, hacky sendmail wrapper module Sihemo.Sendmail ( sendmail ) where import System.Process (readProcess) sendmail :: String -- ^ Recipient -> String -- ^ Subject -> [String] -- ^ Content (lines) -> IO () -- ^ Blocks until mail is sent sendmail recipient subject body = do _ <- readProcess "/usr/sbin/sendmail" ["-t"] $ unlines $ [ "To: " ++ recipient , "Subject: " ++ subject , "" ] ++ body return ()
jaspervdj/sihemo
src/Sihemo/Sendmail.hs
bsd-3-clause
497
0
12
161
115
63
52
14
1
module Derivative where import qualified Data.Map as M import Control.Monad (sequence) data Expr = Con String | Num Double | Fun String Int | Add [Expr] | Mul [Expr] | Div Expr Expr | Neg Expr | Cos Expr | Sin Expr | Ln Expr | Exp Expr | Pow Expr Double deriving (Eq, Ord) evaluate :: Expr -> M.Map Expr Double -> Maybe Double evaluate (Num a) _ = Just a evaluate (Con a) t = t M.!? Con a evaluate (Fun a b) t = t M.!? Fun a b evaluate (Add xs) t = fmap sum (traverse (`evaluate` t) xs) evaluate (Mul xs) t = fmap product (traverse (`evaluate` t) xs) evaluate (Div a b) t = (/) <$> evaluate a t <*> evaluate b t evaluate (Neg a) t = negate <$> evaluate a t evaluate (Cos a) t = cos <$> evaluate a t evaluate (Sin a) t = sin <$> evaluate a t evaluate (Ln a) t = (/(log $ exp 1)) <$> (log <$> evaluate a t) evaluate (Exp a) t = exp <$> evaluate a t evaluate (Pow a b) t = (**b) <$> evaluate a t derivative :: Expr -> Expr derivative (Num _) = Num 0 derivative (Con _) = Num 0 derivative (Fun f o) = Fun f (o+1) derivative (Add es) = Add (fmap derivative es) derivative (Mul []) = Num 0 derivative (Mul (e:es)) = Add [Mul ((derivative e):es), Mul [derivative (Mul es),e]] derivative (Div e1 e2) = Add [Mul [(derivative e1), e2], (Neg (Mul [e1, (derivative e2)]))] derivative (Neg e) = Neg (derivative e) derivative (Cos e) = Neg (Mul [(derivative e), (Sin e)]) derivative (Sin e) = Mul [(derivative e), (Cos e)] derivative (Exp e) = Mul [(derivative e), (Exp e)] derivative (Ln e) = Div (derivative e) e derivative (Pow _ 0) = Num 0 derivative (Pow e n) = Mul [(Num n), (derivative e), (Pow e (n-1))] partialDerivative :: Expr -> Expr -> Expr partialDerivative (Num _) _ = Num 0 partialDerivative (Con _) _ = Num 0 partialDerivative (Fun f o) (Fun f2 o2) = if f == f2 && o ==o2 then Num 1 else Num 0 partialDerivative (Add es) f = Add (fmap ((flip partialDerivative) f) es) partialDerivative (Mul []) _ = Num 0 partialDerivative (Mul (e:es)) f = Add [Mul ((partialDerivative e f):es), Mul 
[partialDerivative (Mul es) f,e]] partialDerivative (Div e1 e2) f = Add [Mul [(partialDerivative e1 f), e2], (Neg (Mul [e1, (partialDerivative e2 f)]))] partialDerivative (Neg e) f = Neg (partialDerivative e f) partialDerivative (Cos e) f = Neg (Mul [(partialDerivative e f), (Sin e)]) partialDerivative (Sin e) f = Mul [(partialDerivative e f), (Cos e)] partialDerivative (Exp e) f = Mul [(partialDerivative e f), (Exp e)] partialDerivative (Ln e) f = Div (partialDerivative e f) e partialDerivative (Pow _ 0) _ = Num 0 partialDerivative (Pow e n) f = Mul [(Num n), (partialDerivative e f), (Pow e (n-1))] simplify :: Expr -> Expr simplify (Mul []) = Num 1 simplify (Mul es) = if elem (Num 0) es then Num 0 else Mul (fmap simplify es) simplify (Add []) = Num 0 simplify (Add es) = Add $ fmap simplify (filter (/= (Num 0)) es) simplify (Div (Num 0) _) = Num 0 simplify (Div e1 e2) = Div (simplify e1) (simplify e2) simplify (Exp (Num 0)) = Num 1 simplify (Exp e) = Exp (simplify e) simplify (Neg e) = Neg (simplify e) simplify (Fun s o) = Fun s o simplify (Con s) = Con s simplify o = o instance Show Expr where show (Con s) = s show (Num f) = show f show (Fun s o) = s ++ (replicate o '\'') show (Add [] ) = show "" show (Add (e:[]) ) = show e show (Add (e:es) ) = (show e) ++ " + " ++ (show (Add es)) show (Mul [] ) = show "" show (Mul (e:[])) = show e show (Mul (e:es)) = (show e) ++ "." ++ (show (Mul es)) show (Div e1 e2) = "(" ++ show e1 ++ " / " ++ show e2 ++ ")" show (Neg e) = "-" ++ show e show (Cos e) = "cos(" ++ show e ++ ")" show (Sin e) = "sin(" ++ show e ++ ")" show (Ln e) = "ln" ++ show e show (Exp e) = "e^("++show e++")" show (Pow e f) = show e ++ "^(" ++ show f++")"
GintMist/double-pendulum
src/derivative.hs
bsd-3-clause
3,964
0
13
1,038
2,276
1,147
1,129
88
2
module Main where import System.Environment import Data.Tree import Data.Char data Op = Plus | Minus | Times | Div deriving Show data Elem = Op Op | Int Int deriving Show type Expr = Tree Elem lexer :: String -> [ Elem ] lexer "" = [ ] lexer ( ' ' : cs ) = lexer cs lexer ( '+' : cs ) = Op Plus : lexer cs lexer ( '-' : cs ) = Op Minus : lexer cs lexer ( '*' : cs ) = Op Times : lexer cs lexer ( '/' : cs ) = Op Div : lexer cs lexer ca@( c : _ ) | isDigit c = let ( ret, rest ) = span isDigit ca in Int ( read ret ) : lexer rest lexer _ = error "lex error" parser :: [ Elem ] -> Expr parser [ e@( Int _ ) ] = Node e [ ] parser ( e1@( Int _ ) : e2@( Op _ ) : rest ) = Node e2 [ Node e1 [ ], parser rest ] eval :: Expr -> Int eval ( Node ( Int i ) [ ] ) = i eval ( Node ( Op Plus ) [ e1, e2 ] ) = eval e1 + eval e2 eval ( Node ( Op Minus ) [ e1, e2 ] ) = eval e1 - eval e2 eval ( Node ( Op Times ) [ e1, e2 ] ) = eval e1 * eval e2 eval ( Node ( Op Div ) [ e1, e2 ] ) = eval e1 `div` eval e2 main :: IO () main = do [ expr ] <- getArgs print $ eval $ parser $ lexer expr
YoshikuniJujo/toyhaskell_haskell
tests/testOp.hs
bsd-3-clause
1,089
38
11
314
646
330
316
31
1
-- | -- Module: WildBind.X11.KeySym -- Description: Re-export KeySyms -- Maintainer: Toshio Ito <debug.ito@gmail.com> -- -- This module re-exports X11 'KeySym's. -- -- @since 0.2.0.0 module WildBind.X11.KeySym ( -- * The type KeySym, -- * Alphabet xK_a, xK_b, xK_c, xK_d, xK_e, xK_f, xK_g, xK_h, xK_i, xK_j, xK_k, xK_l, xK_m, xK_n, xK_o, xK_p, xK_q, xK_r, xK_s, xK_t, xK_u, xK_v, xK_w, xK_x, xK_y, xK_z, xK_A, xK_B, xK_C, xK_D, xK_E, xK_F, xK_G, xK_H, xK_I, xK_J, xK_K, xK_L, xK_M, xK_N, xK_O, xK_P, xK_Q, xK_R, xK_S, xK_T, xK_U, xK_V, xK_W, xK_X, xK_Y, xK_Z, -- * Numbers xK_0, xK_1, xK_2, xK_3, xK_4, xK_5, xK_6, xK_7, xK_8, xK_9, -- * ASCII symbols xK_space, xK_exclam, xK_quotedbl, xK_numbersign, xK_dollar, xK_percent, xK_ampersand, xK_apostrophe, xK_quoteright, xK_parenleft, xK_parenright, xK_asterisk, xK_plus, xK_comma, xK_minus, xK_period, xK_slash, xK_colon, xK_semicolon, xK_less, xK_equal, xK_greater, xK_question, xK_at, xK_bracketleft, xK_backslash, xK_bracketright, xK_asciicircum, xK_underscore, xK_grave, xK_quoteleft, xK_braceleft, xK_bar, xK_braceright, xK_asciitilde, -- * Control keys xK_BackSpace, xK_Tab, xK_Linefeed, xK_Clear, xK_Return, xK_Pause, xK_Scroll_Lock, xK_Sys_Req, xK_Escape, xK_Delete, xK_Multi_key, xK_Codeinput, xK_SingleCandidate, xK_MultipleCandidate, xK_PreviousCandidate, xK_Home, xK_Left, xK_Up, xK_Right, xK_Down, xK_Prior, xK_Page_Up, xK_Next, xK_Page_Down, xK_End, xK_Begin, xK_Select, xK_Print, xK_Execute, xK_Insert, xK_Undo, xK_Redo, xK_Menu, xK_Find, xK_Cancel, xK_Help, xK_Break, xK_Mode_switch, xK_script_switch, xK_Num_Lock, -- * Number pad keys xK_KP_Space, xK_KP_Tab, xK_KP_Enter, xK_KP_F1, xK_KP_F2, xK_KP_F3, xK_KP_F4, xK_KP_Home, xK_KP_Left, xK_KP_Up, xK_KP_Right, xK_KP_Down, xK_KP_Prior, xK_KP_Page_Up, xK_KP_Next, xK_KP_Page_Down, xK_KP_End, xK_KP_Begin, xK_KP_Insert, xK_KP_Delete, xK_KP_Equal, xK_KP_Multiply, xK_KP_Add, xK_KP_Separator, xK_KP_Subtract, xK_KP_Decimal, xK_KP_Divide, xK_KP_0, xK_KP_1, xK_KP_2, xK_KP_3, xK_KP_4, 
xK_KP_5, xK_KP_6, xK_KP_7, xK_KP_8, xK_KP_9, -- * Function keys xK_F1, xK_F2, xK_F3, xK_F4, xK_F5, xK_F6, xK_F7, xK_F8, xK_F9, xK_F10, xK_F11, xK_L1, xK_F12, xK_L2, xK_F13, xK_L3, xK_F14, xK_L4, xK_F15, xK_L5, xK_F16, xK_L6, xK_F17, xK_L7, xK_F18, xK_L8, xK_F19, xK_L9, xK_F20, xK_L10, xK_F21, xK_R1, xK_F22, xK_R2, xK_F23, xK_R3, xK_F24, xK_R4, xK_F25, xK_R5, xK_F26, xK_R6, xK_F27, xK_R7, xK_F28, xK_R8, xK_F29, xK_R9, xK_F30, xK_R10, xK_F31, xK_R11, xK_F32, xK_R12, xK_F33, xK_R13, xK_F34, xK_R14, xK_F35, xK_R15, -- * Modifier keys xK_Shift_L, xK_Shift_R, xK_Control_L, xK_Control_R, xK_Caps_Lock, xK_Shift_Lock, xK_Meta_L, xK_Meta_R, xK_Alt_L, xK_Alt_R, xK_Super_L, xK_Super_R, xK_Hyper_L, xK_Hyper_R, -- * Alphabet with accent and ligatures xK_Agrave, xK_Aacute, xK_Acircumflex, xK_Atilde, xK_Adiaeresis, xK_Aring, xK_AE, xK_Ccedilla, xK_Egrave, xK_Eacute, xK_Ecircumflex, xK_Ediaeresis, xK_Igrave, xK_Iacute, xK_Icircumflex, xK_Idiaeresis, xK_ETH, xK_Eth, xK_Ntilde, xK_Ograve, xK_Oacute, xK_Ocircumflex, xK_Otilde, xK_Odiaeresis, xK_multiply, xK_Ooblique, xK_Ugrave, xK_Uacute, xK_Ucircumflex, xK_Udiaeresis, xK_Yacute, xK_THORN, xK_Thorn, xK_ssharp, xK_agrave, xK_aacute, xK_acircumflex, xK_atilde, xK_adiaeresis, xK_aring, xK_ae, xK_ccedilla, xK_egrave, xK_eacute, xK_ecircumflex, xK_ediaeresis, xK_igrave, xK_iacute, xK_icircumflex, xK_idiaeresis, xK_eth, xK_ntilde, xK_ograve, xK_oacute, xK_ocircumflex, xK_otilde, xK_odiaeresis, xK_division, xK_oslash, xK_ugrave, xK_uacute, xK_ucircumflex, xK_udiaeresis, xK_yacute, xK_thorn, xK_ydiaeresis, -- * Other symbols xK_nobreakspace, xK_exclamdown, xK_cent, xK_sterling, xK_currency, xK_yen, xK_brokenbar, xK_section, xK_diaeresis, xK_copyright, xK_ordfeminine, xK_guillemotleft, xK_notsign, xK_hyphen, xK_registered, xK_macron, xK_degree, xK_plusminus, xK_twosuperior, xK_threesuperior, xK_acute, xK_mu, xK_paragraph, xK_periodcentered, xK_cedilla, xK_onesuperior, xK_masculine, xK_guillemotright, xK_onequarter, xK_onehalf, 
xK_threequarters, xK_questiondown, -- * special keysym xK_VoidSymbol, ) where import Graphics.X11.Xlib
debug-ito/wild-bind
wild-bind-x11/src/WildBind/X11/KeySym.hs
bsd-3-clause
7,315
0
4
3,663
1,079
726
353
351
0
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Module      : Data.Array.Accelerate.Math.DFT
-- Copyright   : [2012] Manuel M T Chakravarty, Gabriele Keller, Trevor L. McDonell
-- License     : BSD3
--
-- Maintainer  : Manuel M T Chakravarty <chak@cse.unsw.edu.au>
-- Stability   : experimental
-- Portability : non-portable (GHC extensions)
--
-- Compute the Discrete Fourier Transform (DFT) along the lower order dimension
-- of an array.
--
-- This uses a naïve algorithm which takes O(n^2) time. However, you can
-- transform an array with an arbitrary extent, unlike with FFT which requires
-- each dimension to be a power of two.
--
-- The `dft` and `idft` functions compute the roots of unity as needed. If you
-- need to transform several arrays with the same extent then it is faster to
-- compute the roots once using `rootsOfUnity` or `inverseRootsOfUnity`
-- respectively, then call `dftG` directly.
--
-- You can also compute single values of the transform using `dftGS`
--
module Data.Array.Accelerate.Math.DFT (

  dft, idft, dftG, dftGS,

) where

import Prelude                                  as P hiding ((!!))
import Data.Array.Accelerate                    as A
import Data.Array.Accelerate.Math.DFT.Roots
import Data.Array.Accelerate.Data.Complex


-- | Compute the DFT along the low order dimension of an array
--
dft :: (Shape sh, Slice sh, Elt e, IsFloating e)
    => Acc (Array (sh:.Int) (Complex e))
    -> Acc (Array (sh:.Int) (Complex e))
dft v = dftG (rootsOfUnity (shape v)) v


-- | Compute the inverse DFT along the low order dimension of an array
--
idft :: (Shape sh, Slice sh, Elt e, IsFloating e)
     => Acc (Array (sh:.Int) (Complex e))
     -> Acc (Array (sh:.Int) (Complex e))
idft v
  = let sh      = shape v
        n       = indexHead sh
        roots   = inverseRootsOfUnity sh
        -- Inverse transform divides by the transform length n; build the
        -- complex scale factor (n :+ 0) once and map the division over the
        -- generic transform's result.
        scale   = lift (A.fromIntegral n :+ constant 0)
    in
    A.map (/scale) $ dftG roots v


-- | Generic function for computation of forward and inverse DFT. This function
-- is also useful if you transform many arrays of the same extent, and don't
-- want to recompute the roots for each one.
--
-- The extent of the input and roots must match.
--
dftG :: forall sh e. (Shape sh, Slice sh, Elt e, IsFloating e)
     => Acc (Array (sh:.Int) (Complex e))       -- ^ roots of unity
     -> Acc (Array (sh:.Int) (Complex e))       -- ^ input array
     -> Acc (Array (sh:.Int) (Complex e))
dftG roots arr
  = A.fold (+) (constant (0 :+ 0))
  $ A.zipWith (*) arr' roots'
  where
    base        = shape arr
    l           = indexHead base
    -- One extra (innermost) dimension of size 'shapeSize base': every output
    -- element gets its own copy of the entire input to reduce over.
    extend      = lift (base :. shapeSize base)

    -- Extend the entirety of the input arrays into a higher dimension, reading
    -- roots from the appropriate places and then reduce along this axis.
    --
    -- In the calculation for 'roots'', 'i' is the index into the extended
    -- dimension, with corresponding base index 'ix' which we are attempting to
    -- calculate the single DFT value of. The rest proceeds as per 'dftGS'.
    --
    arr'        = A.generate extend (\ix' -> let i = indexHead ix' in arr !! i)
    roots'      = A.generate extend
                    (\ix' -> let ix :. i  = unlift ix'
                                 sh :. n  = unlift (fromIndex base i) :: Exp sh :. Exp Int
                                 k        = indexHead ix
                             in  roots ! lift (sh :. (k*n) `mod` l))


-- | Compute a single value of the DFT.
--
dftGS :: forall sh e. (Shape sh, Slice sh, Elt e, IsFloating e)
      => Exp (sh :. Int)                        -- ^ index of the value we want
      -> Acc (Array (sh:.Int) (Complex e))      -- ^ roots of unity
      -> Acc (Array (sh:.Int) (Complex e))      -- ^ input array
      -> Acc (Scalar (Complex e))
dftGS ix roots arr
  = let k       = indexHead ix
        l       = indexHead (shape arr)

        -- all the roots we need to multiply with
        roots'  = A.generate (shape arr)
                    (\ix' -> let sh :. n = unlift ix' :: Exp sh :. Exp Int
                             in  roots ! lift (sh :. (k*n) `mod` l))
    in
    A.foldAll (+) (constant (0 :+ 0)) $ A.zipWith (*) arr roots'
thielema/accelerate-fft
Data/Array/Accelerate/Math/DFT.hs
bsd-3-clause
4,284
0
20
1,305
1,016
554
462
48
1
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE JavaScriptFFI #-}
{-# OPTIONS_HADDOCK hide #-}

-- | Thin wrapper over the browser Blob API for GHCJS builds. When compiled
-- with a non-GHCJS compiler, the FFI stubs below raise at runtime instead
-- (see the #else branch), so this module still type-checks everywhere.
module JavaScript.Blob ( Blob
                       , readBlob
                       , isBlob
                       ) where

import Control.Exception (mask_)
import Data.ByteString (ByteString)

#ifdef ghcjs_HOST_OS
import GHCJS.Foreign (bufferByteString)
import GHCJS.Types (JSRef)
#else
import JavaScript.NoGHCJS
#endif

-- Phantom tag so 'Blob' is a distinct 'JSRef' type; never instantiated.
data Blob_
type Blob = JSRef Blob_

#ifdef ghcjs_HOST_OS
-- Interruptible: completes via the $c continuation once the FileReader
-- fires its 'loadend' event with the ArrayBuffer result.
foreign import javascript interruptible "var reader = new FileReader();\ reader.addEventListener('loadend', function() {\ $c(reader.result);\ });\ reader.readAsArrayBuffer($1);" ffi_readBlob :: Blob -> IO (JSRef a)

foreign import javascript unsafe "$1 instanceof Blob" ffi_blobCheck :: JSRef a -> IO Bool
#else
-- Non-GHCJS stubs: same signatures, but any call is a runtime error.
ffi_readBlob :: Blob -> IO (JSRef a)
ffi_blobCheck :: JSRef a -> IO Bool
ffi_readBlob = error "ffi_readBlob: only available in JavaScript"
ffi_blobCheck = error "ffi_blobCheck: only available in JavaScript"
#endif

-- | Read the entire contents of a Blob into a strict 'ByteString'.
-- The FFI wait is wrapped in 'mask_' so an async exception cannot
-- interrupt the in-flight FileReader callback.
readBlob :: Blob -> IO ByteString
readBlob b = bufferByteString 0 0 =<< mask_ (ffi_readBlob b)

-- | Test whether an arbitrary JS reference is a Blob instance.
isBlob :: JSRef a -> IO Bool
isBlob = ffi_blobCheck
mstksg/ghcjs-websockets
src/JavaScript/Blob.hs
mit
1,311
22
8
353
238
139
99
-1
-1
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

-- Various orphan instances and functions that we don't want to appear in client
module Unison.ABT.Extra where

import Control.Applicative
import Data.Bytes.Serial (Serial(..), Serial1(..))
import Data.Bytes.VarInt (VarInt(..))
import Data.List hiding (cycle)
import Data.Ord
import Data.Vector ((!))
import Prelude hiding (abs,cycle)
import Unison.ABT
import Unison.Var (Var)
import qualified Data.Bytes.Get as Get
import qualified Data.Bytes.Put as Put
import qualified Data.Set as Set
import qualified Data.Foldable as Foldable
import qualified Data.Map as Map
import qualified Data.Vector as Vector
import qualified Unison.Digest as Digest
import qualified Unison.Var as Var

-- | We ignore annotations in the `Term`, as these should never affect the
-- meaning of the term.
--
-- Hashing is alpha-equivalence-aware: bound variables are hashed by their
-- de Bruijn-style position in 'env' rather than by name, and the members of
-- a 'Cycle' are hashed in a canonical (sorted-by-hash) order so that
-- reorderings of mutually recursive definitions hash identically.
hash :: forall f v a . (Foldable f, Digest.Digestable1 f, Var v)
     => Term f v a -> Digest.Hash
hash t = hash' [] t where
  hash' :: [Either [v] v] -> Term f v a -> Digest.Hash
  hash' env (Term _ _ t) = case t of
    Var v -> maybe die hashInt ind
      where
        -- A 'Left' entry is a whole cycle of binders, a 'Right' is a single
        -- abstraction; a variable matches either kind of binding.
        lookup (Left cycle) = elem v cycle
        lookup (Right v') = v == v'
        ind = findIndex lookup env
        -- env not likely to be very big, prefer to encode in one byte if possible
        hashInt :: Int -> Digest.Hash
        hashInt i = Digest.run (serialize (VarInt i))
        die = error $ "unknown var in environment: " ++ show (Var.name v)
    Cycle (AbsN' vs t) -> hash' (Left vs : env) t
    Cycle t -> hash' env t
    Abs v t -> hash' (Right v : env) t
    Tm t -> Digest.digest1 (hashCycle env) (hash' env) $ t

  hashCycle :: [Either [v] v] -> [Term f v a]
            -> Digest.DigestM (Term f v a -> Digest.Hash)
  hashCycle env@(Left cycle : envTl) ts | length cycle == length ts =
    let
      permute p xs = case Vector.fromList xs of xs -> map (xs !) p
      hashed = map (\(i,t) -> ((i,t), hash' env t)) (zip [0..] ts)
      -- Canonical order: sort the cycle members by their hash, then permute
      -- the binders to match, so the environment is order-independent.
      pt = map fst (sortBy (comparing snd) hashed)
      (p,ts') = unzip pt
    in case map Right (permute p cycle) ++ envTl of
      -- Intentional shadowing: the remainder runs under the permuted env.
      env -> Foldable.traverse_ (serialize . hash' env) ts' *> pure (hash' env)
  hashCycle env ts =
    Foldable.traverse_ (serialize . hash' env) ts *> pure (hash' env)

-- | Use the `hash` function to efficiently remove duplicates from the list,
-- preserving order.
--
-- BUG FIX: the index list was previously @[0 :: Int .. 1]@, i.e. just
-- @[0,1]@. Because 'zip' stops at the shorter argument, that silently
-- truncated the result to at most two elements for any input longer than
-- two. It must be the infinite index stream @[0 ..]@.
distinct :: (Foldable f, Digest.Digestable1 f, Var v)
         => [Term f v a] -> [Term f v a]
distinct ts = map fst (sortBy (comparing snd) m)
  where m = Map.elems (Map.fromList (map hash ts `zip` (ts `zip` [0 :: Int ..])))

-- | Use the `hash` function to remove elements from `t1s` that exist in
-- `t2s`, preserving order.
subtract :: (Foldable f, Digest.Digestable1 f, Var v)
         => [Term f v a] -> [Term f v a] -> [Term f v a]
subtract t1s t2s =
  let skips = Set.fromList (map hash t2s)
  in filter (\t -> Set.notMember (hash t) skips) t1s

-- | Orphan binary serialization for 'Term': a one-byte constructor tag
-- (0=Var, 1=Cycle, 2=Abs, 3=Tm) preceded by the annotation.
instance (Foldable f, Serial a, Serial v, Ord v, Serial1 f) => Serial (Term f v a) where
  serialize (Term _ a e) = serialize a *> case e of
    Var v -> Put.putWord8 0 *> serialize v
    Cycle body -> Put.putWord8 1 *> serialize body
    Abs v body -> Put.putWord8 2 *> serialize v *> serialize body
    Tm v -> Put.putWord8 3 *> serializeWith serialize v
  deserialize = do
    ann <- deserialize
    b <- Get.getWord8
    case b of
      0 -> annotatedVar ann <$> deserialize
      1 -> cycle' ann <$> deserialize
      2 -> abs' ann <$> deserialize <*> deserialize
      3 -> tm' ann <$> deserializeWith deserialize
      _ -> fail ("unknown byte tag, expected one of {0,1,2}, got: " ++ show b)
CGenie/platform
node/src/Unison/ABT/Extra.hs
mit
3,610
0
17
843
1,389
722
667
68
7
{- |
Module      :  ./Common/Lib/Maybe.hs
Description :  MaybeT monad transformer without the non-portable features
Copyright   :  C. Maeder and Uni Bremen 2002-2005
License     :  GPLv2 or higher, see LICENSE.txt
Maintainer  :  jonathan.von_schroeder@dfki.de
Stability   :  experimental
Portability :  portable

This module is a replacement of module Control.Monad.Maybe and only
contains the Monad instance for the newtype MaybeT m.
-}

module Common.Lib.Maybe (MaybeT (..), liftToMaybeT) where

import Control.Applicative
import Control.Monad

-- | A monad transformer which adds Maybe semantics to an existing monad.
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }

instance Monad m => Functor (MaybeT m) where
  fmap = liftM

instance Monad m => Applicative (MaybeT m) where
  pure = return
  (<*>) = ap

instance Monad m => Monad (MaybeT m) where
  -- 'fail' yields 'Nothing' instead of raising an error.
  fail _ = MaybeT $ return Nothing
  return = MaybeT . return . Just
  -- Short-circuit on 'Nothing'; otherwise feed the payload to 'f'.
  act >>= f = MaybeT $ do
    res <- runMaybeT act
    case res of
      Nothing -> return Nothing
      Just a -> runMaybeT (f a)

-- | Lift a computation of the underlying monad into the transformer.
liftToMaybeT :: Monad m => m a -> MaybeT m a
liftToMaybeT = MaybeT . liftM Just
spechub/Hets
Common/Lib/Maybe.hs
gpl-2.0
1,090
0
9
216
239
127
112
15
1
{-# Language TemplateHaskell #-} {-# Language OverloadedStrings #-} module BitcoinCore.Keys ( PublicKeyRep(..) , Address(..) , WIFPrivateKey(..) , genKeys , getAddress , getWIFPrivateKey , getPrivateKeyFromWIF , getPubKey , btcCurve , serializePrivateKey , deserializePrivateKey , serializePublicKeyRep , deserializePublicKeyRep , PubKeyFormat(..) , PubKeyHash(..) , addressToPubKeyHash , hashPubKeyRep , addrTxt ) where import General.Util import General.Types (Network(..)) import General.Hash ( Hash(..) , hashObject , ripemdSha256 ) import Prelude hiding (take, concat) import Data.ByteString (ByteString) import Crypto.PubKey.ECC.Types ( Curve , getCurveByName , Point(..) , CurveName(SEC_p256k1) ) import Crypto.PubKey.ECC.Generate (generate, generateQ) import Crypto.PubKey.ECC.ECDSA ( PublicKey(..) , PrivateKey(..)) import Crypto.OpenSSL.ECC ( ecGroupFromCurveOID , EcGroup , ecPointFromOct , ecPointToAffineGFp ) import qualified Data.Text as T import Data.Binary (Binary(..)) import Data.Binary.Put (Put) import qualified Data.Binary.Put as Put import Data.Binary.Get (Get) import qualified Data.Binary.Get as Get import qualified Data.ByteString.Lazy as BL import Data.Maybe (fromMaybe) import Control.Lens (makeLenses, (^.)) data PublicKeyRep = PublicKeyRep PubKeyFormat PublicKey deriving (Eq, Show) data PubKeyFormat = Compressed | Uncompressed deriving (Eq, Show) -- WIFPrivateKey and Address have base58 -> use text rep -- TODO: add base58 type? newtype WIFPrivateKey = WIF T.Text deriving (Eq, Show) newtype Address = Address { _addrTxt :: T.Text } deriving (Eq, Show) makeLenses ''Address type PubKeyHash = Hash PublicKeyRep -- Bitcoin uses a specefic eliptic curve, secp256k1, -- to generate public private key pairs btcCurve :: Curve btcCurve = getCurveByName SEC_p256k1 btcEcGroup :: EcGroup btcEcGroup = fromMaybe (error "Unable to get secp256k1 ec group. 
This should never happen.") (ecGroupFromCurveOID "secp256k1") genKeys :: IO (PublicKey, PrivateKey) genKeys = generate btcCurve getPubKey :: PrivateKey -> PublicKey getPubKey privKey = PublicKey btcCurve pubPoint where pubPoint = generateQ btcCurve (private_d privKey) -- Addresses are generated from public key by -- SHA256, then RIPEMD160 hashing of the public key -- Then Base58 encoding the resulting hash -- https://github.com/bitcoinbook/bitcoinbook/blob/first_edition/ch04.asciidoc#bitcoin-addresses getAddress :: PublicKeyRep -> Network -> Address getAddress pubKeyRep network = Address $ encodeBase58Check (addressPrefix network) payload where payload = Payload . hash . hashPubKeyRep $ pubKeyRep addressPrefix MainNet = Prefix 0x00 addressPrefix TestNet3 = Prefix 0x6F addressToPubKeyHash :: Address -> PubKeyHash addressToPubKeyHash address = Hash hash where (_, Payload hash, _) = decodeBase58Check $ address^.addrTxt getWIFPrivateKey :: PrivateKey -> WIFPrivateKey getWIFPrivateKey privateKey = WIF $ encodeBase58Check privateKeyPrefix (Payload . serializePrivateKey $ privateKey) getPrivateKeyFromWIF :: WIFPrivateKey -> PrivateKey getPrivateKeyFromWIF (WIF wifText) = if prefix == privateKeyPrefix then deserializePrivateKey payload else error $ "Unable to read WIF PrivateKey. Invalid prefix: " ++ show prefix where (prefix, Payload payload, checksum) = decodeBase58Check wifText privateKeyPrefix :: Prefix privateKeyPrefix = Prefix 0x80 serializePrivateKey :: PrivateKey -> ByteString serializePrivateKey = BL.toStrict . Put.runPut . Put.putByteString . unrollWithPad BE 32 . fromIntegral . private_d deserializePrivateKey :: ByteString -> PrivateKey deserializePrivateKey = PrivateKey btcCurve . roll BE . Get.runGet (Get.getByteString 32) . 
getLazyBS where getLazyBS bs = BL.fromChunks [bs] instance Binary PublicKeyRep where get = deserializePublicKeyRep put = serializePublicKeyRep hashPubKeyRep :: PublicKeyRep -> Hash PublicKeyRep hashPubKeyRep = hashObject ripemdSha256 serializePublicKeyRep :: PublicKeyRep -> Put -- See: https://github.com/bitcoinbook/bitcoinbook/blob/first_edition/ch04.asciidoc#public-key-formats serializePublicKeyRep (PublicKeyRep Uncompressed pubKey) = do Put.putWord8 4 Put.putByteString . unrollWithPad BE 32 $ x Put.putByteString . unrollWithPad BE 32 $ y where Point x y = public_q pubKey -- See: https://github.com/bitcoinbook/bitcoinbook/blob/first_edition/ch04.asciidoc#compressed-public-keys serializePublicKeyRep (PublicKeyRep Compressed pubKey) = do Put.putWord8 prefix Put.putByteString . unrollWithPad BE 32 $ x where Point x y = public_q pubKey prefix = if isEven y then 2 else 3 isEven n = n `mod` 2 == 0 deserializePublicKeyRep :: Get PublicKeyRep deserializePublicKeyRep = do prefix <- Get.lookAhead Get.getWord8 let pubKeyFormat = case prefix of 0x04 -> Uncompressed 0x03 -> Compressed 0x02 -> Compressed bs <- Get.getByteString $ repLength pubKeyFormat case getPubKey bs of Left error -> fail $ "failed deserializing public key: " ++ error Right pubKey -> return $ PublicKeyRep pubKeyFormat pubKey where getPubKey :: ByteString -> Either String PublicKey getPubKey bs = do ecPoint <- ecPointFromOct btcEcGroup bs let (x, y) = ecPointToAffineGFp btcEcGroup ecPoint btcPubKey = PublicKey btcCurve (Point x y) return btcPubKey repLength Uncompressed = 65 repLength Compressed = 33
clample/lamdabtc
backend/src/BitcoinCore/Keys.hs
bsd-3-clause
5,582
0
14
1,009
1,335
731
604
148
5
-- | Memory-benchmark driver: weighs the euclid and fix benchmark groups.
module Main where

import Weigh
import Tidal.UIB

main :: IO ()
main = mainWith benchmarks
  where
    -- Run both benchmark specifications, in this order.
    benchmarks = do
      euclidB
      fixB
d0kt0r0/Tidal
bench/Memory/Main.hs
gpl-3.0
113
0
7
35
37
20
17
8
1
{-# LANGUAGE OverloadedStrings #-}
{- |
Module      : Network.MPD.Commands.Mount
Copyright   : (c) Joachim Fasting 2014
License     : MIT

Maintainer  : joachifm@fastmail.fm
Stability   : stable
Portability : unportable

Mounting remote storage.
-}

module Network.MPD.Commands.Mount
    ( mount
    , unmount
    , listMounts
    , listNeighbors
    ) where

import qualified Network.MPD.Applicative.Internal as A
import qualified Network.MPD.Applicative.Mount as A
import Network.MPD.Core

-- | Mount the remote storage at the given path.
mount :: (MonadMPD m) => String -> String -> m ()
mount path uri = A.runCommand (A.mount path uri)

-- | Unmount the storage mounted at the given path.
unmount :: (MonadMPD m) => String -> m ()
unmount path = A.runCommand (A.unmount path)

-- | List all mounts as (path, uri) pairs.
listMounts :: (MonadMPD m) => m [(String, String)]
listMounts = A.runCommand A.listMounts

-- | List discovered network neighbors as (uri, name) pairs.
listNeighbors :: (MonadMPD m) => m [(String, String)]
listNeighbors = A.runCommand A.listNeighbors
bens/libmpd-haskell
src/Network/MPD/Commands/Mount.hs
lgpl-2.1
847
0
9
154
208
121
87
17
1
-- | Test module exercising imports from a nested module namespace;
-- prints the value 'r' (presumably exported by "NestedImporting2.A",
-- the only import — confirm against that module).
module NestedImporting2 where

import NestedImporting2.A

main :: Fay ()
main = print r
fpco/fay
tests/NestedImporting2.hs
bsd-3-clause
88
0
6
15
27
15
12
4
1
-- | Minimal example program: prints a fixed greeting.
module Root.Src.Main where

main :: IO ()
main = putStrLn "Hello Haskell World!"
codeboardio/kali
test/src_examples/haskell/several_files3/Root/Src/Main.hs
mit
68
0
7
9
18
10
8
2
1
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}

-- |
-- Module      :  Yi.Keymap.Vim.Ex.Commands.Write
-- License     :  GPL-2
-- Maintainer  :  yi-devel@googlegroups.com
-- Stability   :  experimental
-- Portability :  portable
module Yi.Keymap.Vim.Ex.Commands.Write (parse) where

import Control.Applicative (Alternative ((<|>)), Applicative ((*>)), (<$>))
import Control.Monad (void, when)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, pack)
import qualified Text.ParserCombinators.Parsec as P (anyChar, many, many1, space, string, try)
import Yi.Buffer (BufferRef)
import Yi.Editor (printMsg)
import Yi.File (fwriteBufferE, viWrite, viWriteTo)
import Yi.Keymap (Action (YiA), YiM)
import Yi.Keymap.Vim.Common (EventString)
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common (forAllBuffers, impureExCommand, needsSaving, parse)
import Yi.Keymap.Vim.Ex.Types (ExCommand (cmdAction, cmdShow))

-- | Parse an ex \"write\" command. Accepts @:write@ or @:w@, optionally
-- followed by @all@/@a@ (write all buffers) or by whitespace and a file
-- name (write current buffer to that file).
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ (P.try (P.string "write") <|> P.string "w") *>
    (parseWriteAs <|> parseWrite)
  where
    -- Plain write, with an optional "all"/"a" suffix toggling write-all.
    parseWrite = do
      alls <- P.many (P.try ( P.string "all") <|> P.string "a")
      return $! writeCmd $ not (null alls)
    -- "write <filename>": at least one space, then the rest is the name.
    parseWriteAs = do
      void $ P.many1 P.space
      filename <- T.pack <$> P.many1 P.anyChar
      return $! writeAsCmd filename

-- | Build the command for @:w@ / @:wall@. With the all-flag set, every
-- buffer needing a save is written and a confirmation message printed.
-- NOTE(review): cmdShow renders as "writeall" (no space) in the all case —
-- presumably intentional (vim's :writeall), but worth confirming.
writeCmd :: Bool -> ExCommand
writeCmd allFlag = Common.impureExCommand {
    cmdShow = "write" <> if allFlag then "all" else ""
  , cmdAction = YiA $ if allFlag
      then Common.forAllBuffers tryWriteBuffer >> printMsg "All files written"
      else viWrite
  }

-- | Build the command for @:w <filename>@.
writeAsCmd :: T.Text -> ExCommand
writeAsCmd filename = Common.impureExCommand {
    cmdShow = "write " <> filename
  , cmdAction = YiA $ viWriteTo filename
  }

-- | Write a buffer only if it has unsaved changes.
tryWriteBuffer :: BufferRef -> YiM ()
tryWriteBuffer buf = do
  ns <- Common.needsSaving buf
  when ns . void $ fwriteBufferE buf
TOSPIO/yi
src/library/Yi/Keymap/Vim/Ex/Commands/Write.hs
gpl-2.0
2,270
0
16
687
571
330
241
40
3
-- {-# LANGUAGE NoImplicitPrelude #-} -- {-# LANGUAGE QuasiQuotes #-} -- {-# LANGUAGE TemplateHaskell #-} -- | Test suite for GHCi like applications including both GHCi and Intero. module Stack.GhciSpec where import Test.Hspec spec :: Spec spec = return () {- Commented out as part of the fix for https://github.com/commercialhaskell/stack/issues/3309 Not sure if maintaining this test is worth the effort. import qualified Data.ByteString.Lazy as LBS import qualified Data.Map as M import qualified Data.Set as S import qualified Data.Text as T import qualified Data.Text.Encoding as T import Distribution.License (License (BSD3)) import qualified Distribution.ModuleName as ModuleName import Distribution.PackageDescription (BuildType(..)) import Stack.Prelude import Stack.Types.Package import Stack.Types.PackageName import Stack.Types.Version import Test.Hspec import NeatInterpolation import Path import Path.Extra (pathToText) import qualified System.FilePath as FP import Stack.Ghci import Stack.Ghci.Script (scriptToLazyByteString) import Stack.Ghci.PortableFakePaths textToLazy :: Text -> LBS.ByteString textToLazy = LBS.fromStrict . T.encodeUtf8 -- | Matches two strings, after converting line-ends in the second to Unix ones -- (in a hacky way) and converting both to the same type. Workaround for -- https://github.com/nikita-volkov/neat-interpolation/issues/14. 
shouldBeLE :: LBS.ByteString -> Text -> Expectation shouldBeLE actual expected = shouldBe actual (textToLazy $ T.filter (/= '\r') expected) baseProjDir, projDirA, projDirB :: Path Abs Dir baseProjDir = $(mkAbsDir $ defaultDrive FP.</> "Users" FP.</> "someone" FP.</> "src") projDirA = baseProjDir </> $(mkRelDir "project-a") projDirB = baseProjDir </> $(mkRelDir "project-b") relFile :: Path Rel File relFile = $(mkRelFile $ "exe" FP.</> "Main.hs") absFile :: Path Abs File absFile = projDirA </> relFile projDirAT, projDirBT, relFileT, absFileT :: Text projDirAT = pathToText projDirA projDirBT = pathToText projDirB relFileT = pathToText relFile absFileT = pathToText absFile spec :: Spec spec = do describe "GHCi" $ do describe "Script rendering" $ do describe "should render GHCi scripts" $ do it "with one library package" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage Nothing [] res `shouldBeLE` ghciScript_projectWithLib it "with one main package" $ do let res = scriptToLazyByteString $ renderScriptGhci [] (Just absFile) [] res `shouldBeLE` ghciScript_projectWithMain it "with one library and main package" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage (Just absFile) [] res `shouldBeLE` ghciScript_projectWithLibAndMain it "with multiple library packages" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_multiplePackages Nothing [] res `shouldBeLE` ghciScript_multipleProjectsWithLib describe "should render intero scripts" $ do it "with one library package" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage Nothing [] res `shouldBeLE` interoScript_projectWithLib it "with one main package" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage (Just absFile) [] res `shouldBeLE` interoScript_projectWithMain it "with one library and main package" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage (Just 
absFile) [] res `shouldBeLE` interoScript_projectWithLibAndMain it "with multiple library packages" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_multiplePackages Nothing [] res `shouldBeLE` interoScript_multipleProjectsWithLib -- Exptected Intero scripts interoScript_projectWithLib :: Text interoScript_projectWithLib = [text| :cd-ghc $projDirAT :add Lib.A :module + Lib.A |] interoScript_projectWithMain :: Text interoScript_projectWithMain = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirAT :add $absFileT :module + Lib.A |] interoScript_projectWithLibAndMain :: Text interoScript_projectWithLibAndMain = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirAT :add $absFileT :module + Lib.A |] interoScript_multipleProjectsWithLib :: Text interoScript_multipleProjectsWithLib = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirBT :add Lib.B :module + Lib.A Lib.B |] -- Expected GHCi Scripts ghciScript_projectWithLib :: Text ghciScript_projectWithLib = [text| :add Lib.A :module + Lib.A |] ghciScript_projectWithMain :: Text ghciScript_projectWithMain = [text| :add $absFileT :module + |] ghciScript_projectWithLibAndMain :: Text ghciScript_projectWithLibAndMain = [text| :add Lib.A :add $absFileT :module + Lib.A |] ghciScript_multipleProjectsWithLib :: Text ghciScript_multipleProjectsWithLib = [text| :add Lib.A :add Lib.B :module + Lib.A Lib.B |] -- Expected Legacy GHCi scripts ghciLegacyScript_projectWithMain :: Text ghciLegacyScript_projectWithMain = [text| :add :add $absFileT :module + |] ghciLegacyScript_projectWithLibAndMain :: Text ghciLegacyScript_projectWithLibAndMain = [text| :add Lib.A :add $absFileT :module + Lib.A |] ghciLegacyScript_multipleProjectsWithLib :: Text ghciLegacyScript_multipleProjectsWithLib = [text| :add Lib.A Lib.B :module + Lib.A Lib.B |] -- Sample GHCi load configs packages_singlePackage :: [GhciPkgInfo] packages_singlePackage = [ GhciPkgInfo { ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"] , ghciPkgDir 
= projDirA , ghciPkgName = $(mkPackageName "package-a") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { packageName = $(mkPackageName "package-a") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } ] packages_multiplePackages :: [GhciPkgInfo] packages_multiplePackages = [ GhciPkgInfo { ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"] , ghciPkgDir = projDirA , ghciPkgName = $(mkPackageName "package-a") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { packageName = $(mkPackageName "package-a") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } , GhciPkgInfo { ghciPkgModules = S.fromList [ModuleName.fromString "Lib.B"] , ghciPkgDir = projDirB , ghciPkgName = $(mkPackageName "package-b") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { 
packageName = $(mkPackageName "package-b") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } ] -}
MichielDerhaeg/stack
src/test/Stack/GhciSpec.hs
bsd-3-clause
9,341
0
6
2,415
31
20
11
4
1
{-# LANGUAGE TypeFamilies, QuasiQuotes, TemplateHaskell, MultiParamTypeClasses, OverloadedStrings #-} module YesodCoreTest.Csrf (csrfSpec, Widget, resourcesApp) where import Yesod.Core import Test.Hspec import Network.Wai import Network.Wai.Test import Web.Cookie import qualified Data.Map as Map import Data.ByteString.Lazy (fromStrict) import Data.Monoid ((<>)) data App = App mkYesod "App" [parseRoutes| / HomeR GET POST |] instance Yesod App where yesodMiddleware = defaultYesodMiddleware . defaultCsrfMiddleware getHomeR :: Handler Html getHomeR = defaultLayout [whamlet| <p> Welcome to my test application. |] postHomeR :: Handler Html postHomeR = defaultLayout [whamlet| <p> Welcome to my test application. |] runner :: Session () -> IO () runner f = toWaiApp App >>= runSession f csrfSpec :: Spec csrfSpec = describe "A Yesod application with the defaultCsrfMiddleware" $ do it "serves a includes a cookie in a GET request" $ runner $ do res <- request defaultRequest assertStatus 200 res assertClientCookieExists "Should have an XSRF-TOKEN cookie" defaultCsrfCookieName it "200s write requests with the correct CSRF header, but no param" $ runner $ do getRes <- request defaultRequest assertStatus 200 getRes csrfValue <- fmap setCookieValue requireCsrfCookie postRes <- request (defaultRequest { requestMethod = "POST", requestHeaders = [(defaultCsrfHeaderName, csrfValue)] }) assertStatus 200 postRes it "200s write requests with the correct CSRF param, but no header" $ runner $ do getRes <- request defaultRequest assertStatus 200 getRes csrfValue <- fmap setCookieValue requireCsrfCookie let body = "_token=" <> csrfValue postRes <- srequest $ SRequest (defaultRequest { requestMethod = "POST", requestHeaders = [("Content-Type","application/x-www-form-urlencoded")] }) (fromStrict body) assertStatus 200 postRes it "403s write requests without the CSRF header" $ runner $ do res <- request (defaultRequest { requestMethod = "POST" }) assertStatus 403 res it "403s write requests with 
the wrong CSRF header" $ runner $ do getRes <- request defaultRequest assertStatus 200 getRes csrfValue <- fmap setCookieValue requireCsrfCookie res <- request (defaultRequest { requestMethod = "POST", requestHeaders = [(defaultCsrfHeaderName, csrfValue <> "foo")] }) assertStatus 403 res it "403s write requests with the wrong CSRF param" $ runner $ do getRes <- request defaultRequest assertStatus 200 getRes csrfValue <- fmap setCookieValue requireCsrfCookie let body = "_token=" <> (csrfValue <> "foo") postRes <- srequest $ SRequest (defaultRequest { requestMethod = "POST", requestHeaders = [("Content-Type","application/x-www-form-urlencoded")] }) (fromStrict body) assertStatus 403 postRes requireCsrfCookie :: Session SetCookie requireCsrfCookie = do cookies <- getClientCookies case Map.lookup defaultCsrfCookieName cookies of Just c -> return c Nothing -> error "Failed to lookup CSRF cookie"
MaxGabriel/yesod
yesod-core/test/YesodCoreTest/Csrf.hs
mit
3,256
0
18
740
756
378
378
63
2
{-# LANGUAGE OverloadedStrings #-}
-- | Common imports and helpers shared by the test suite.
module TestImport
    ( module Yesod.Test
    , module Model
    , module Foundation
    , module Database.Persist
    , runDB
    , Spec
    , Example
    ) where

import Yesod.Test
import Database.Persist hiding (get)
import Database.Persist.Sql (SqlPersistM, runSqlPersistMPool)
import Control.Monad.IO.Class (liftIO)

import Foundation
import Model

type Spec = YesodSpec App
type Example = YesodExample App

-- | Run a database action against the test application's connection pool.
runDB :: SqlPersistM a -> Example a
runDB query = do
    site <- getTestYesod
    liftIO $ runSqlPersistMPool query (connPool site)
zhy0216/haskell-learning
yosog/tests/TestImport.hs
mit
583
0
8
118
149
87
62
21
1
-- | Compilation of raw (region, token) pairs into puzzle constraints.
module Crosscells.Puzzle
  ( Op(..)
  , Constraint(..)
  , Puzzle
  , compile
  ) where

import Data.List
import Data.Maybe
import Data.Ord

import Crosscells.Region
import Crosscells.Tokens

-- | A puzzle is a list of constraints, each paired with the ordered
-- boxes (coordinate plus arithmetic operation) it ranges over.
type Puzzle = [(Constraint, [(Coord, Op)])]

-- | An arithmetic operation contributed by a box.
data Op = Add Int | Mul Int deriving (Read, Show, Ord, Eq)

-- | A constraint on a run of boxes: either how many boxes are selected
-- ('Count') or what their combined arithmetic value must be ('Arith').
data Constraint = Count Int | Arith Int deriving (Read, Show, Ord, Eq)

-- | Compile every element of the board; elements that do not give rise
-- to exactly one constraint are dropped (via 'mapMaybe').
compile :: [(Region, Token)] -> Puzzle
compile elts = mapMaybe (\(x,y) -> compile1 x y elts) elts

-- | The operations a token contributes as a box; non-box tokens
-- contribute nothing.
compileBox :: Token -> [Op]
compileBox (Plus n) = [Add n]
compileBox (Times n) = [Mul n]
compileBox _ = []

-- | Compile a single element, given the whole board for context.
-- Only 'Arrow' and 'Box' tokens produce constraints.
compile1 :: Region -> Token -> [(Region, Token)] -> Maybe (Constraint, [(Coord, Op)])
compile1 region token =
  case token of
    -- An arrow: the constraint lives behind the arrow (opposite
    -- direction), the boxes lie ahead of it, ordered along the arrow's
    -- direction. NOTE(review): exact geometry depends on 'pointsTo' /
    -- 'topLeft' from Crosscells.Region — not visible here.
    Arrow dir -> compile1' cnstPredicate boxPredicate boxOrder
      where
      cnstPredicate = pointsTo (flipDirection dir) (topLeft region)
      boxPredicate = pointsTo dir (topLeft region)
      -- Sort boxes in reading order along the arrow; U and L reverse
      -- the natural row/column order.
      boxOrder = sortBy $
        case dir of
          U -> flip (comparing (coordRow . fst))
          D -> comparing (coordRow . fst)
          L -> flip (comparing (coordCol . fst))
          R -> comparing (coordCol . fst)
    -- A box region: both the constraint and the boxes must be contained
    -- in the region itself; no reordering.
    Box -> compile1' predicate predicate id
      where predicate = contained region . topLeft
    _ -> const Nothing

-- Shared worker: succeed only when exactly one raw constraint matches
-- the constraint predicate; otherwise the element is ambiguous (or
-- constraint-free) and compiles to Nothing.
compile1' cnstPredicate boxPredicate boxOrder elts =
  case rawConstraints of
    [c] -> Just c
    _ -> Nothing
  where
  -- All boxes selected by the box predicate, in the requested order.
  boxes = boxOrder
    [ (topLeft reg, box) | (reg, elt) <- elts, boxPredicate reg, box <- compileBox elt]
  -- Candidate constraints: bracketed numbers become counts, plain
  -- numbers become arithmetic targets; other tokens are ignored.
  rawConstraints =
    [ c | (reg, elt) <- elts
        , cnstPredicate reg
        , c <- case elt of
                 Bracketed n -> [(Count n, boxes)]
                 Number n -> [(Arith n, boxes)]
                 _ -> [] ]
glguy/5puzzle
Crosscells/Puzzle.hs
isc
1,792
0
18
546
677
363
314
49
6
{-# LANGUAGE OverloadedStrings #-} module SymBoilerPlate where import SymMap import Control.Monad import Data.Aeson import Data.HashMap.Strict as H import System.IO.Unsafe import System.Random {-@ nonDet :: a -> x:Int -> {v:Int | 0 <= v && v < x } @-} nonDet :: a -> Int -> Int nonDet _ x = nonDetRange 0 x {-@ nonDetRange :: x:Int -> y:Int -> {v:Int | x <= v && v < y} @-} nonDetRange :: Int -> Int -> Int nonDetRange x y = unsafePerformIO $ do g <- getStdGen (x, _) <- return $ randomR (x, y-1) g return x instance DefaultMap Int where def = 0 instance DefaultMap (Val p) where def = VUnInit {-@ data Val p = VUnit {} | VUnInit {} | VInt { vInt :: Int } | VString { vString :: String } | VSet { vSetName :: String } | VPid { vPid :: p } | VInR { vInR :: Val p } | VInL { vInL :: Val p } | VPair { vLeft :: Val p, vRight :: Val p } @-} data Val p = VUnit {} | VUnInit {} | VInt { vInt :: Int } | VString { vString :: String } | VSet { vSetName :: String } | VPid { vPid :: p } | VInR { vInR :: Val p } | VInL { vInL :: Val p } | VPair { vLeft :: Val p, vRight :: Val p } deriving (Show) instance (FromJSON p) => FromJSON (Val p) where parseJSON (Object o) = case H.toList o of [(key,val)] | key == "VUnit" -> return VUnit | key == "VUnInit" -> return VUnInit | key == "VInt" -> VInt <$> parseJSON val | key == "VString" -> VString <$> parseJSON val | key == "VSet" -> VSet <$> parseJSON val | key == "VPid" -> VPid <$> parseJSON val | key == "VInR" -> VInR <$> parseJSON val | key == "VInL" -> VInL <$> parseJSON val | key == "VPair" -> do (l,r) <- parseJSON val return (VPair l r) | otherwise -> mzero parseJSON _ = mzero instance (ToJSON p) => ToJSON (Val p) where toJSON VUnit = object [ "VUnit" .= Null ] toJSON VUnInit = object [ "VUnInit" .= Null ] toJSON (VInt i) = object [ "VInt" .= toJSON i ] toJSON (VString s) = object [ "VString" .= toJSON s ] toJSON (VSet s) = object [ "VSet" .= toJSON s ] toJSON (VPid p) = object [ "VPid" .= toJSON p ] toJSON (VInR v) = object [ "VInR" .= toJSON v 
] toJSON (VInL v) = object [ "VInL" .= toJSON v ] toJSON (VPair l r) = object [ "VPair" .= toJSON (l,r) ] liquidAssert p x = if p then Right x else Left x isVUnit, isVUnInit, isVInt, isVString, isVPid, isVInR, isVInL, isVPair, isVSet :: Val p -> Bool isVUnit VUnit{} = True isVUnit _ = False isVUnInit VUnInit{} = True isVUnInit _ = False isVInt VInt{} = True isVInt _ = False isVString VString{} = True isVString _ = False isVSet VSet{} = True isVSet _ = False isVPid VPid{} = True isVPid _ = False isVInR VInR{} = True isVInR _ = False isVInL VInL{} = True isVInL _ = False isVPair VPair{} = True isVPair _ = False {-@ measure isVUnit @-} {-@ measure isVUnInit @-} {-@ measure isVInt @-} {-@ measure isVString @-} {-@ measure isVPid @-} {-@ measure isVInL @-} {-@ measure isVInR @-} {-@ measure isVPair @-}
abakst/symmetry
checker/include/SymBoilerPlateQC.hs
mit
3,533
2
14
1,312
1,057
551
506
75
2
-- Project Euler, problem 5:
-- Get the lowest common multiple of all integers between 1 and 20, that is,
-- the lowest number that is divisible by all numbers from 1 to 20.

-- | Print the answer to problem 5.
main :: IO ()
main = print getProblem5Value

-- | The least common multiple of the integers 1 through 20.
getProblem5Value :: Integer
getProblem5Value = getLeastCommonMultiple [1..20]

-- | Least common multiple of a list of numbers.
--
-- Folds the Prelude's 'lcm' over the list instead of the previous
-- hand-rolled incremental factorisation. This is simpler, uses the
-- standard library, and is total for all inputs: the old 'divByAll'
-- helper recorded a literal 0 as a "factor" when the list contained 0
-- and then divided by it, crashing on any later element, whereas
-- @lcm x 0 == 0@ by definition. The empty list yields 1, the identity
-- of 'lcm'.
getLeastCommonMultiple :: [Integer] -> Integer
getLeastCommonMultiple = foldr lcm 1
jchitel/ProjectEuler.hs
Problems/Problem0005.hs
mit
1,334
0
10
295
246
128
118
15
2
{-# Language BangPatterns #-} {-# Language GeneralizedNewtypeDeriving #-} {-# Language Rank2Types #-} module Unison.Runtime.Bits where import Data.Tuple (swap) import Data.List import Unison.Runtime.Unfold (Unfold) import qualified Unison.Runtime.Unfold as U newtype Bits = Bits { bitstream :: Unfold Bit } deriving (Eq,Ord,Show) data Bit = Zero | One | Both deriving (Eq,Ord,Show) matches :: Bit -> Bool -> Bool matches Both _ = True matches Zero False = True matches One True = True matches _ _ = False type Score = Double from01s :: [Int] -> Bits from01s bs = fromList (map f bs) where f 0 = Zero f 1 = One f i = error ("from01s: must be 0 or 1, got " ++ show i) fromList :: [Bit] -> Bits fromList bs = Bits (U.fromList bs) toList :: Bits -> [Bit] toList (Bits bs) = U.toList bs -- | Achieves maximum value of n/2 when both `zeros` and `ones` are n/2. -- As distribution is more skewed toward either bit, score approaches 0. -- Satisfies: `score n n 0 == 0`, `score n 0 n == 0`, `score n 0 0 == 0`. -- There is a linear penalty if zeros + ones < n. So `score 10 4 4` will -- be less than `score 10 5 5`. 
score :: Double -> Double -> Double -> Score score n zeros ones = let p0 = zeros / n; p1 = ones / n in p0 * (n - zeros) + p1 * (n - ones) bitCounts' :: (Double -> Double -> Bool) -> [Bit] -> (Double,Double) bitCounts' halt bs = go 0 0 bs where go !zeros !ones [] = (zeros, ones) go !zeros !ones (b:bs) | halt zeros ones = (zeros, ones) | otherwise = case b of Zero -> go (zeros + 1) ones bs One -> go zeros (ones + 1) bs Both -> go (zeros + 1) (ones + 1) bs mostSignificantBit :: [Bits] -> Maybe (Int, Score) mostSignificantBit bs = go (Nothing,0) (U.columns (map bitstream bs)) where n = fromIntegral (length bs) lengthGT xs n = not (null (dropWhile (\(_,m) -> m <= n) (xs `zip` [1..]))) value = maybe 0 snd rem z o = (n-z) `min` (n-o) maxPossible z o = score n (z `max` o) (m + ((n/2 - m) `min` rem z o)) where m = z `min` o stop best z o | z `max` o > n/2 = maxPossible z o <= best stop _ _ _ = False go (!best,!_) [] = best go (!best,!i) (bs:tl) | not (lengthGT bs (value best * 2)) = best | otherwise = case bitCounts' (stop (value best)) bs of (z,o) -> go (if s > value best then Just (i, s) else best, i + 1) tl where s = score n z o bitCounts :: [Bits] -> [(Int,Int)] bitCounts bs = sums (map bitstream bs) where sumCol = foldl' step (0,0) where step (z,o) b = case b of Zero -> (z+1,o); One -> (z,o+1); Both -> (z+1,o+1) sums [] = [] sums bs = let (col, bs') = unzip [ (b, tl) | Just (b, tl) <- map U.uncons bs ] in (if null bs' then [] else sumCol col : sums bs') mostSignificantBits :: [Bits] -> [(Int,Score)] mostSignificantBits bs = go (map rank $ bitCounts bs) where rank = let n = fromIntegral (length bs) in \(zeros, ones) -> score n (fromIntegral zeros) (fromIntegral ones) go ranks = map swap $ sortBy (flip compare) (ranks `zip` [0..]) sample :: [Bits] sample = [ from01s[1,0] , from01s[1,0] , from01s[1,1,0,1] , from01s[1,1,1,1] , from01s[1,1,0,1] , from01s[1,1,0,1,1] , from01s[1,1,1,1] , from01s[1,1,0,1] , from01s[1,1,1,1] ] sampleMsb :: Maybe (Int,Score) sampleMsb = 
mostSignificantBit sample
nightscape/platform
node/src/Unison/Runtime/Bits.hs
mit
3,275
0
16
765
1,594
861
733
80
5
{-# LANGUAGE MultiParamTypeClasses #-}

-- module

module RCL.Error where

-- imports

import Control.Monad.Error

-- exported functions

-- | Lift an 'Either' into any 'MonadError': a 'Left' is rethrown with
-- 'throwError', a 'Right' is returned unchanged.
withError :: MonadError a m => Either a b -> m b
withError (Left err)  = throwError err
withError (Right val) = return val

-- | Run an action and report whether it raised an error ('True') or
-- completed normally ('False'). Any raised error is swallowed.
testError :: MonadError e m => m a -> m Bool
testError action =
    catchError (action >> return False) (\_ -> return True)
nicuveo/RCL
src/RCL/Error.hs
mit
353
0
8
78
109
57
52
7
1
{-# LANGUAGE OverloadedStrings #-}

-- | The data-block object of a Dark Sky API response: a list of data
-- points plus an optional textual summary and icon.
module DarkSky.Response.DataBlock where

import DarkSky.Response.Icon
import DarkSky.Response.DataPoint (DataPoint)
import Data.Aeson
import Data.Text (Text)

data DataBlock = DataBlock
  { data' :: [DataPoint]
  , summary :: Maybe Text
  , icon :: Maybe Icon
  } deriving (Eq, Show)

instance FromJSON DataBlock where
  -- Applicative style: "data" is required, "summary" and "icon" are
  -- optional keys.
  parseJSON =
    withObject "datablock" $ \o ->
      DataBlock <$> o .: "data" <*> o .:? "summary" <*> o .:? "icon"

-- | A 'DataBlock' with no data points and no metadata.
emptyDataBlock :: DataBlock
emptyDataBlock = DataBlock [] Nothing Nothing
peterstuart/dark-sky
src/DarkSky/Response/DataBlock.hs
mit
737
0
12
197
203
116
87
29
1
import Chorale.Test.Common as ChoraleTestCommon
import Test.Framework

-- | Run the test suite with a raised QuickCheck test count.
main :: IO ()
main = defaultMainWithArgs testsToRun extraArguments
  where
    extraArguments = ["--maximum-generated-tests=1000"]

-- | Every test comes from the common test module.
testsToRun :: [Test]
testsToRun = ChoraleTestCommon.tests
mocnik-science/chorale
tests/Test.hs
mit
218
0
6
24
52
30
22
6
1
-- 54 - 60 -- https://wiki.haskell.org/99_questions/54A_to_60 module NinetyNine.P5X where import Data.List (findIndex, genericIndex) import Data.Maybe (fromJust) data BTree a = Empty | Branch a (BTree a) (BTree a) deriving (Eq, Ord, Show) {- 54A. Check whether a given term represents a binary tree. In Prolog or Lisp, one writes a predicate to do this. Example in Lisp: * (istree (a (b nil nil) nil)) T * (istree (a (b nil nil))) NIL Haskell's type system ensures that all terms of type Tree a are binary trees: it is just not possible to construct an invalid tree with this type. Hence, it is redundant to introduce a predicate to check this property: it would always return True. -} {- 55. Construct completely balanced binary trees. In a completely balanced binary tree, the following property holds for every node: The number of nodes in its left subtree and the number of nodes in its right subtree are almost equal, which means their difference is not greater than one. Write a function cbal-tree to construct completely balanced binary trees for a given number of nodes. The predicate should generate all solutions via backtracking. Put the letter 'x' as information into all nodes of the tree. Example: * cbal-tree(4,T). 
T = t(x, t(x, nil, nil), t(x, nil, t(x, nil, nil))) ; T = t(x, t(x, nil, nil), t(x, t(x, nil, nil), nil)) ; etc......No Example in Haskell, whitespace and "comment diagrams" added for clarity and exposition: *Main> cbalTrees 4 [ -- permutation 1 -- x -- / \ -- x x -- \ -- x Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty (Branch 'x' Empty Empty)) , -- permutation 2 -- x -- / \ -- x x -- / -- x Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' (Branch 'x' Empty Empty) Empty) , -- permutation 3 -- x -- / \ -- x x -- \ -- x Branch 'x' (Branch 'x' Empty (Branch 'x' Empty Empty)) (Branch 'x' Empty Empty) , -- permutation 4 -- x -- / \ -- x x -- / -- x Branch 'x' (Branch 'x' (Branch 'x' Empty Empty) Empty) (Branch 'x' Empty Empty) ] -} cbalTrees :: Integral n => a -> n -> [BTree a] cbalTrees _ 0 = [Empty] cbalTrees x n = f =<< [d .. d + m] where (d, m) = divMod (pred n) 2 f i = Branch x <$> cbalTrees x i <*> cbalTrees x (pred n - i) {- 56. Symmetric binary trees. Let us call a binary tree symmetric if you can draw a vertical line through the root node and then the right subtree is the mirror image of the left subtree. Write a predicate symmetric/1 to check whether a given binary tree is symmetric. Hint: Write a predicate mirror/2 first to check whether one tree is the mirror image of another. We are only interested in the structure, not in the contents of the nodes. Example in Haskell: *Main> symmetric (Branch 'x' (Branch 'x' Empty Empty) Empty) False *Main> symmetric (Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty Empty)) True -} symmetric :: BTree a -> Bool symmetric t = h t t where h Empty Empty = True h (Branch _ lx rx) (Branch _ ly ry) = h lx ry && h rx ly h _ _ = False {- 57. Binary search trees (dictionaries). Use the predicate add/3, developed in chapter 4 of the course, to write a predicate to construct a binary search tree from a list of integer numbers. Example: * construct([3,2,5,7,1],T). 
T = t(3, t(2, t(1, nil, nil), nil), t(5, nil, t(7, nil, nil))) Then use this predicate to test the solution of the problem P56. Example: * test-symmetric([5,3,18,1,4,12,21]). Yes * test-symmetric([3,2,5,7,4]). No Example in Haskell: *Main> construct [3, 2, 5, 7, 1] Branch 3 (Branch 2 (Branch 1 Empty Empty) Empty) (Branch 5 Empty (Branch 7 Empty Empty)) *Main> symmetric . construct $ [5, 3, 18, 1, 4, 12, 21] True *Main> symmetric . construct $ [3, 2, 5, 7, 1] True -} construct :: Ord a => [a] -> BTree a construct = foldl (flip add) Empty where add x Empty = Branch x Empty Empty add x (Branch y l r) | x < y = Branch y (add x l) r add x (Branch y l r) | x > y = Branch y l (add x r) add _ t = t {- 58. Generate-and-test paradigm. Apply the generate-and-test paradigm to construct all symmetric, completely balanced binary trees with a given number of nodes. Example: * sym-cbal-trees(5,Ts). Ts = [t(x, t(x, nil, t(x, nil, nil)), t(x, t(x, nil, nil), nil)), t(x, t(x, t(x, nil, nil), nil), t(x, nil, t(x, nil, nil)))] Example in Haskell: *Main> symCbalTrees 5 [ Branch 'x' (Branch 'x' Empty (Branch 'x' Empty Empty)) (Branch 'x' (Branch 'x' Empty Empty) Empty) , Branch 'x' (Branch 'x' (Branch 'x' Empty Empty) Empty) (Branch 'x' Empty (Branch 'x' Empty Empty)) ] -} symCbalTrees :: Integral n => a -> n -> [BTree a] symCbalTrees x = filter symmetric . cbalTrees x {- 59. Construct height-balanced binary trees. In a height-balanced binary tree, the following property holds for every node: The height of its left subtree and the height of its right subtree are almost equal, which means their difference is not greater than one. Construct a list of all height-balanced binary trees with the given element and the given maximum height. Example: ?- hbal_tree(3,T). 
T = t(x, t(x, t(x, nil, nil), t(x, nil, nil)), t(x, t(x, nil, nil), t(x, nil, nil))) ; T = t(x, t(x, t(x, nil, nil), t(x, nil, nil)), t(x, t(x, nil, nil), nil)) ; etc......No Example in Haskell: *Main> take 4 $ hbalTreesH 'x' 3 [ Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty (Branch 'x' Empty Empty)) , Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' (Branch 'x' Empty Empty) Empty) , Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty Empty)) , Branch 'x' (Branch 'x' Empty (Branch 'x' Empty Empty)) (Branch 'x' Empty Empty) ] -} hbalTreesH :: Integral n => a -> n -> [BTree a] hbalTreesH _ 0 = [Empty] hbalTreesH x 1 = [Branch x Empty Empty] hbalTreesH x h = f =<< lrhs where lrhs = [(pred . pred, pred), (pred, pred), (pred, pred . pred)] f (lh, rh) = Branch x <$> hbalTreesH x (lh h) <*> hbalTreesH x (rh h) hbalTreesH' :: Integral n => a -> n -> [BTree a] hbalTreesH' x = genericIndex tss where tss = [Empty] : [Branch x Empty Empty] : zipWith h tss (tail tss) h xs ys = f =<< [(xs, ys), (ys, ys), (ys, xs)] f (ls, rs) = Branch x <$> ls <*> rs {- 60. Construct height-balanced binary trees with a given number of nodes. Consider a height-balanced binary tree of height H. What is the maximum number of nodes it can contain? Clearly, MaxN = 2**H - 1. However, what is the minimum number MinN? This question is more difficult. Try to find a recursive statement and turn it into a function minNodes that returns the minimum number of nodes in a height-balanced binary tree of height H. On the other hand, we might ask: what is the maximum height H a height-balanced binary tree with N nodes can have? Write a function maxHeight that computes this. Now, we can attack the main problem: construct all the height-balanced binary trees with a given number of nodes. Find out how many height-balanced trees exist for N = 15. Example in Prolog: ?- count_hbal_trees(15,C). 
C = 1553 Example in Haskell: *Main> length $ hbalTrees 'x' 15 1553 *Main> map (hbalTrees 'x') [0..3] [ [ Empty ] , [ Branch 'x' Empty Empty ] , [ Branch 'x' Empty (Branch 'x' Empty Empty) , Branch 'x' (Branch 'x' Empty Empty) Empty ] , [ Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty Empty) ] ] -} maxNodesByHeight :: Integral n => n -> n maxNodesByHeight = pred . (2 ^) minNodesByHeight :: Integral n => n -> n minNodesByHeight = genericIndex minNodesSequence maxHeightByNodes :: Integral n => n -> n maxHeightByNodes = fromIntegral . pred . fromJust . flip findIndex minNodesSequence . (<) minHeightByNodes :: Integral n => n -> n minHeightByNodes = ceiling . logBase 2 . fromIntegral . succ minNodesSequence :: Integral n => [n] minNodesSequence = fromIntegral <$> ns where ns = 0 : 1 : zipWith ((+) . succ) ns (tail ns) :: [Integer] countNodes :: Integral n => BTree a -> n countNodes Empty = 0 countNodes (Branch _ l r) = succ $ countNodes l + countNodes r hbalTrees :: Integral n => a -> n -> [BTree a] hbalTrees x n = [t | h <- [minHeightByNodes n .. maxHeightByNodes n], t <- hbalTreesH x h, countNodes t == n]
airt/Haskell-99
src/NinetyNine/P5X.hs
mit
8,529
0
11
2,036
1,149
593
556
50
4
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings, TupleSections, LambdaCase #-}
-- | "prevPost" / "nextPost" context fields for Hakyll, linking each post
-- to its chronological neighbours.
module PrevNextPost where

import Control.Applicative (Alternative (..))
import Data.Char
import Data.Maybe
import Data.Monoid
import qualified Data.Set as S
import Hakyll
import Text.Pandoc.Options
import System.FilePath (takeBaseName, takeFileName, takeDirectory, joinPath, splitPath, replaceExtension)
import Control.Lens hiding (Context)
import Control.Monad
import Data.List
import qualified Data.Map as M
import qualified Data.MultiMap as MM
import Text.Printf
--import qualified Data.Tree as T
import Debug.Trace
import Utilities
import HakyllUtils
import Data.Time.Format (parseTime, defaultTimeLocale)
-- import System.Locale (defaultTimeLocale)
import Data.Time.Clock (UTCTime)

-- | Context exposing "nextPost" and "prevPost" URL fields for a post.
prevNextContext :: Pattern -> Context String
prevNextContext postsGlob = field "nextPost" (nextPostUrl postsGlob)
                         <> field "prevPost" (previousPostUrl postsGlob)

-- | URL of the post published immediately before the given one.
previousPostUrl :: Pattern -> Item String -> Compiler String
previousPostUrl = neighbourPostUrl itemBefore

-- | URL of the post published immediately after the given one.
nextPostUrl :: Pattern -> Item String -> Compiler String
nextPostUrl = neighbourPostUrl itemAfter

-- | Shared implementation of the prev/next lookup (previously two
-- copy-pasted bodies): sort all posts matching the glob by date, pick
-- the neighbour with the supplied selector, and resolve its route to a
-- URL. Fails (via 'empty') when there is no neighbour or no route.
neighbourPostUrl :: ([Identifier] -> Identifier -> Maybe Identifier)
                 -> Pattern -> Item String -> Compiler String
neighbourPostUrl pick postsGlob post = do
    ordPosts <- chronologicalPosts postsGlob
    case pick ordPosts (itemIdentifier post) of
        Just i  -> fmap (maybe empty toUrl) (getRoute i)
        Nothing -> empty

-- | Identifiers of all posts matching the glob, oldest first.
chronologicalPosts :: Pattern -> Compiler [Identifier]
chronologicalPosts postsGlob = do
    posts <- getMatches postsGlob
    dates <- mapM (getItemUTC defaultTimeLocale) posts
    return (map snd (sort (zip dates posts)))

-- | Value associated with the key that follows @x@ in an association
-- list (keys must be unique for a meaningful answer).
itemAfter' :: Eq a => [(a,b)] -> a -> Maybe b
itemAfter' xys x = do
  let (xs, ys) = unzip xys
  x' <- lookup x $ zip xs (tail xs)
  lookup x' $ zip xs ys

-- | The element immediately after @x@, if any.
itemAfter :: Eq a => [a] -> a -> Maybe a
itemAfter xs x = lookup x $ zip xs (tail xs)

-- | Value associated with the key that precedes @x@ in an association
-- list.
itemBefore' :: Eq a => [(a,b)] -> a -> Maybe b
itemBefore' xys x = do
  let (xs, ys) = unzip xys
  x' <- lookup x $ zip (tail xs) xs
  lookup x' $ zip xs ys

-- | The element immediately before @x@, if any.
itemBefore :: Eq a => [a] -> a -> Maybe a
itemBefore xs x = lookup x $ zip (tail xs) xs

-- | URL of an item's route, failing when it has none.
urlOfPost :: Item String -> Compiler String
urlOfPost = fmap (maybe empty $ toUrl) . getRoute . itemIdentifier
holdenlee/philosophocle
src/PrevNextPost.hs
mit
2,716
0
16
715
876
442
434
65
2
module Api.Controllers.User ( authenticate , create , unverifiedEdit , verifyEdit ) where import Api.Types.Fields (UserToken (..)) import Api.Types.Server (ApiActionM, ApiException (..), mailer) import Control.Monad.Reader (asks, lift) import Control.Applicative ((<$>), (<|>)) import Control.Monad.IO.Class (liftIO) import Control.Monad.Trans.Maybe (MaybeT (..), runMaybeT) import Web.Scotty.Trans (header, json, raise) import qualified Api.Mailers.Verify as Verify import qualified Api.Mappers.Resource as Resource import qualified Api.Mappers.PendingUserResource as Pending import qualified Api.Mappers.User as User import qualified Data.Text.Lazy as LT import qualified Data.Text as ST import Api.Helpers.Controller import Api.Types.Resource import Api.Types.PendingUserResource import Api.Types.User authenticate :: ApiActionM s User authenticate = do foundUser <- loginFromHeader case foundUser of Just user -> return user _ -> raise UnauthorizedUser -- only used to register a new device create :: ApiActionM s () create = reqQuery User.insert >>= json -- only used after registration when a user needs to initially connect to a -- contact unverifiedEdit :: User -> ApiActionM s () unverifiedEdit _ = do sendEmail <- lift $ asks mailer fields <- fromParams pending <- reqQuery $ Pending.insert fields liftIO . sendEmail $ Verify.mkEmail pending json ("ok" :: ST.Text) verifyEdit :: ApiActionM s () verifyEdit = do uuid <- reqParam "uuid" user <- reqQuery $ runMaybeT $ do pending <- MaybeT $ Pending.findByUuid uuid _ <- MaybeT $ Just <$> Pending.delete (pend_id pending) resource <- MaybeT (Resource.findByEmail $ email pending) <|> MaybeT (Resource.insert . fromEmail $ email pending) MaybeT . User.update $ User (uid pending) (Just $ res_id resource) json user where uid = pend_userId . pend_fields email = pend_resourceEmail . 
pend_fields -- private functions loginFromHeader :: ApiActionM s (Maybe User) loginFromHeader = do authToken <- header "Authorization" case LT.words <$> authToken of Just ["Token", token] -> do uid <- reqParam "user_id" query $ User.findByLogin . Login uid . UserToken $ LT.toStrict token _ -> raise MissingAuthToken
bendyworks/api-server
lib/Api/Controllers/User.hs
mit
2,392
0
17
530
680
369
311
57
2
{-# LANGUAGE OverloadedStrings #-} import Control.Monad (foldM) import Test.Hspec (Spec, describe, it, shouldBe) import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith) import Forth (ForthError(..), emptyState, evalText, toList) main :: IO () main = hspecWith defaultConfig {configFastFail = True} specs specs :: Spec specs = do let runTexts = fmap toList . foldM (flip evalText) emptyState describe "parsing and numbers" $ it "numbers just get pushed onto the stack" $ runTexts ["1 2 3 4 5"] `shouldBe` Right [1, 2, 3, 4, 5] describe "addition" $ do it "can add two numbers" $ runTexts ["1 2 +"] `shouldBe` Right [3] it "errors if there is nothing on the stack" $ runTexts ["+"] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 +"] `shouldBe` Left StackUnderflow describe "subtraction" $ do it "can subtract two numbers" $ runTexts ["3 4 -"] `shouldBe` Right [-1] it "errors if there is nothing on the stack" $ runTexts ["-"] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 -"] `shouldBe` Left StackUnderflow describe "multiplication" $ do it "can multiply two numbers" $ runTexts ["2 4 *"] `shouldBe` Right [8] it "errors if there is nothing on the stack" $ runTexts ["*"] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 *"] `shouldBe` Left StackUnderflow describe "division" $ do it "can divide two numbers" $ runTexts ["12 3 /"] `shouldBe` Right [4] it "performs integer division" $ runTexts ["8 3 /"] `shouldBe` Right [2] it "errors if dividing by zero" $ runTexts ["4 0 /"] `shouldBe` Left DivisionByZero it "errors if there is nothing on the stack" $ runTexts ["/"] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 /"] `shouldBe` Left StackUnderflow describe "combined arithmetic" $ do it "addition and subtraction" $ runTexts ["1 2 + 4 -"] `shouldBe` Right [-1] it "multiplication 
and division" $ runTexts ["2 4 * 3 /"] `shouldBe` Right [2] describe "dup" $ do it "copies a value on the stack" $ runTexts ["1 dup" ] `shouldBe` Right [1, 1] it "copies the top value on the stack" $ runTexts ["1 2 dup"] `shouldBe` Right [1, 2, 2] it "errors if there is nothing on the stack" $ runTexts ["dup" ] `shouldBe` Left StackUnderflow describe "drop" $ do it "removes the top value on the stack if it is the only one" $ runTexts ["1 drop" ] `shouldBe` Right [] it "removes the top value on the stack if it is not the only one" $ runTexts ["1 2 drop"] `shouldBe` Right [1] it "errors if there is nothing on the stack" $ runTexts ["drop" ] `shouldBe` Left StackUnderflow describe "swap" $ do it "swaps the top two values on the stack if they are the only ones" $ runTexts ["1 2 swap" ] `shouldBe` Right [2, 1] it "swaps the top two values on the stack if they are not the only ones" $ runTexts ["1 2 3 swap"] `shouldBe` Right [1, 3, 2] it "errors if there is nothing on the stack" $ runTexts ["swap" ] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 swap" ] `shouldBe` Left StackUnderflow describe "over" $ do it "copies the second element if there are only two" $ runTexts ["1 2 over" ] `shouldBe` Right [1, 2, 1] it "copies the second element if there are more than two" $ runTexts ["1 2 3 over"] `shouldBe` Right [1, 2, 3, 2] it "errors if there is nothing on the stack" $ runTexts ["over" ] `shouldBe` Left StackUnderflow it "errors if there is only one value on the stack" $ runTexts ["1 over" ] `shouldBe` Left StackUnderflow describe "user-defined words" $ do it "can consist of built-in words" $ runTexts [ ": dup-twice dup dup ;" , "1 dup-twice" ] `shouldBe` Right [1, 1, 1] it "execute in the right order" $ runTexts [ ": countup 1 2 3 ;" , "countup" ] `shouldBe` Right [1, 2, 3] it "can override other user-defined words" $ runTexts [ ": foo dup ;" , ": foo dup dup ;" , "1 foo" ] `shouldBe` Right [1, 1, 1] it "can override 
built-in words" $ runTexts [ ": swap dup ;" , "1 swap" ] `shouldBe` Right [1, 1] it "can override built-in operators" $ runTexts [ ": + * ;" , "3 4 +" ] `shouldBe` Right [12] it "can use different words with the same name" $ runTexts [ ": foo 5 ;" , ": bar foo ;" , ": foo 6 ;" , "bar foo" ] `shouldBe` Right [5, 6] it "can define word that uses word with the same name" $ runTexts [ ": foo 10 ;" , ": foo foo 1 + ;" , "foo" ] `shouldBe` Right [11] it "cannot redefine numbers" $ runTexts [": 1 2 ;"] `shouldBe` Left InvalidWord it "errors if executing a non-existent word" $ runTexts ["1 foo"] `shouldBe` Left (UnknownWord "foo") describe "case-insensitivity" $ do it "DUP is case-insensitive" $ runTexts ["1 DUP Dup dup" ] `shouldBe` Right [1, 1, 1, 1] it "DROP is case-insensitive" $ runTexts ["1 2 3 4 DROP Drop drop"] `shouldBe` Right [1] it "SWAP is case-insensitive" $ runTexts ["1 2 SWAP 3 Swap 4 swap"] `shouldBe` Right [2, 3, 4, 1] it "OVER is case-insensitive" $ runTexts ["1 2 OVER Over over" ] `shouldBe` Right [1, 2, 1, 2, 1] it "user-defined words are case-insensitive" $ runTexts [ ": foo dup ;" , "1 FOO Foo foo" ] `shouldBe` Right [1, 1, 1, 1] it "definitions are case-insensitive" $ runTexts [ ": SWAP DUP Dup dup ;" , "1 swap" ] `shouldBe` Right [1, 1, 1, 1] -- ab8d473c39114365fb88f8406ea7a1783f0a40f4
exercism/xhaskell
exercises/practice/forth/test/Tests.hs
mit
6,377
1
13
2,029
1,663
844
819
127
1
{-# LANGUAGE DataKinds, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings, ScopedTypeVariables, TypeOperators #-} module Hevents.Eff.Demo where -- * Imports, stuff to make the compiler happy import Control.Category import Control.Concurrent.Async import Control.Concurrent.STM import Control.Exception (finally, throwIO) import Control.Monad.Except import qualified Control.Monad.State as ST import Control.Monad.Trans.Either import qualified Data.ByteString.Builder as BS import Data.Either (rights) import Data.Proxy import Data.Serialize (Serialize, get, put) import Data.Typeable import Data.Void import Hevents.Eff as W import Network.HTTP.Client (Manager, defaultManagerSettings, newManager) import Prelude hiding (init, (.)) import Servant import Servant.Client import System.Environment import Test.Hspec import Test.QuickCheck as Q import Test.QuickCheck.Monadic as Q -- * Let's start writing a test... aCounter :: Spec aCounter = describe "Counter Model" $ do it "should apply events from commands given they respect bounds" $ property $ prop_shouldApplyCommandRespectingBounds it "should not apply commands over bounds" $ property $ prop_shouldNotApplyCommandsOverBounds prop_shouldApplyCommandRespectingBounds :: Command Counter -> Bool prop_shouldApplyCommandRespectingBounds c@(Increment n) = let OK result = init `act` c in init `apply` result == Counter n prop_shouldApplyCommandRespectingBounds c@(Decrement n) = let counter20 = Counter 20 OK result = counter20 `act` c in counter20 `apply` result == Counter (20 - n) prop_shouldNotApplyCommandsOverBounds :: [ Command Counter ] -> Bool prop_shouldNotApplyCommandsOverBounds commands = let finalCounter = counter $ ST.execState (mapM updateModel commands) init in finalCounter >= 0 && finalCounter <= 100 newtype Counter = Counter { counter :: Int } deriving (Eq,Show) data CCounter = Increment Int | Decrement Int deriving (Eq, Show) data ECounter = Added Int deriving (Eq,Show) data ErCounter = OutOfBounds deriving 
(Eq,Show) type instance Command Counter = CCounter type instance Event Counter = ECounter type instance Error Counter = ErCounter instance Model Counter where init = Counter 0 Counter k `act` Increment n = if k + n <= 100 then OK $ Added n else KO OutOfBounds Counter k `act` Decrement n = if k - n >= 0 then OK $ Added (-n) else KO OutOfBounds Counter k `apply` Added n = Counter $ k + n instance Arbitrary CCounter where arbitrary = oneof [ Increment <$> choose (0,20) , Decrement <$> choose (0,20) ] -- * We now have a fully functional event-sourced bounded counter *Model* -- let's expose some services that end users could access... -- -- First write tests representing services interactions data CounterAction = GetCounter | IncCounter Int | DecCounter Int deriving (Show) instance Arbitrary CounterAction where -- we use frequency to represent some expected (or observed) behaviour -- our users' behaviour model could be much more complex... arbitrary = frequency [ (3, return GetCounter) , (2, IncCounter <$> choose (0,10)) , (1, DecCounter <$> choose (0,10)) ] prop_servicesRespectCounterBounds :: [ CounterAction ] -> Property prop_servicesRespectCounterBounds actions = Q.monadicIO $ do results <- Q.run $ do (model, storage) <- prepareContext mapM (effect storage model . 
interpret) actions assert $ all (\c -> c >= 0 && c <= 100) (rights results) -- this is where we define the initial state of our services and model prepareContext = (,) <$> newTVarIO (W.init :: Counter) <*> atomically W.makeMemoryStore -- defines how to interpret our action model in terms of actual services type EventSourced m a = Eff (State m :> Store :> Exc ServantErr :> Lift STM :> Void) a interpret GetCounter = getCounter interpret (IncCounter n) = increment n interpret (DecCounter n) = decrement n getCounter :: EventSourced Counter Int getCounter = counter <$> getState increment :: Int -> EventSourced Counter Int increment n = applyCommand (Increment n) >>= storeEvent decrement :: Int -> EventSourced Counter Int decrement n = applyCommand (Decrement n) >>= storeEvent storeEvent :: Either ErCounter ECounter -> EventSourced Counter Int storeEvent = either (throwExc . fromModelError) (either (throwExc . fromDBError) (const $ counter <$> getState) <=< store) where fromModelError e = err400 { errBody = BS.toLazyByteString $ BS.stringUtf8 $ "Invalid command " ++ show e } fromDBError e = err500 { errBody = BS.toLazyByteString $ BS.stringUtf8 $ "DB Error " ++ show e } instance Serialize ECounter where put (Added i) = put i get = Added <$> get instance Versionable ECounter -- * Expose our counter services through a REST API type CounterApi = "counter" :> (Get '[JSON] Int :<|> "increment" :> Capture "inc" Int :> Get '[JSON] Int :<|> "decrement" :> Capture "dec" Int :> Get '[JSON] Int) counterApi :: Proxy CounterApi counterApi = Proxy -- * Let's write a test for our API against actual services, using user-centric actions prop_counterServerImplementsCounterApi :: [ CounterAction ] -> Property prop_counterServerImplementsCounterApi actions = Q.monadicIO $ do let baseUrl = BaseUrl Http "localhost" 8082 "" results <- Q.run $ do mgr <- newManager defaultManagerSettings (model, storage) <- prepareContext server <- W.runWebServerErr 8082 counterApi (Nat $ ExceptT . 
effect storage model) handler mapM (runClient mgr baseUrl) actions `finally` cancel server assert $ all (\c -> c >= 0 && c <= 100) results runClient m b GetCounter = either throwIO return =<< runExceptT (counterState m b) runClient m b (IncCounter n) = either throwIO return =<< runExceptT (incCounter n m b) runClient m b (DecCounter n) = either throwIO return =<< runExceptT (decCounter n m b) counterState :<|> incCounter :<|> decCounter = client counterApi handler = getCounter :<|> increment :<|> decrement -- * Main server main :: IO () main = do [port] <- getArgs (model, storage) <- prepareContext W.runWebServerErr (Prelude.read port) counterApi (Nat $ ExceptT . effect storage model) handler >>= wait
abailly/hevents
test/Hevents/Eff/Demo.hs
mit
7,032
0
16
1,936
1,808
955
853
-1
-1
{- |
Reviewer documentation (code below is unchanged; only this comment was added).

This module wires up the Jupyter "Layout" widget for IHaskell:

  * 'LayoutClass' is a type-level list naming every field the Layout model
    carries: model/view metadata plus the CSS-like layout properties (all
    prefixed L in "IHaskell.Display.Widgets.Singletons").
  * The 'FieldType' instances declare each layout property as @Maybe String@,
    i.e. an optional raw CSS value string.
  * 'defaultLayoutWidget' builds the default attribute record: the metadata
    fields are set with (=:!), free-form properties default to Nothing via
    (=::), and enumerated properties use (=:.) together with the local
    'venum' validator, which throws 'Ex.AssertionFailed' when a supplied
    value is not in the allowed list.

NOTE(review): the operators (=:!), (=::), (=:.) and the *Props value lists
come from the widget framework modules imported above; their exact semantics
are not visible in this file -- confirm against
IHaskell.Display.Widgets.Types before relying on the descriptions here.
Also note 'S.FlexibleContexts' is enabled twice in the pragma list.
-}
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE AutoDeriveTypeable #-} {-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} module IHaskell.Display.Widgets.Layout.Types where import Prelude hiding (Right,Left) import Control.Monad (unless) import qualified Control.Exception as Ex import Data.List (intercalate) import Data.Vinyl (Rec(..)) import qualified IHaskell.Display.Widgets.Singletons as S import IHaskell.Display.Widgets.Types import IHaskell.Display.Widgets.Layout.Common type LayoutClass = [ 'S.ModelModule , 'S.ModelModuleVersion , 'S.ModelName , 'S.ViewModule , 'S.ViewModuleVersion , 'S.ViewName , 'S.LAlignContent , 'S.LAlignItems , 'S.LAlignSelf , 'S.LBorder , 'S.LBottom , 'S.LDisplay , 'S.LFlex , 'S.LFlexFlow , 'S.LGridArea , 'S.LGridAutoColumns , 'S.LGridAutoFlow , 'S.LGridAutoRows , 'S.LGridColumn , 'S.LGridGap , 'S.LGridRow , 'S.LGridTemplateAreas , 'S.LGridTemplateColumns , 'S.LGridTemplateRows , 'S.LHeight , 'S.LJustifyContent , 'S.LJustifyItems , 'S.LLeft , 'S.LMargin , 'S.LMaxHeight , 'S.LMaxWidth , 'S.LMinHeight , 'S.LMinWidth , 'S.LOrder , 'S.LOverflow , 'S.LOverflowX , 'S.LOverflowY , 'S.LPadding , 'S.LRight , 'S.LTop , 'S.LVisibility , 'S.LWidth ] type instance FieldType 'S.LAlignContent = Maybe String type instance FieldType 'S.LAlignItems = Maybe String type instance FieldType 'S.LAlignSelf = Maybe String type instance FieldType 'S.LBorder = Maybe String type instance FieldType 'S.LBottom = Maybe String type instance FieldType 'S.LDisplay = Maybe String type instance FieldType 'S.LFlex = 
Maybe String type instance FieldType 'S.LFlexFlow = Maybe String type instance FieldType 'S.LGridArea = Maybe String type instance FieldType 'S.LGridAutoColumns = Maybe String type instance FieldType 'S.LGridAutoFlow = Maybe String type instance FieldType 'S.LGridAutoRows = Maybe String type instance FieldType 'S.LGridColumn = Maybe String type instance FieldType 'S.LGridGap = Maybe String type instance FieldType 'S.LGridRow = Maybe String type instance FieldType 'S.LGridTemplateAreas = Maybe String type instance FieldType 'S.LGridTemplateColumns = Maybe String type instance FieldType 'S.LGridTemplateRows = Maybe String type instance FieldType 'S.LHeight = Maybe String type instance FieldType 'S.LJustifyContent = Maybe String type instance FieldType 'S.LJustifyItems = Maybe String type instance FieldType 'S.LLeft = Maybe String type instance FieldType 'S.LMargin = Maybe String type instance FieldType 'S.LMaxHeight = Maybe String type instance FieldType 'S.LMaxWidth = Maybe String type instance FieldType 'S.LMinHeight = Maybe String type instance FieldType 'S.LMinWidth = Maybe String type instance FieldType 'S.LOrder = Maybe String type instance FieldType 'S.LOverflow = Maybe String type instance FieldType 'S.LOverflowX = Maybe String type instance FieldType 'S.LOverflowY = Maybe String type instance FieldType 'S.LPadding = Maybe String type instance FieldType 'S.LRight = Maybe String type instance FieldType 'S.LTop = Maybe String type instance FieldType 'S.LVisibility = Maybe String type instance FieldType 'S.LWidth = Maybe String -- type family WidgetFields (w :: WidgetType) :: [Field] where type instance WidgetFields 'LayoutType = LayoutClass -- | A record representing a widget of the Layour class from IPython defaultLayoutWidget :: Rec Attr LayoutClass defaultLayoutWidget = (S.SModelModule =:! "@jupyter-widgets/base") :& (S.SModelModuleVersion =:! "1.1.0") :& (S.SModelName =:! "LayoutModel") :& (S.SViewModule =:! 
"@jupyter-widgets/base") :& (S.SViewModuleVersion =:! "1.1.0") :& (S.SViewName =:! "LayoutView") :& (AlignContent =:. (Nothing, venum alignContentProps)) :& (AlignItems =:. (Nothing, venum alignItemProps)) :& (AlignSelf =:. (Nothing, venum alignSelfProps)) :& (Border =:: Nothing) :& (Bottom =:: Nothing) :& (Display =:: Nothing) :& (Flex =:: Nothing) :& (FlexFlow =:: Nothing) :& (GridArea =:: Nothing) :& (GridAutoColumns =:: Nothing) :& (GridAutoFlow =:. (Nothing, venum gridAutoFlowProps)) :& (GridAutoRows =:: Nothing) :& (GridColumn =:: Nothing) :& (GridGap =:: Nothing) :& (GridRow =:: Nothing) :& (GridTemplateAreas =:: Nothing) :& (GridTemplateColumns =:: Nothing) :& (GridTemplateRows =:: Nothing) :& (Height =:: Nothing) :& (JustifyContent =:: Nothing) :& (JustifyItems =:: Nothing) :& (Left =:: Nothing) :& (Margin =:: Nothing) :& (MaxHeight =:: Nothing) :& (MaxWidth =:: Nothing) :& (MinHeight =:: Nothing) :& (MinWidth =:: Nothing) :& (Order =:: Nothing) :& (Overflow =:. (Nothing, venum overflowProps)) :& (OverflowX =:. (Nothing, venum overflowProps)) :& (OverflowY =:. (Nothing, venum overflowProps)) :& (Padding =:: Nothing) :& (Right =:: Nothing) :& (Top =:: Nothing) :& (Visibility =:. (Nothing, venum visibilityProps)) :& (Width =:: Nothing) :& RNil where venum :: [String] -> Maybe String -> IO (Maybe String) venum _ Nothing = return Nothing venum xs (Just f) = do unless (f `elem` xs) (Ex.throw $ Ex.AssertionFailed ("The value should be one of: " ++ intercalate ", " xs)) return $ Just f
gibiansky/IHaskell
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Layout/Types.hs
mit
7,299
0
49
2,550
1,705
945
760
155
2
-- | Core data types for the Wyas Scheme interpreter: Lisp values,
-- evaluation errors, and helpers for working with 'ThrowsError'.
module Wyas.Types
  ( LispVal(..)
  , LispError(..)
  , ThrowsError
  , trapError
  , extractValue
  ) where

import Control.Monad.Except
import Text.ParserCombinators.Parsec (ParseError)

-- | A Scheme value.
data LispVal
  = Atom String                    -- ^ a symbol
  | Bool Bool
  | Character Char
  | DottedList [LispVal] LispVal   -- ^ improper list @(a b . c)@
  | List [LispVal]
  | Number Integer
  | String String

instance Show LispVal where
  show = showVal

-- | Render a value in Scheme surface syntax.
showVal :: LispVal -> String
showVal (Atom val) = val
showVal (Bool True) = "#t"
showVal (Bool False) = "#f"
showVal (Character val) = show val
showVal (Number val) = show val
showVal (DottedList xs val) = concat ["(", unwordsShowVal xs, " . ", showVal val, ")"]
showVal (List xs) = concat ["(", unwordsShowVal xs, ")"]
showVal (String val) = concat ["\"", val, "\""]

-- | Space-separate the rendered elements of a list.
unwordsShowVal :: [LispVal] -> String
unwordsShowVal = unwords . map showVal

-- | Everything that can go wrong while parsing or evaluating.
data LispError
  = NumArgs Integer [LispVal]      -- ^ wrong arity: expected count, actual args
  | TypeMismatch String LispVal    -- ^ expected type description, offending value
  | Parser ParseError
  | BadSpecialForm String LispVal
  | NotFunction String String
  | UnboundVar String String
  | Default String

instance Show LispError where
  show = showError

-- | Human-readable rendering of a 'LispError'.
showError :: LispError -> String
showError (UnboundVar message varName) = concat [message, ": ", varName]
showError (BadSpecialForm message form) = concat [message, ": ", show form]
showError (NotFunction message func) = concat [message, ": ", func]
showError (NumArgs expected found) =
  concat ["Expected: ", show expected, " args; found values ", unwordsShowVal found]
showError (TypeMismatch expected found) =
  concat ["Invalid type: expected ", expected, " found, ", show found]
showError (Parser parseErr) = "Parse error at " ++ show parseErr
showError (Default message) = message

-- | Computations that may fail with a 'LispError'.
type ThrowsError = Either LispError

-- | Turn a failed computation into a 'Right' holding the rendered error.
trapError :: ThrowsError String -> ThrowsError String
trapError action = catchError action (return . show)

-- | Extract the value from a computation known to have succeeded.
-- Calling this on a 'Left' is a programmer error; previously the match was
-- non-exhaustive and crashed with an opaque pattern-match failure, now it
-- fails with a descriptive message instead.
extractValue :: ThrowsError a -> a
extractValue (Right val) = val
extractValue (Left err)  = error ("Wyas.Types.extractValue: " ++ show err)
saclark/wyas
src/Wyas/Types.hs
mit
2,120
0
7
559
649
349
300
52
1
{-# LANGUAGE ScopedTypeVariables #-}

-- | Hspec/QuickCheck coverage for parsing 'Month' values via 'readMaybe'.
module Jabara.Util.MonthSpec (spec) where

import Data.Time.Calendar
import Jabara.Util.Month
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Text.Read

-- | An empty string must not parse, and any string that does parse must
-- denote a month number in the 1..12 range.
spec :: Spec
spec = do
  describe "read month" $ do
    it "readMaybe \"\"" $
      (readMaybe "" :: Maybe Month) `shouldBe` Nothing
    prop "read test by QuickCheck" test_read

-- | Property: a successfully parsed 'Month' always carries a valid month.
test_read :: String -> Bool
test_read input =
  maybe True validMonth (readMaybe input :: Maybe Month)
  where
    validMonth m =
      let (_, monthNo, _) = toGregorian (mDay m)
      in 1 <= monthNo && monthNo <= 12
jabaraster/jabara-util
test/Jabara/Util/MonthSpec.hs
mit
701
0
17
248
196
102
94
19
2
{- Reviewer note (documentation only; code unchanged): this is a
   daily-exercise stub.  'graphMeBAYBEE' and 'codex2Rels' are intentionally
   'undefined' (to be filled in by the solver), as are the 'Node' and 'Edge'
   instance methods for the local wrapper types 'Vert' and 'Arr'.  The
   'Codex', 'Vertex' and 'Strength' types come from Y2018.M06.D27.Exercise,
   and Relation/Node/Edge from the 1HaskellADay repository's Data.Relation
   and Graph.Query modules; none of those definitions are visible here. -}
module Y2018.M07.D03.Exercise where {-- Yesterday, we translated JSON that was a mapping of String -> String to a Codex that was a mapping Vertex -> Relations where Relations was Map Vertex (Vertex, Strength) and we learned that the keys of the relations were really just indices of the arrayed (Vertex,Strength) pairing. Our PhD comfirmed this fact, and said he did it this way because, and I quote: "I'm an idiot." An honest PhD. What did I do to deserve this honor today? So, today, let's upload the codex to a graph database. I'm partial to neo4j, but you can choose any of them, even d3js or the sigma graphing library or whatever works for you. I'm easy. --} -- below imports available via 1HaskellADay git repository import Data.Relation import Graph.Query import Y2018.M06.D27.Exercise graphMeBAYBEE :: Codex -> IO () graphMeBAYBEE codex = undefined {-- graphMeBAYBEE takes the codex you created in Y2018.M06.D27 and returns (in the real world) a graph database, it does this by converting the codex to a set of Relation values. >>> honk <- readMap (exDir ++ honkin) >>> codex = mapping2Codex honk --} codex2Rels :: Codex -> [Relation Vert Arr Vert] codex2Rels codex = undefined -- of course, you neet the vert and arr types data Vert = V Vertex data Arr = E Strength -- both of which have to be defined as instances in the Relation domain instance Node Vert where asNode vert = undefined instance Edge Arr where asEdge arr = undefined -- from this we should be able the Cyph the codex. What do you get?
geophf/1HaskellADay
exercises/HAD/Y2018/M07/D03/Exercise.hs
mit
1,543
0
7
294
130
74
56
14
1
module Tools.BlankChopperSpec (main, spec) where

import Test.Hspec
import Tools.BlankChopper

main :: IO ()
main = hspec spec

-- | 'chop' is expected to split its input on runs of blank characters
-- (spaces, tabs, newlines), discarding leading and trailing blanks.
spec :: Spec
spec =
  describe "chop" $ do
    context "breaks on spaces" $
      itChopsAsSpecSamples
        [ ("x y", ["x", "y"])
        , ("a b", ["a", "b"])
        , (" m n ", ["m", "n"])
        , ("more then one", ["more", "then", "one"])
        , (" blanked front", ["blanked", "front"])
        , ("tails should be empty ", ["tails", "should", "be", "empty"])
        , (" shrouded with void ", ["shrouded", "with", "void"])
        ]
    context "breaks on tabs" $
      itChopsAsSpecSamples
        [ ("t\tt", ["t", "t"])
        , ("q\t\t\ty", ["q", "y"])
        , ("\ttabs\tevery\twhere\t", ["tabs", "every", "where"])
        , ("\t\t\t\t\t\tmuch\ttabs\tat\tstart", ["much", "tabs", "at", "start"])
        , ("only\tend\tis\ttabed\t\t\t\t", ["only", "end", "is", "tabed"])
        , ("\t\t\t\tlike\tfog\twithin\ttabs\t\t\t\t", ["like", "fog", "within", "tabs"])
        ]
    context "breaks on new lines" $
      itChopsAsSpecSamples
        [ ("1\n2", ["1", "2"])
        , ("\n4\n3\n2\n1\n", ["4", "3", "2", "1"])
        , ("\n\n\n\nmagic\n\n\nnumber\n\n\n\nseven\n\n", ["magic", "number", "seven"])
        ]

-- | Register one example per (input, expected) pair.
itChopsAsSpecSamples = mapM_ itChops

-- | One example; the description embeds both the expectation and the input.
itChops (input, expected) =
  it ("choped as: " ++ show expected ++ ", for an input: '" ++ input ++ "'") $
    chop input `shouldBe` expected
DominikJaniec/LearnHaskell
problems/calculator/test/Tools/BlankChopperSpec.hs
mit
1,578
0
16
435
496
290
206
35
1
module Handler.CacheSpec (spec) where

import TestImport

-- | Scaffolding spec for the cache handler: deliberately fails with a
-- "not implemented" error until a real test replaces it.
spec :: Spec
spec = withApp $
  describe "getCacheR" $
    error "Spec not implemented: getCacheR"
swamp-agr/carbuyer-advisor
test/Handler/CacheSpec.hs
mit
171
0
11
39
44
23
21
6
1
{- Reviewer documentation (code unchanged below).

   'Witness' maps a constraint to a value-level witness type; only the
   self-described bogus @Witness () = ()@ equation is written here -- the real
   rules (listed in the comment below) are supplied by a compiler plugin.
   'canonicalWitness' is deliberately 'undefined' and carries a NOINLINE
   pragma so the CoreToCore pass can still find and rewrite it.
   NOTE(review): 'expose' and 'useWitness' are also 'undefined' but have no
   NOINLINE pragma -- if the plugin rewrites them too, they presumably need
   the same pragma; confirm against the plugin source.  The 'Isomorphism'
   record and its helpers at the end are ordinary pure code. -}
{-# LANGUAGE TypeFamilies, KindSignatures, ConstraintKinds, ExplicitNamespaces, GADTs, TypeOperators, DataKinds, RankNTypes, AllowAmbiguousTypes, RecordWildCards #-} module ConstraintWitness.Internal ( (:~:)(..), Witness, canonicalWitness, expose, useWitness ) where import Data.Type.Equality ((:~:)(..)) import Data.HList import GHC.Prim (Constraint) -- | Magical type family that turns a constraint into a (value-level) witness. -- The rules are: -- Witness () -> () -- Witness (a ~ b) -> a :~: b -- Witness <a typeclass constraint> -> <a dictionary for that typeclass> -- Witness (?p :: a) -> a -- Witness (x, y, ..., z) -> HList '[Witness x, Witness y, .., Witness z] type family Witness (ct :: Constraint) :: * where Witness () = () -- Bogus equation so GHC doesn't choke -- class IsTypeClass (ct :: Constraint) where -- type Dict ct :: * -- -- class HasClasses (cts :: [Constraint]) where -- type Dicts cts :: [*] -- -- instance HasClasses '[] where type Dicts '[] = '[] -- -- instance {-# OVERLAPS #-} (IsTypeClass ct, HasClasses cts) => HasClasses (ct ': cts) where -- type Dicts (ct ': cts) = Dict ct ': Dicts cts -- -- instance HasClasses cts => HasClasses (ct ': cts) where type Dicts (ct ': cts) = Dicts cts -- | Tries to provide a canonical witness for the given constraint. This is: -- - () for empty constraints -- - Refl for equality constraints -- - the dictionary for typeclass constraints -- - the implicit parameter's value for ImplicitParams -- - a HList of the component constraints' canonical witnesses for conjoined constraints. -- It's implemented by CoreToCore magic, so we leave it as `undefined` here and make sure -- it won't be inlined so that we can still find it in the CoreToCore passes. canonicalWitness :: forall (ct :: Constraint). ct => Witness ct canonicalWitness = undefined -- implemented by compiler plug in {-# NOINLINE canonicalWitness #-} -- | Transforms a constraint into a witness-value argument, *exposing* it. expose :: forall (ct :: Constraint) a. 
(ct => a) -> (Witness ct -> a) expose thing witness = undefined -- | Alias of expose, with the arguments flipped. This is mostly useful as an argument to higher-order functions. useWitness :: forall (ct :: Constraint) a. Witness ct -> (ct => a) -> a useWitness witness thing = undefined data Isomorphism a b = Iso {appIso :: a -> b, appRevIso :: b -> a} mkIso :: (a -> b) -> (b -> a) -> Isomorphism a b mkIso fwd bwd = Iso {appIso = fwd, appRevIso = bwd} revIso :: Isomorphism a b -> Isomorphism b a revIso Iso{..} = Iso {appIso = appRevIso, appRevIso = appIso}
Solonarv/constraint-witness
plugin/ConstraintWitness/Internal.hs
mit
2,684
0
10
563
380
234
146
33
1
-- Project Euler Problem 22 - names scores
--
-- Sum of letter values in names weighted by position in the sorted list.

import Data.List
import Data.Maybe (fromMaybe)

-- | Letter -> alphabetical value (A=1 .. Z=26).
alph :: [(Char, Int)]
alph = zip ['A'..'Z'] [1..26]

-- | Kept for backward compatibility.  Previously a non-exhaustive match
-- that crashed opaquely on 'Nothing'; it now fails with a message.
elim_just :: Maybe a -> a
elim_just (Just a) = a
elim_just Nothing  = error "elim_just: Nothing"

-- | Sum of the letter values of a name (uppercase A-Z expected; any other
-- character is reported explicitly instead of a bare pattern-match crash).
wordscore :: String -> Int
wordscore = sum . map letterValue
  where
    letterValue c =
      fromMaybe (error ("wordscore: unexpected character " ++ show c))
                (lookup c alph)

-- | Split a list on a delimiter, dropping the delimiters.
-- Note: @split ',' "a,"@ yields @["a"]@ (no trailing empty chunk),
-- matching the original behaviour.
split :: Eq c => c -> [c] -> [[c]]
split _ [] = []
split d xs = chunk : rest
  where
    (chunk, remainder) = span (/= d) xs
    rest = if null remainder then [] else split d (tail remainder)

-- | Remove every occurrence of an element.
delfromlist :: Eq c => c -> [c] -> [c]
delfromlist d = filter (/= d)

-- | Strip the quotes from the raw file contents, split on commas, sort.
sorted_list :: String -> [String]
sorted_list str = sort (split ',' (delfromlist '"' str))

-- | Total score: each (position, name) pair contributes position * score.
para_score :: [(Int, String)] -> Int
para_score = sum . map (\(position, name) -> position * wordscore name)

main :: IO ()
main = do
  str <- readFile "p022_names.txt"
  print (para_score (zip [1..] (sorted_list str)))
yunwilliamyu/programming-exercises
project_euler/p022_names_scores.hs
cc0-1.0
978
4
13
213
429
224
205
18
2
{- Reviewer documentation (code unchanged below).

   Parsec parser for Sil-style monster.txt entries.  'parseMonsterFile' runs
   the 'monsterFile' parser over a file and calls 'error' on parse failure.
   An entry is keyed off the N: field ('nameFieldStart' rejects serial 0 as
   "not a monster"); 'monsterEntry' then reads the W: (depth), I: (speed,
   health dice, light radius), A: (alertness/will), P: (evasion, protection
   dice), optional B: attack fields (absent for silent watchers, per the
   inline comment) and the F: flags field, which is folded into critical
   resistance, elemental resistances/vulnerabilities, a possible slay, and
   the HURT_LITE / GLOW booleans.  Sub-fields within a line are ':'-separated
   ('subField' / 'ignoreSubField'); 'skipTillField' fast-forwards to a named
   field and 'endOfMonsterEntry' is a blank line.
   NOTE(review): the record fields and result types (M.Monster, Attack,
   Element, Slay, Dice) live in the project's Monster/Rdice/Types modules,
   which are not visible in this chunk -- semantics above are read off the
   parser structure only. -}
module MonsterParser (parseMonsterFile) where import Text.Parsec import Text.Parsec.String (Parser, parseFromFile) import qualified Monster as M import Rdice (Dice(ZeroDie), d) import GeneralParse import Types as T import qualified Data.Map as Map monsterFile :: Parser [M.Monster] monsterFile = junk >> monsterEntry `sepEndBy` junk junk = anyChar `manyTill` (lookAhead nameFieldStart <|> (eof >> return " ")) parseMonsterFile :: String -> IO [M.Monster] parseMonsterFile filename = do maybeMonsters <- parseFromFile monsterFile filename case maybeMonsters of Right monsters -> return monsters Left parseError -> error $ show parseError nameFieldStart :: Parser String nameFieldStart = try $ do string "N:" digits <- many1 digit if digits == "0" then parserFail "not a monster" else return digits monsterEntry :: Parser M.Monster monsterEntry = try $ do name <- nameField spaces depth <- depthField skipTillField "I" (speed,health,lightRadius) <- infoField --Field A: always comes after I: spaces will <- alertnessField skipTillField "P" (evasion,protDice) <- protectionField --If the attack field B: exists (as it does for every monster --except silent watchers), it always comes after P: spaces attacks <- option [] $ do many1 $ do notFollowedBy $ string "F:" attack <- attackField spaces return attack (critRes,resistances,hatesLight,slainBy,glows) <- flagsField anyChar `manyTill` try endOfMonsterEntry return $ M.Monster { M.name = name, M.depth = depth, M.speed = speed, M.will = will, M.evasion = evasion, M.protDice = protDice, M.attacks = attacks, M.lightRadius = lightRadius, M.hatesLight = hatesLight, M.health = health, M.glows = glows, M.seenByPlayer = True, M.resistances = resistances, M.criticalRes = critRes, M.slainBy = slainBy, M.onLitSquare = False, M.alertness = M.Alert} endOfMonsterEntry = try $ eol >> eol endOfField = many spaceBar >> eol endOfSubField = try (string ":") <|> endOfField skipTillField fid = ignoreUntil fieldStart endOfMonsterEntry >> eol where fieldStart = 
lookAhead $ eol >> string (fid ++ ":") lookForField fid = try $ lookAhead $ (notFollowedBy endOfMonsterEntry >> anyChar) `manyTill` fieldStart where fieldStart = try $ eol >> string (fid ++ ":") anySubField :: Parser String anySubField = anyChar `manyTill` lookAhead endOfSubField subField :: Parser a -> Parser a subField parser = try $ do contents <- parser --If necessary, skip to the beginning of the next subfield --or the end of this field anyChar `manyTill` endOfSubField return contents ignoreSubField = subField anySubField nameField :: Parser String nameField = try $ do string "N:" ignoreSubField -- serial number; ignored for now subField $ many1 (letter <|> oneOf " ,-'") --name of monster depthField :: Parser Int depthField = try $ do string "W:" parseInt --monster depth infoField :: Parser (Int, Dice, Int) infoField = try $ do string "I:" speed <- subField parseInt health <- subField diceParser ignoreSubField lightRadius <- subField parseInt return (speed,health,lightRadius) alertnessField :: Parser Int alertnessField = try $ do string "A:" count 3 ignoreSubField parseInt --Will score --The P: field is for evasion and protection dice protectionField :: Parser (Int,Dice) protectionField = do string "P:" (ev,protDice) <- parseDefenseTuple return (ev, protDice) attackField :: Parser Attack attackField = do string "B:" (brands, sharpness, canCrit, alwaysHits) <- parseAttackEffects (accuracy, damDice) <- parseAttackTuple return $ Attack {accuracy = accuracy, damage = damDice, brands = brands, slays = [], sharpness = sharpness, critThreshold = M.monsterCritThreshold damDice, canCrit = canCrit, alwaysHits = alwaysHits} parseAttackEffects :: Parser ([Element], Double, Bool, Bool) parseAttackEffects = do (alwaysHits,sharpness,canCrit) <- subField $ (try (string "CRAWL") >> return (False,0.5, True)) <|> (try (string "ENGULF") >> return (False,1.0, False)) <|> (try (string "TOUCH") >> return (False,0.0, False)) <|> (try (string "SPORE") >> return (True,0.0, False)) <|> 
(anySubField >> return (False,1.0,True)) brands <- option [] (try $ subField $ (try (string "FIRE") >> return [Fire]) <|> (try (string "COLD") >> return [Cold]) <|> (try (string "POISON") >> return [Poison]) <|> (try (string "DARK") >> return [Dark]) <|> --If the field starts with a '(', then this monster didn't have --an attack effect field, and this field is in fact the attackTuple --field parsed in the next step of attackField (notFollowedBy (char '(') >> anySubField >> return [])) return (brands, sharpness, canCrit, alwaysHits) flagsField :: Parser (M.MonsterCritRes, Map.Map Element Int, Bool, Maybe Slay, Bool) flagsField = do string "F:" <?> "start of flags field" flags <- flag `sepBy` try flagSep let critRes = if "NO_CRIT" `elem` flags then M.CritImmune else if "RES_CRIT" `elem` flags then M.CritResistant else M.NoCritRes resList = resistancesFromFlags flags vulnList = vulnerabilitiesFromFlags flags resMap = T.makeResistanceMap resList vulnList maybeSlainBy = slayFromFlags flags hatesLight = "HURT_LITE" `elem` flags glows = "GLOW" `elem` flags return (critRes, resMap, hatesLight, maybeSlainBy, glows) where flag = many1 (upper <|> digit <|> char '_') flagSep = try (spaces >> char '|' >> spaces >> return "|") <|> try (many spaceBar >> eol >> string "F:") resistancesFromFlags [] = [] resistancesFromFlags (f:fs) = case f of "RES_FIRE" -> Fire : resistancesFromFlags fs "RES_COLD" -> Cold : resistancesFromFlags fs "RES_POIS" -> Poison : resistancesFromFlags fs _ -> resistancesFromFlags fs vulnerabilitiesFromFlags [] = [] vulnerabilitiesFromFlags (f:fs) = case f of "HURT_FIRE" -> Fire : vulnerabilitiesFromFlags fs "HURT_COLD" -> Cold : vulnerabilitiesFromFlags fs _ -> vulnerabilitiesFromFlags fs slayFromFlags [] = Nothing slayFromFlags (f:fs) = case f of "WOLF" -> Just SlayWolves "ORC" -> Just SlayOrcs "UNDEAD" -> Just SlayUndead "RAUKO" -> Just SlayRaukar "TROLL" -> Just SlayTrolls "DRAGON" -> Just SlayDragons "SPIDER" -> Just SlaySpiders _ -> slayFromFlags fs
MonsterParser.hs
gpl-2.0
7,111
0
19
1,979
2,035
1,061
974
183
8
-- | Portage-specific conversion tables.
module Portage.Tables
  ( set_build_slot
  ) where

import Portage.Dependency.Builder
import Portage.Dependency.Types
import Portage.PackageId

import Data.Monoid

-- | Fill in the build-slot attribute of every atom: packages listed in
-- 'slottedPkgs' receive their known slot, everything else falls back to
-- 'AnyBuildTimeSlot'.
set_build_slot :: Dependency -> Dependency
set_build_slot = overAtom fixup
  where
    fixup atom@(Atom pn dr (DAttr _ u)) =
      case foldMap (First . matches atom) slottedPkgs of
        First (Just s) -> Atom pn dr (DAttr s u)
        First Nothing  -> Atom pn dr (DAttr AnyBuildTimeSlot u)
    -- First match in 'slottedPkgs' wins (via the 'First' monoid).
    matches (Atom pn _ _) (nm, s)
      | pn == nm  = Just s
      | otherwise = Nothing

-- | Packages whose dependency atoms carry an explicit slot.
slottedPkgs :: [(PackageName, SlotDepend)]
slottedPkgs =
  [ (mkPackageName "dev-haskell" "quickcheck", GivenSlot "2=")
  , (mkPackageName "dev-haskell" "hdbc", GivenSlot "2=")
  ]
Heather/hackport
Portage/Tables.hs
gpl-3.0
784
0
12
177
261
137
124
19
2
module Util where

import Data.Either
import DFA
import Parser

-- | Unwrap a 'Right' value.  A 'Left' is a programmer error at this call
-- site; previously this hit an anonymous 'undefined', now it fails with a
-- diagnostic message.
fromRight :: Either a b -> b
fromRight = either (const (error "Util.fromRight: unexpected Left")) id

-- | Compare two regular expressions for equivalence: compile both to DFAs
-- and search the symmetric difference for a shortest distinguishing word.
-- NOTE(review): the meaning of the result (e.g. an empty/absent word means
-- the regexes are equivalent) is fixed by 'shortestWord' in the DFA module,
-- which is not visible here.
eq s1 s2 = shortestWord (symmetricDiff (compile s1) (compile s2))
  where
    -- Parse then determinize; a parse failure aborts via 'fromRight'.
    compile = toDFA . fromRight . parseRegex
TeofilC/regeq
src/Util.hs
gpl-3.0
204
0
8
42
71
38
33
7
1
{- Reviewer documentation (code unchanged below).

   ENet-based client glue.  'pre' creates a single-connection ENet client
   host with unlimited bandwidth, enables the range-coder compressor, and
   connects to the server using a protocol-version "magic" number, returning
   both pointers in 'NetState'.  'post' requests a polite disconnect and then
   loops on 'Host.service' until a Disconnect event arrives (resetting the
   peer), destroying any stray received packets along the way.  'receive'
   currently only prints which event type arrived and returns the game state
   unchanged -- its world/map parameters are unused, so it reads as
   scaffolding.  NOTE(review): 'import Data.Word' appears twice, and several
   imports (e.g. Network.Socket.ByteString, Data.Int) are unused in this
   chunk; harmless, but worth tidying at file level. -}
{-# LANGUAGE LambdaCase #-} module HaSnip.Protocol.Ben where import Foreign.Ptr import qualified Data.ByteString as SB import qualified Data.ByteString.Lazy as LB import qualified Data.ByteString.Char8 as CB import Data.Word import Control.Concurrent import Control.Monad import Network.Socket(SockAddr(SockAddrInet, SockAddrInet6)) import Network.Socket.ByteString import Network.ENet import qualified Network.ENet.Bindings as B import qualified Network.ENet.Host as Host import qualified Network.ENet.Packet as Packet import qualified Network.ENet.Peer as Peer import Data.Word import Data.Int import Data.BitSet.Generic import Data.ByteString import HaSnip.Types -- localhost w/ default port is: SockAddrInet 32887 16777343 data NetState = NS (Ptr B.Host) (Ptr B.Peer) pre :: Word32 -> SockAddr -> IO NetState pre magic servAddr = withENetDo $ do client <- Host.create Nothing -- create a client host 1 -- only allow 1 outgoing connection 1 -- allow only 1 channel to be used, 0 0 -- unlimited bandwidth in 0 -- unlimited bandwidth out -- ENet after the first couple versions uses this Host.compressWithRangeCoder client host <- Host.connect client -- us servAddr -- server address 1 -- use 1 channel magic -- AoS-version-specific "magic number" return $ NS client host post :: NetState -> IO () post (NS client peer) = Peer.disconnect peer 0 >> loop where loop :: IO () loop = Host.service client 1000 >>= \case (Just (B.Event t _ _ _ p)) -> case t of B.Disconnect -> Peer.reset(peer) B.Receive -> Packet.destroy p >> loop _ -> loop Nothing -> loop receive :: world -> map -> Ptr B.Host -> NetState -> GState -> IO GState receive w m client ns gs = Host.service client 1000 >>= \case (Just (B.Event t p chan dat pack)) -> case t of B.None -> print "got none" >> return gs B.Connect -> print "got connect" >> return gs B.Disconnect -> print "got disconnect" >> return gs B.Receive -> print "got receive" >> return gs Nothing -> print "no packet" >> return gs
Ericson2314/hasnip
src/HaSnip/Protocol/Ben.hs
gpl-3.0
2,279
0
16
620
599
323
276
54
5
module Text.Lox.Types where type Id = Char type Op = Char data Term = Const Int | Var Id | Fn Id [Term] | Operation Op Term Term deriving (Show) data Fml = Atom Id | Eq Term Term | Neq Term Term | Pred Id [Term] | Not Fml | Forall Id Fml | Exists Id Fml | And Fml Fml | Or Fml Fml | OnlyIf Fml Fml | Iff Fml Fml | Xor Fml Fml | Diamond Fml | Box Fml deriving (Show)
knuton/lox
Text/Lox/Types.hs
gpl-3.0
538
0
7
258
163
96
67
23
0
-- grid is a game written in Haskell -- Copyright (C) 2018 karamellpelle@hotmail.com -- -- This file is part of grid. -- -- grid is free software: you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation, either version 3 of the License, or -- (at your option) any later version. -- -- grid is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- -- You should have received a copy of the GNU General Public License -- along with grid. If not, see <http://www.gnu.org/licenses/>. -- {-# LANGUAGE MagicHash #-} {-# LANGUAGE UnboxedTuples #-} module Game.Grid.GridWorld.SegmentArray ( SegmentArray, makeSegmentArray, segmentarrayRead, segmentarrayWrite, ) where import MyPrelude import Game.Grid.GridWorld.Segment import Data.Int -- making unportable import GHC.Prim import GHC.Exts import GHC.Types import GHC.Int import System.IO.Unsafe -------------------------------------------------------------------------------- -- SegmentArray -- we need some type of sequential container for the segments of path. since the -- tail of the segments syncs our GL data, we want fast access to the tail. also, -- we want the path to be able to grow modulo its size. so I decided to use a -- custom array type. -- fixme: verify that this type is fast (enough) -- note: in GHC, the datatypes Int8/Int16/... are implemented by the word size -- of the architecture (which is 32 bits on ARMv7), so using them does not -- save memory. but in this implementation of SegmentArray, we use the cor- -- responding bits and so save memory. data SegmentArray = SegmentArray (MutableByteArray# RealWorld) unsafeState# :: (State# RealWorld -> (# State# RealWorld, a #)) -> a unsafeState# = unsafePerformIO . 
IO makeSegmentArray :: UInt -> SegmentArray makeSegmentArray (W# size#) = unsafeState# $ \s# -> case newAlignedPinnedByteArray# (word2Int# (size# `timesWord#` 16##)) 4# s# of (# s'#, mba# #) -> (# s'#, SegmentArray mba# #) segmentarrayWrite :: SegmentArray -> UInt -> Segment -> SegmentArray segmentarrayWrite sa@(SegmentArray mba#) (W# ix#) (Segment (Node (I16# x#) (I16# y#) (I16# z#)) (Turn (I8# x0#) (I8# x1#) (I8# x2#) (I8# y0#) (I8# y1#) (I8# y2#) (I8# z0#) (I8# z1#) (I8# z2#))) = unsafeState# $ \s# -> case writeInt16Array# mba# (ix2Bytes# ix# 0##) x# s# of { s# -> case writeInt16Array# mba# (ix2Bytes# ix# 1##) y# s# of { s# -> case writeInt16Array# mba# (ix2Bytes# ix# 2##) z# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 6##) x0# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 7##) x1# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 8##) x2# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 9##) y0# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 10##) y1# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 11##) y2# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 12##) z0# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 13##) z1# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 14##) z2# s# of { s# -> case writeInt8Array# mba# (ix4Bytes# ix# 15##) 0# s# of { s# -> (# s#, sa #) }}}}}}}}}}}}} where ix2Bytes# ix# a# = word2Int# ((8## `timesWord#` ix#) `plusWord#` a#) ix4Bytes# ix# a# = word2Int# ((16## `timesWord#` ix#) `plusWord#` a#) segmentarrayRead :: SegmentArray -> UInt -> Segment segmentarrayRead (SegmentArray mba#) (W# ix#) = unsafeState# $ \s# -> case readInt16Array# mba# (ix2Bytes# ix# 0##) s# of { (# s#, x# #) -> case readInt16Array# mba# (ix2Bytes# ix# 1##) s# of { (# s#, y# #) -> case readInt16Array# mba# (ix2Bytes# ix# 2##) s# of { (# s#, z# #) -> case readInt8Array# mba# (ix4Bytes# ix# 6##) s# of { (# s#, x0# #) -> case readInt8Array# mba# (ix4Bytes# ix# 7##) s# of { (# s#, x1# #) -> case 
readInt8Array# mba# (ix4Bytes# ix# 8##) s# of { (# s#, x2# #) -> case readInt8Array# mba# (ix4Bytes# ix# 9##) s# of { (# s#, y0# #) -> case readInt8Array# mba# (ix4Bytes# ix# 10##) s# of { (# s#, y1# #) -> case readInt8Array# mba# (ix4Bytes# ix# 11##) s# of { (# s#, y2# #) -> case readInt8Array# mba# (ix4Bytes# ix# 12##) s# of { (# s#, z0# #) -> case readInt8Array# mba# (ix4Bytes# ix# 13##) s# of { (# s#, z1# #) -> case readInt8Array# mba# (ix4Bytes# ix# 14##) s# of { (# s#, z2# #) -> case readInt8Array# mba# (ix4Bytes# ix# 15##) s# of { (# s#, _ #) -> (# s#, Segment (Node (I16# x#) (I16# y#) (I16# z#)) (Turn (I8# x0#) (I8# x1#) (I8# x2#) (I8# y0#) (I8# y1#) (I8# y2#) (I8# z0#) (I8# z1#) (I8# z2#)) #) }}}}}}}}}}}}} where ix2Bytes# ix# a# = word2Int# ((8## `timesWord#` ix#) `plusWord#` a#) ix4Bytes# ix# a# = word2Int# ((16## `timesWord#` ix#) `plusWord#` a#) {- -- | we create a newtype so that we later can just store the direction if we want -- (strip Segment for structure) newtype StorableSegment = StorableSegment Segment wrapStorableSegment :: Segment -> StorableSegment wrapStorableSegment seg = StorableSegment seg instance Storable StorableSegment where sizeOf _ = 16 alignment _ = 4 -- ^ 4 byte align on ARM (?) peek ptr = do -- node x <- peekByteOff ptr 0 :: IO Int16 y <- peekByteOff ptr 2 :: IO Int16 z <- peekByteOff ptr 4 :: IO Int16 -- turn x0 <- peekByteOff ptr 6 :: IO Int8 x1 <- peekByteOff ptr 7 :: IO Int8 x2 <- peekByteOff ptr 8 :: IO Int8 y0 <- peekByteOff ptr 9 :: IO Int8 y1 <- peekByteOff ptr 10 :: IO Int8 y2 <- peekByteOff ptr 11 :: IO Int8 z0 <- peekByteOff ptr 12 :: IO Int8 z1 <- peekByteOff ptr 13 :: IO Int8 z2 <- peekByteOff ptr 14 :: IO Int8 -- 15 is empty! 
return $ StorableSegment $ Segment (Node x y z) (Turn x0 x1 x2 y0 y1 y2 z0 z1 z2) poke ptr (StorableSegment (Segment (Node x y z) (Turn x0 x1 x2 y0 y1 y2 z0 z1 z2))) = do -- node pokeByteOff ptr 0 x pokeByteOff ptr 2 y pokeByteOff ptr 4 z -- turn pokeByteOff ptr 6 x0 pokeByteOff ptr 7 x1 pokeByteOff ptr 8 x2 pokeByteOff ptr 9 y0 pokeByteOff ptr 10 y1 pokeByteOff ptr 11 y2 pokeByteOff ptr 12 z0 pokeByteOff ptr 13 z1 pokeByteOff ptr 14 z2 -- 15: empty! -------------------------------------------------------------------------------- -- tmp: instance Show Path where show path = "Path:" ++ "\npathCurrent: " ++ show (pathCurrent path) ++ "\npathAlpha: " ++ show (pathAlpha path) ++ "\npathSegments: " ++ show (pathSegments path) ++ "\npathSpeed: " ++ show (pathSpeed path) ++ "\npathTurnState: " ++ maybe "(not defined)" (const "(defined)") (pathTurnState path) -}
karamellpelle/grid
designer/source/Game/Grid/GridWorld/SegmentArray.hs
gpl-3.0
7,674
0
51
2,294
1,474
800
674
73
1
answer :: Int answer = foldl1 lcm [1..20]
rodgzilla/project-euler
problem_005/problem.hs
gpl-3.0
42
0
6
8
21
11
10
2
1
{- Hexadecimal utilkity functions -} module Hex (hexEncode, hexDecode, triples, ) where import Numeric (readHex) import Data.Char (ord, chr) fromHex :: String -> Int fromHex = fst.head.readHex hexChars :: String hexChars = ['0'..'9'] ++ ['a'..'f'] hexDigitToChar :: Int -> Char hexDigitToChar n = hexChars !! n hexToString :: Int -> String hexToString = reverse . hexToString' hexToString' n | n < 0x10 = (hexDigitToChar n):"" | otherwise = hexDigitToChar (n `mod` 0x10) : hexToString (n `div` 0x10) triples [] = [] triples (a:b:c:xs) = (a,b,c):triples xs padEven s | length s `mod` 2 == 0 = s | otherwise = "0" ++ s toPairs :: String -> [String] toPairs = reverse . toPairs' [] . padEven toPairs' l "" = l toPairs' l (a:b:cs) = toPairs' ((a:b:[]):l) cs hexToBytes :: String -> [Int] hexToBytes s = map fromHex $ toPairs s bytesToHex :: [Int] -> String bytesToHex xs = concat $ map hexToString xs hexEncodeString :: String -> String hexEncodeString s = padEven $ bytesToHex $ map ord s hexDecodeString :: String -> String hexDecodeString s = map chr (hexToBytes s) hexEncode :: [Int] -> String hexEncode = bytesToHex hexDecode :: String -> [Int] hexDecode = hexToBytes
CharlesRandles/cryptoChallenge
hex.hs
gpl-3.0
1,270
0
10
298
512
273
239
38
1
{-# LANGUAGE ImplicitParams, CPP #-} {-# OPTIONS_GHC -Wall #-} -- module SXXVector (MyVector, populateVector, printVector) where module SXXVector where import qualified Data.Vector.Unboxed.Mutable as V import Control.Monad.Primitive (PrimState) import Data.Int (Int32, Int16) #if __GLASGOW_HASKELL__ < 706 import Data.IORef (IORef, readIORef, writeIORef) #else import Data.IORef (IORef, readIORef, writeIORef, modifyIORef') #endif #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif type MyVector = V.MVector (PrimState IO) Int32 -- this code requires the ghc ImplicitParams extension pop :: (?mem :: MyVector) => IO Int32 pop = deref =<< getSP <* addSP 1 deref :: (?mem :: MyVector) => Int32 -> IO Int32 deref val = V.read ?mem (toInt val) push :: (?mem :: MyVector) => Int32 -> IO () push val = addSP (-1) >> getSP >>= (\sp -> write sp val) getArg :: (?pc :: IORef Int16, ?mem :: MyVector) => IO Int32 getArg = incPC >> readIORef ?pc >>= deref . toCell -- write :: (?mem :: MyVector) => Int -> Int32 -> IO () write :: (?mem :: MyVector, Integral a) => a -> Int32 -> IO () write = V.write ?mem . toInt getSP :: (?mem :: MyVector) => IO Int32 getSP = V.read ?mem 0 addSP :: (?mem :: MyVector) => Int32 -> IO () addSP = modVal 0 . (+) modVal :: (?mem :: MyVector) => Int -> (Int32 -> Int32) -> IO () -- modVal = ((.) . (>>=) . V.read ?mem) <*> ((.) . write) modVal x f = V.read ?mem x >>= (\val -> write x (f val)) incPC :: (?pc :: IORef Int16) => IO () incPC = modifyIORef' ?pc (+1) setPC :: (?pc :: IORef Int16, Integral a) => a -> IO () -- subtract 1 from the addr since PC will be incremented afterward setPC = (writeIORef ?pc) . toPC . 
subtract 1 getPC :: (?pc :: IORef Int16) => IO Int16 getPC = readIORef ?pc -- for converting to the type of the instruction pointer toPC :: Integral a => a -> Int16 toPC = fromIntegral -- for converting to the type of the memory vector toCell :: Integral a => a -> Int32 toCell = fromIntegral -- for converting to the type used by many haskell library functions toInt :: Integral a => a -> Int toInt = fromIntegral -- this is for boolean operations class SXXBool a where toInt32 :: a -> Int32 toBool :: a -> Bool and :: (SXXBool b) => a -> b -> Int32 and a b = toInt32 (toBool a && toBool b) -- or cannot be named (|) because '|' is a reserved word or :: (SXXBool b) => a -> b -> Int32 or a b = toInt32 (toBool a || toBool b) not' :: (SXXBool a) => a -> Int32 not' = toInt32 . not . toBool instance SXXBool Bool where toBool = id toInt32 True = 1 toInt32 False = 0 instance SXXBool Int32 where toBool = (/= 0) toInt32 = id #if __GLASGOW_HASKELL__ < 706 -- strict version of modifyIORef modifyIORef' ref f = do x <- readIORef ref let x' = f x x' `seq` writeIORef ref x' #endif
ninedotnine/bugfree-computing-machine
src/SXXVector.hs
gpl-3.0
2,823
0
10
614
898
491
407
57
1
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} module Language.VHDL.Lexer ( abstractLiteral , antiQ , bitStringLiteral , braces , brackets , charLiteral , colon , comma , commaSep , commaSep1 , decimal , dot , float , hexadecimal , identifier , integer , lexeme , octal , parens , reserved , semi , semiSep , semiSep1 , stringLiteral , symbol , spaceConsumer , basedLiteral , stringDelimiter ) where import Control.Arrow (first) import Control.Monad (unless, when) import Data.Char (chr, digitToInt, isDigit) import Data.Data (Data) import qualified Data.HashSet as S import Data.Monoid ((<>)) import Data.Text (Text) import qualified Data.Text as T import Language.VHDL.Parser.Monad (Parser, quotesEnabled) import Language.VHDL.Parser.Util import Language.VHDL.Syntax import Numeric (readInt) import Text.Megaparsec import Text.Megaparsec.Char import qualified Text.Megaparsec.Char.Lexer as L vhdlReserved :: S.HashSet Text vhdlReserved = S.fromList [ "abs" , "access" , "after" , "alias" , "all" , "and" , "architecture" , "array" , "assert" , "attribute" , "begin" , "block" , "body" , "buffer" , "bus" , "case" , "component" , "configuration" , "constant" , "disconnect" , "downto" , "else" , "elsif" , "end" , "entity" , "exit" , "file" , "for" , "function" , "generate" , "generic" , "group" , "guarded" , "if" , "impure" , "in" , "inertial" , "inout" , "is" , "label" , "library" , "linkage" , "literal" , "loop" , "map" , "mod" , "nand" , "new" , "next" , "nor" , "not" , "null" , "of" , "on" , "open" , "or" , "others" , "out" , "package" , "port" , "postponed" , "procedure" , "process" , "pure" , "range" , "record" , "register" , "reject" , "rem" , "report" , "return" , "rol" , "ror" , "select" , "severity" , "signal" , "shared" , "sla" , "sll" , "sra" , "srl" , "subtype" , "then" , "to" , "transport" , "type" , "unaffected" , "units" , "until" , "use" , "variable" , "wait" , "when" , "while" , "with" , "xnor" , "xor" , "access" , "after" , "alias" , "all" , "and" 
, "architecture" , "array" , "assert" , "attribute" , "begin" , "block" , "body" , "buffer" , "bus" , "case" , "component" , "configuration" , "constant" , "disconnect" , "downto" , "else" , "elsif" , "entity" , "exit" , "file" , "for" , "function" , "generate" , "generic" , "group" , "guarded" , "if" , "impure" , "in" , "inertial" , "inout" , "is" , "label" , "library" , "linkage" , "literal" , "loop" , "map" , "mod" , "nand" , "new" , "next" , "nor" , "not" , "null" , "of" , "on" , "open" , "or" , "others" , "out" , "package" , "port" , "postponed" , "procedure" , "process" , "pure" , "range" , "record" , "register" , "reject" , "rem" , "report" , "return" , "rol" , "ror" , "select" , "severity" , "signal" , "shared" , "sla" , "sll" , "sra" , "srl" , "subtype" , "then" , "to" , "transport" , "type" , "unaffected" , "units" , "until" , "use" , "variable" , "wait" , "when" , "while" , "with" , "xnor" , "xor" ] spaceConsumer :: Parser () spaceConsumer = L.space space1 (L.skipLineComment "--") empty {-# INLINEABLE spaceConsumer #-} reserved' :: S.HashSet T.Text -> T.Text -> Parser T.Text reserved' t w = lexeme $ try $ do r <- string' w <* notFollowedBy identChar unless (T.toLower r `S.member` t) $ fail (T.unpack r <> " is not a reserved word") return r {-# INLINEABLE reserved' #-} reserved :: T.Text -> Parser T.Text reserved = reserved' vhdlReserved {-# INLINEABLE reserved #-} charLiteral :: Parser CharacterLiteral charLiteral = antiQ AntiClit $ CLit <$> lexeme (char '\'' *> (char '"' <|> char '\\' <|> graphicalChar) <* char '\'') {-# INLINEABLE charLiteral #-} stringLiteral :: Parser StringLiteral stringLiteral = antiQ AntiSlit $ SLit <$> stringLiteral' {-# INLINEABLE stringLiteral #-} decimal, hexadecimal, octal :: Parser Integer hexadecimal = try (string' "0x") >> L.hexadecimal octal = try (string' "0o") >> L.octal decimal = L.decimal {-# INLINEABLE decimal #-} {-# INLINEABLE octal #-} {-# INLINEABLE hexadecimal #-} float :: Parser Double float = lexeme $ L.signed 
spaceConsumer L.float {-# INLINEABLE float #-} lexeme :: Parser a -> Parser a lexeme = L.lexeme spaceConsumer {-# INLINE lexeme #-} symbol :: T.Text -> Parser T.Text symbol = L.symbol spaceConsumer {-# INLINEABLE symbol #-} parens, braces, brackets :: Parser a -> Parser a parens = between (symbol "(") (symbol ")") braces = between (symbol "{") (symbol "}") brackets = between (symbol "[") (symbol "]") {-# INLINEABLE parens #-} {-# INLINEABLE braces #-} {-# INLINEABLE brackets #-} semi, comma, colon, dot :: Parser T.Text semi = symbol ";" comma = symbol "," colon = symbol ":" <* notFollowedBy (symbol "=") dot = symbol "." {-# INLINEABLE semi #-} {-# INLINEABLE comma #-} {-# INLINEABLE colon #-} {-# INLINEABLE dot #-} semiSep, semiSep1, commaSep, commaSep1 :: Parser a -> Parser [a] semiSep = flip sepBy semi semiSep1 = flip sepBy1 semi commaSep = flip sepBy comma commaSep1 = flip sepBy1 comma {-# INLINEABLE semiSep #-} {-# INLINEABLE commaSep #-} {-# INLINEABLE semiSep1 #-} {-# INLINEABLE commaSep1 #-} ------------------------------------------------------------------------------------ -- Antiquote parsing functions parseAntiExpr :: Parser String parseAntiExpr = firstPar where firstPar = do c <- char '(' cs <- rest 1 return (c : cs) rest :: Int -> Parser String rest 0 = return [] rest nest = do c <- anyChar case c of '"' -> do s <- hsString cs <- rest nest return ((c : s) ++ cs) '(' -> do cs <- rest (nest + 1) return (c : cs) ')' -> do cs <- rest (nest - 1) return (c : cs) _ -> do cs <- rest nest return (c : cs) hsString = do c <- anyChar case c of '\\' -- FIXME: is this sufficient? 
-> do c2 <- anyChar cs <- hsString return (c : c2 : cs) '"' -> return [c] _ -> do cs <- hsString return (c : cs) antiQ :: (Data a) => (String -> a) -> Parser a -> Parser a antiQ q p = try (lexeme parseQ) <|> p where parseQ = do _ <- char '$' let qn = toQQString $ q "" qs <- T.unpack <$> string (T.pack qn) _ <- char ':' identOrExpr <- optional $ lookAhead (char '(') i <- case identOrExpr of Just _ -> parseAntiExpr Nothing -- FIXME: Using VHDL reserved words are fine here -> identifier >>= \case (Ident e) -> pure (T.unpack e) (ExtendedIdent e) -> pure (T.unpack e) (AntiIdent e) -> pure e qe <- quotesEnabled unless qe $ fail "QuasiQuotation syntax not emabled" unless (qs == qn) $ fail $ "Wrong QuasiQuoter " <> qn <> " used in context" return $ q i {-# INLINEABLE antiQ #-} ------------------------------------------------------------------------------------ -- ** 15.4 Identifiers {- identifier ::= basic_identifier | extended_identifier -} identifier :: Parser Identifier identifier = lexeme $ antiQ AntiIdent (ExtendedIdent <$> extendedIdentifier <|> Ident <$> basicIdentifier <?> "identifier") {-# INLINE identifier #-} -- ** 15.4.2 Basic identifiers {- basic_identifier ::= letter { [ underline ] letter_or_digit } letter_or_digit ::= letter | digit letter ::= upper_case_letter | lower_case_letter -} identChar :: Parser Char identChar = alphaNumChar <|> char '_' {-# INLINE identChar #-} basicIdentifier :: Parser Text basicIdentifier = lexeme $ try $ do i <- part <|> string "_" <* notFollowedBy part when (T.toLower i `S.member` vhdlReserved) $ fail $ "Keyword " ++ T.unpack i ++ " used as identifier" return i where part = T.pack <$> ((:) <$> letterChar <*> many identChar) {-# INLINEABLE basicIdentifier #-} -- ** 15.4.3 Extended identifiers {- extended_identifier ::= \ graphic_character { graphic_character } \ -} extendedIdentifier :: Parser Text extendedIdentifier = T.pack <$> between (char '\\') (char '\\' <?> "end of extended identifier") (some (escapedBackslash <|> 
graphicalChar)) where escapedBackslash = do _ <- try (symbol "\\\\") return '\\' {-# INLINEABLE extendedIdentifier #-} -------------------------------------------------------------------------------- -- ** 15.5 Abstract-literal {- abstract_literal ::= decimal_literal | based_literal -} abstractLiteral :: Parser AbstractLiteral abstractLiteral = try (ALitBased <$> basedLiteral) <|> (ALitDecimal <$> decimalLiteral) {-# INLINEABLE abstractLiteral #-} ------------------------------------------------------------------------------------ -- ***15.5.2 Decimal literals -- -- I use Haskell's Integer to represent integers in VHDL. Its syntax seems to be -- slightly different though (the underline part). {- decimal_literal ::= integer [ . integer ] [ exponent ] integer ::= digit { [ underline ] digit } exponent ::= E [ + ] integer | E – integer -} decimalLiteral :: Parser DecimalLiteral decimalLiteral = DecimalLiteral <$> integer <*> optional (dot *> number) <*> optional exponent' {-# INLINEABLE decimalLiteral #-} integer :: Parser Integer integer = toInteger . (read :: String -> Integer) <$> number {-# INLINEABLE integer #-} number, hexNumber :: Parser String number = number' digitChar hexNumber = number' hexDigitChar {-# INLINEABLE number #-} {-# INLINEABLE hexNumber #-} number' :: Parser Char -> Parser String number' p = lexeme $ concat <$> some p `sepBy1` symbol "_" {-# INLINEABLE number' #-} -- We do this rather convoluted thing to avoid interpreting x = 3 ELSE as the -- beginning of an exponent exponent' :: Parser Exponent exponent' = try $ do _ <- char' 'e' lookAhead anyChar >>= \case '-' -> do _ <- anyChar ExponentNeg <$> integer '+' -> do _ <- anyChar ExponentPos <$> integer a -> if isDigit a then ExponentPos <$> integer else fail "Exponent not followed by digit" {-# INLINEABLE exponent' #-} -------------------------------------------------------------------------------- -- *** 15.5.3 {- based_literal ::= base # based_integer [ . 
based_integer ] # [ exponent ] base ::= integer based_integer ::= extended_digit { [ underline ] extended_digit } extended_digit ::= digit | letter -} -- TODO: VHDL allows for obscure character substitutions (# -> :). Consider -- dropping this basedLiteral :: Parser BasedLiteral basedLiteral = let sepSym = symbol "#" <|> symbol ":" in do b <- base <* sepSym unless (2 <= b && b <= 16) (fail "Base must be between 2 and 16") bi1 <- basedInteger b bi2 <- optional (dot *> hexNumber) _ <- sepSym e <- optional exponent' return $ BasedLiteral b bi1 bi2 e {-# INLINEABLE basedLiteral #-} base :: Parser Integer base = integer -- TODO: Probably case sensitive basedInteger :: Integer -> Parser BasedInteger basedInteger b = let b' = fromIntegral b in fromIntegral . fst . head . readInt b' ((< b') . digitToInt) digitToInt <$> hexNumber {-# INLINEABLE basedInteger #-} -------------------------------------------------------------------------------- -- *** 15.8 {- bit_string_literal ::= [ integer ] base_specifier " [ bit_value ] " bit_value ::= graphic_character { [ underline ] graphic_character } base_specifier ::= B | O | X | UB | UO | UX | SB | SO | SX | D -} bitStringLiteral :: Parser BitStringLiteral bitStringLiteral = BitStringLiteral <$> try (optional integer) <*> baseSpecifier <*> bitValue -- FIXME: Should we filter out _'s? bitValue :: Parser BitValue bitValue = BitValue <$> (SLit <$> (T.filter ('_' /=) <$> stringLiteral')) baseSpecifier :: Parser BaseSpecifier baseSpecifier = choice $ map (\(l, s) -> symbol l *> pure s) specMap where specMap = specMap' ++ map (first T.toLower) specMap' specMap' = [ ("B", BinaryBase) , ("O", OctalBase) , ("X", HexBase) , ("UB", UnsignedBinaryBase) , ("UO", UnsignedOctalBase) , ("UX", UnsignedHexBase) , ("SB", SignedBinaryBase) , ("SO", SignedHoctalBase) , ("SX", SignedHexBase) , ("D", Decimal) ] -- The following is somewhat inspired by the language module of parsec -- LRM08 15.7. 
Parses a string consisting of string segments and escape codes -- separated by & -- FIXME: Test escape codes stringLiteral' :: Parser Text stringLiteral' = lexeme (strSegment >>= rest) <?> "String lit" where rest ctx = choice [ try (symbol "&" >> strSegment) >>= (\s -> rest (ctx <> s)) , try (symbol "&" >> asciiCode) >>= (\s -> rest (ctx <> T.pack [s])) , pure ctx ] {-# INLINEABLE stringLiteral' #-} stringDelimiter :: Parser Text stringDelimiter = symbol "\"" <|> symbol "%" {-# INLINEABLE stringDelimiter #-} -- Parses a segment between " and " or % and % (obscure VHDL char replacement) strSegment :: Parser Text strSegment = strSegment' '%' <|> strSegment' '"' {-# INLINEABLE strSegment #-} strSegment' :: Char -> Parser Text strSegment' delim = T.pack <$> lexeme (between (char delim) (char delim <?> "end of string") (many (strChar delim)) <?> "literal string") {-# INLINEABLE strSegment' #-} -- "" in a string becomes literal " if string is between "" -- %% becomes % if string is between %% strChar :: Char -> Parser Char strChar delim = try $ (char delim >> char delim) <|> char '\\' <|> graphicalChar {-# INLINEABLE strChar #-} -- escape codes asciiCode :: Parser Char asciiCode = lexeme charAscii <?> "escape code" {-# INLINEABLE asciiCode #-} -- Parses names of unprintable ASCII chars such as ACK -- FIXME: This might be slow charAscii :: Parser Char charAscii = choice (map parseAscii asciiMap) where parseAscii :: (Text, Char) -> Parser Char parseAscii (asc, code) = try (string asc *> pure code) {-# INLINEABLE charAscii #-} -- LRM08 15.2. Values as defined by the CHARACTERS type in the STANDARD package graphicalChar :: Parser Char graphicalChar = oneOf gchars <?> "graphical character" where gchars = [ ' ' , '!' -- intentionally removed: , '"' , '#' , '$' , '%' , '&' , '\'' , '(' , ')' , '*' , '+' , ',' , '-' , '.' , '/' , '0' , '1' , '2' , '3' , '4' , '5' , '6' , '7' , '8' , '9' , ':' , ';' , '<' , '=' , '>' , '?' 
, '@' , 'A' , 'B' , 'C' , 'D' , 'E' , 'F' , 'G' , 'H' , 'I' , 'J' , 'K' , 'L' , 'M' , 'N' , 'O' , 'P' , 'Q' , 'R' , 'S' , 'T' , 'U' , 'V' , 'W' , 'X' , 'Y' , 'Z' , '[' -- , '\\' , ']' , '^' , '_' , '`' , 'a' , 'b' , 'c' , 'd' , 'e' , 'f' , 'g' , 'h' , 'i' , 'j' , 'k' , 'l' , 'm' , 'n' , 'o' , 'p' , 'q' , 'r' , 's' , 't' , 'u' , 'v' , 'w' , 'x' , 'y' , 'z' , '{' , '|' , '}' , '~' , ' ' , '¡' , '¢' , '£' , '¤' , '¥' , '¦' , '§' , '¨' , '©' , 'ª' , '«' , '¬' , '\173' -- - , '®' , '¯' , '°' , '±' , '²' , '³' , '´' , 'µ' , '¶' , '·' , '¸' , '¹' , 'º' , '»' , '¼' , '½' , '¾' , '¿' , 'À' , 'Á' , 'Â' , 'Ã' , 'Ä' , 'Å' , 'Æ' , 'Ç' , 'È' , 'É' , 'Ê' , 'Ë' , 'Ì' , 'Í' , 'Î' , 'Ï' , 'Ð' , 'Ñ' , 'Ò' , 'Ó' , 'Ô' , 'Õ' , 'Ö' , '×' , 'Ø' , 'Ù' , 'Ú' , 'Û' , 'Ü' , 'Ý' , 'Þ' , 'ß' , 'à' , 'á' , 'â' , 'ã' , 'ä' , 'å' , 'æ' , 'ç' , 'è' , 'é' , 'ê' , 'ë' , 'ì' , 'í' , 'î' , 'ï' , 'ð' , 'ñ' , 'ò' , 'ó' , 'ô' , 'õ' , 'ö' , '÷' , 'ø' , 'ù' , 'ú' , 'û' , 'ü' , 'ý' , 'þ' , 'ÿ' ] {-# INLINE graphicalChar #-} -- escape code tables asciiMap :: [(Text, Char)] asciiMap = zip (asciiNames ++ ascii2Names) (asciiCodes ++ ascii2Codes) asciiNames :: [Text] asciiNames = [ "NUL" , "SOH" , "STX" , "ETX" , "EOT" , "ENQ" , "ACK" , "BEL" , "BS" , "HT" , "LF" , "VT" , "FF" , "CR" , "SO" , "SI" , "DLE" , "DC1" , "DC2" , "DC3" , "DC4" , "NAK" , "SYN" , "ETB" , "CAN" , "EM" , "SUB" , "ESC" , "FSP" , "GSP" , "RSP" , "USP" , "DEL" ] asciiCodes :: String asciiCodes = [ '\NUL' , '\SOH' , '\STX' , '\ETX' , '\EOT' , '\ENQ' , '\ACK' , '\BEL' , '\BS' , '\HT' , '\LF' , '\VT' , '\FF' , '\CR' , '\SO' , '\SI' , '\DLE' , '\DC1' , '\DC2' , '\DC3' , '\DC4' , '\NAK' , '\SYN' , '\ETB' , '\CAN' , '\EM' , '\SUB' , '\ESC' , '\FS' , '\GS' , '\RS' , '\US' , '\DEL' ] ascii2Names :: [Text] ascii2Names = [ "C128" , "C129" , "C130" , "C131" , "C132" , "C133" , "C134" , "C135" , "C136" , "C137" , "C138" , "C139" , "C140" , "C141" , "C142" , "C143" , "C144" , "C145" , "C146" , "C147" , "C148" , "C149" , "C150" , "C151" , "C152" , "C153" , 
"C154" , "C155" , "C156" , "C157" , "C158" , "C159" ] ascii2Codes :: String ascii2Codes = map chr [ 128 , 129 , 130 , 131 , 132 , 133 , 134 , 135 , 136 , 137 , 138 , 139 , 140 , 141 , 142 , 143 , 144 , 145 , 146 , 147 , 148 , 149 , 150 , 151 , 152 , 153 , 154 , 155 , 156 , 157 , 158 , 159 ]
truls/language-vhdl-quote
src/Language/VHDL/Lexer.hs
mpl-2.0
19,020
0
20
6,054
4,613
2,649
1,964
799
7
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.Support.DescribeTrustedAdvisorCheckSummaries -- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | Returns the summaries of the results of the Trusted Advisor checks that have -- the specified check IDs. Check IDs can be obtained by calling 'DescribeTrustedAdvisorChecks'. -- -- The response contains an array of 'TrustedAdvisorCheckSummary' objects. 
-- -- <http://docs.aws.amazon.com/awssupport/latest/APIReference/API_DescribeTrustedAdvisorCheckSummaries.html> module Network.AWS.Support.DescribeTrustedAdvisorCheckSummaries ( -- * Request DescribeTrustedAdvisorCheckSummaries -- ** Request constructor , describeTrustedAdvisorCheckSummaries -- ** Request lenses , dtacsCheckIds -- * Response , DescribeTrustedAdvisorCheckSummariesResponse -- ** Response constructor , describeTrustedAdvisorCheckSummariesResponse -- ** Response lenses , dtacsrSummaries ) where import Network.AWS.Prelude import Network.AWS.Request.JSON import Network.AWS.Support.Types import qualified GHC.Exts newtype DescribeTrustedAdvisorCheckSummaries = DescribeTrustedAdvisorCheckSummaries { _dtacsCheckIds :: List "checkIds" Text } deriving (Eq, Ord, Read, Show, Monoid, Semigroup) instance GHC.Exts.IsList DescribeTrustedAdvisorCheckSummaries where type Item DescribeTrustedAdvisorCheckSummaries = Text fromList = DescribeTrustedAdvisorCheckSummaries . GHC.Exts.fromList toList = GHC.Exts.toList . _dtacsCheckIds -- | 'DescribeTrustedAdvisorCheckSummaries' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'dtacsCheckIds' @::@ ['Text'] -- describeTrustedAdvisorCheckSummaries :: DescribeTrustedAdvisorCheckSummaries describeTrustedAdvisorCheckSummaries = DescribeTrustedAdvisorCheckSummaries { _dtacsCheckIds = mempty } -- | The IDs of the Trusted Advisor checks. dtacsCheckIds :: Lens' DescribeTrustedAdvisorCheckSummaries [Text] dtacsCheckIds = lens _dtacsCheckIds (\s a -> s { _dtacsCheckIds = a }) . 
_List newtype DescribeTrustedAdvisorCheckSummariesResponse = DescribeTrustedAdvisorCheckSummariesResponse { _dtacsrSummaries :: List "summaries" TrustedAdvisorCheckSummary } deriving (Eq, Read, Show, Monoid, Semigroup) instance GHC.Exts.IsList DescribeTrustedAdvisorCheckSummariesResponse where type Item DescribeTrustedAdvisorCheckSummariesResponse = TrustedAdvisorCheckSummary fromList = DescribeTrustedAdvisorCheckSummariesResponse . GHC.Exts.fromList toList = GHC.Exts.toList . _dtacsrSummaries -- | 'DescribeTrustedAdvisorCheckSummariesResponse' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'dtacsrSummaries' @::@ ['TrustedAdvisorCheckSummary'] -- describeTrustedAdvisorCheckSummariesResponse :: DescribeTrustedAdvisorCheckSummariesResponse describeTrustedAdvisorCheckSummariesResponse = DescribeTrustedAdvisorCheckSummariesResponse { _dtacsrSummaries = mempty } -- | The summary information for the requested Trusted Advisor checks. dtacsrSummaries :: Lens' DescribeTrustedAdvisorCheckSummariesResponse [TrustedAdvisorCheckSummary] dtacsrSummaries = lens _dtacsrSummaries (\s a -> s { _dtacsrSummaries = a }) . 
_List instance ToPath DescribeTrustedAdvisorCheckSummaries where toPath = const "/" instance ToQuery DescribeTrustedAdvisorCheckSummaries where toQuery = const mempty instance ToHeaders DescribeTrustedAdvisorCheckSummaries instance ToJSON DescribeTrustedAdvisorCheckSummaries where toJSON DescribeTrustedAdvisorCheckSummaries{..} = object [ "checkIds" .= _dtacsCheckIds ] instance AWSRequest DescribeTrustedAdvisorCheckSummaries where type Sv DescribeTrustedAdvisorCheckSummaries = Support type Rs DescribeTrustedAdvisorCheckSummaries = DescribeTrustedAdvisorCheckSummariesResponse request = post "DescribeTrustedAdvisorCheckSummaries" response = jsonResponse instance FromJSON DescribeTrustedAdvisorCheckSummariesResponse where parseJSON = withObject "DescribeTrustedAdvisorCheckSummariesResponse" $ \o -> DescribeTrustedAdvisorCheckSummariesResponse <$> o .:? "summaries" .!= mempty
dysinger/amazonka
amazonka-support/gen/Network/AWS/Support/DescribeTrustedAdvisorCheckSummaries.hs
mpl-2.0
5,113
0
10
834
548
329
219
62
1
{-# LANGUAGE ScopedTypeVariables #-}

-- |
-- Soundness results for query resolution according to the paper 1
module CoALP.Sound
  ( res
  ) where

-- NOTE(review): the original import list named VR, Program and
-- GuardingContext twice each; the duplicates are removed here.
import CoALP.Program
  ( DerTree (..)
  , RewTree (..)
  , Trans (..)
  , AndNode (..)
  , OrNode (..)
  , Succ (..)
  , VR
  , Program
  , Clause
  , GuardingContext
  , Signature
  , lookupType
  , Type (..)
  )

import CoALP.DerTree (der)
import CoALP.Guards (guardingContext)
import CoALP.FreshVar (Freshable)

-- | Resolution on a clause
--
-- according to paper 1
--
-- Builds the derivation tree for the clause within the program and
-- processes it, collecting the successful observations.
res :: forall a b c . (Eq a, Show a, Show b, Show c, Ord a, Eq b, Ord c,
    Freshable c)
  => Program a b c -> Signature a -> Clause a b c -> [Succ a b c]
res p s c = resDerTree s [] dt
  where
    dt :: DerTree a b c VR
    dt = der p c

-- | Process a der tree and continue
--
-- Inductive successes of the rewriting tree are emitted first; the
-- transitions are then split by the signature: while any inductive
-- obligations remain those are processed, otherwise we may proceed
-- coinductively.
resDerTree :: (Show a, Show b, Show c, Ord a, Eq b, Ord c)
  => Signature a -> [GuardingContext a b c] -> DerTree a b c t
  -> [Succ a b c]
resDerTree sig gcs (DT rt trs) = indRes rt ++
  case separateTrs sig trs of
    ([], cotrs) -> concatMap (resCoIndTrans sig gcs) cotrs
    (indtrs, _) -> concatMap (resIndTrans sig gcs) indtrs

-- | Process a transition within a tree that still has
-- some unprocessed inductive obligations: extend the accumulated
-- guarding contexts and recurse into the sub-tree.
resIndTrans :: (Show a, Show b, Show c, Ord a, Eq b, Ord c)
  => Signature a -> [GuardingContext a b c] -> Trans a b c d
  -> [Succ a b c]
resIndTrans sig gcs (Trans p rt _ _ cx dt) = resDerTree sig (gc:gcs) dt
  where
    gc = guardingContext p rt cx

-- | Process a transition within a tree that has no inductive
-- obligations - therefore we can conclude coinductively as soon as a
-- non-empty guarding context repeats among the ones seen so far.
resCoIndTrans :: (Show a, Show b, Show c, Ord a, Eq b, Ord c)
  => Signature a -> [GuardingContext a b c] -> Trans a b c d
  -> [Succ a b c]
resCoIndTrans sig gcs (Trans p rt _ _ cx dt) =
  case (not $ null gc) && (gc `elem` gcs) of
    True  -> [rep rt]
    False -> resDerTree sig (gc:gcs) dt
  where
    gc = guardingContext p rt cx
    -- A transition never carries an empty rewriting tree at this point.
    rep RTEmpty    = error "impossible"
    rep (RT c _ _) = CoIndS c gc

-- | Separate transitions into inductive and coinductive obligations,
-- according to the type recorded in the signature for the transition's
-- head symbol.
separateTrs :: Ord a => Signature a -> [Trans a b c d]
  -> ([Trans a b c d], [Trans a b c d])
separateTrs sig trs = foldr f ([], []) trs
  where
    f t@(Trans _ _ _ i _ _) (as, bs) = case lookupType sig i of
      SInd   -> (t:as, bs)
      SCoInd -> (as, t:bs)

-- | Resolution on rew tree - inductive observations
--
-- TODO make into traversal over the tree
indRes :: RewTree a b c d -> [Succ a b c]
indRes RTEmpty = []
indRes (RT c _ ands) =
  if any hasSuccTreeAnd ands
    then [IndS c]
    else []
  where
    hasSuccTreeAnd (AndNode _ ors)   = any hasSuccTreeOr ors
    -- An or-node with no children is a trivially successful leaf;
    -- an unexpanded or-node is not a success.
    hasSuccTreeOr (OrNodeEmpty _)    = False
    hasSuccTreeOr (OrNode _ [])      = True
    hasSuccTreeOr (OrNode _ ands')   = all hasSuccTreeAnd ands'
frantisekfarka/CoALP
src/CoALP/Sound.hs
lgpl-3.0
2,804
132
8
649
1,221
664
557
73
4
{-
Copyrights (c) 2016. Samsung Electronics Ltd. All right reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}

{-# LANGUAGE ImplicitParams, TupleSections #-}

module MiniNet.MiniNet (generateMininetTopology, NodeMap) where

import Text.JSON
import Data.Maybe
import Control.Monad.State
import Data.List
import Numeric

import Topology
import Util
import Syntax

-- Grid pitch between rendered nodes, horizontal and vertical.
-- (Signatures added; the values were previously typed only via use sites.)
hstep :: Int
hstep = 100

vstep :: Int
vstep = 100

type Switches = [JSValue]
type Hosts    = [JSValue]

-- | Maps instance descriptors to the mininet node names assigned to them.
type NodeMap = [(InstanceDescr, String)]

-- | Render a topology as a mininet JSON description.
-- Returns the encoded JSON string together with the node-name map
-- produced while rendering.
generateMininetTopology :: Refine -> Topology -> (String, NodeMap)
generateMininetTopology r topology = (encode $ toJSObject attrs, nmap)
    where
    -- max number of nodes in a layer, scaled to pixels
    width = (maximum $ map (length . (uncurry instMapFlatten)) topology) * hstep
    -- render nodes
    (sws, hs, nmap) = execState (mapIdxM (renderNodes width) topology) ([],[],[])
    -- render links
    links = let ?r = r in
            let ?t = topology in
            mapMaybe (renderLink nmap) $ topologyLinks topology
    attrs = [ ("controllers", JSArray [])
            , ("hosts"      , JSArray hs)
            , ("switches"   , JSArray sws)
            , ("links"      , JSArray links)
            , ("version"    , JSRational False 2)
            ]

-- | Render all instances of one node kind, spread evenly across a row
-- at the given vertical offset (row index).
renderNodes :: Int -> (Node, InstanceMap PortLinks) -> Int -> State (Switches, Hosts, NodeMap) ()
renderNodes w (n, imap) voffset = do
    let nodes   = instMapFlatten n imap
        -- center the row: half a step of margin, then one step per node
        offset  = (w `div` length nodes) `div` 2
        step    = w `div` length nodes
        nodeoff = mapIdx (\nd i -> (nd, offset + i * step)) nodes
    mapM_ (renderNode voffset n) nodeoff

-- | Render a single node instance at the given horizontal offset,
-- appending it to the switch or host list and recording its name.
renderNode :: Int -> Node -> ((InstanceDescr, PortLinks), Int) -> State (Switches, Hosts, NodeMap) ()
renderNode voffset node ((descr, _), hoffset) = do
    (sws, hs, nmap) <- get
    -- Names are "s<i>" for switches and "h<i>" for hosts, numbered by
    -- how many of that kind have been rendered so far.
    let (letter, number) = if' (nodeType node == NodeSwitch) ("s", length sws) ("h", length hs)
        ndname = letter ++ show number
        -- NOTE(review): the host branch below uses `head`; it is guarded by
        -- the `not $ null $ idescKeys descr` check, but relies on that
        -- ordering — confirm keys layout (struct=IP first, 48-bit int=MAC).
        opts = [ ("controllers", JSArray [])
               , ("hostname"   , JSString $ toJSString ndname)
               , ("nodeNum"    , JSRational False $ fromIntegral number)
               , ("switchType" , JSString $ toJSString "bmv2")]
               ++
               if (nodeType node == NodeHost) && (not $ null $ idescKeys descr)
                  then case head $ idescKeys descr of
                            e@(EStruct _ _ _) -> [("ip4", JSString $ toJSString $ formatIP e)]
                            (EInt _ 48 m)     -> [("mac", JSString $ toJSString $ formatMAC m)] ++
                                                 if length (idescKeys descr) >= 2
                                                    then case idescKeys descr !! 1 of
                                                              e@(EStruct _ _ _) -> [("ip4", JSString $ toJSString $ formatIP e)]
                                                              _                 -> []
                                                    else []
                            _                 -> []
                  else []
        attrs = [ ("number", JSString $ toJSString $ show number)
                , ("opts"  , JSObject $ toJSObject opts)
                , ("x"     , JSString $ toJSString $ show $ hoffset)
                , ("y"     , JSString $ toJSString $ show $ (voffset + 1) * vstep)]
        n = JSObject $ toJSObject attrs
        nmap' = (descr, ndname):nmap
    put $ if' (nodeType node == NodeSwitch) ((n:sws), hs, nmap') (sws, (n:hs), nmap')

-- | Format a struct of integer fields as a dotted IPv4 string.
formatIP :: Expr -> String
formatIP (EStruct _ _ fs) = intercalate "." $ map (show . exprIVal) fs
formatIP e                = error $ "MiniNet.formatIP " ++ show e

-- | Format a 48-bit integer as a colon-separated hex MAC address,
-- most significant byte first.
formatMAC :: Integer -> String
formatMAC i = ( showHex b0 . colon
              . showHex b1 . colon
              . showHex b2 . colon
              . showHex b3 . colon
              . showHex b4 . colon
              . showHex b5 ) ""
    where colon = showString ":"
          b5 = bitSlice i 7  0
          b4 = bitSlice i 15 8
          b3 = bitSlice i 23 16
          b2 = bitSlice i 31 24
          b1 = bitSlice i 39 32
          b0 = bitSlice i 47 40

-- | Render one link as a JSON object.  Only links whose destination is
-- a port are rendered, and each undirected link is emitted once (from
-- its lexicographically smaller endpoint).
renderLink :: (?t::Topology,?r::Refine) => NodeMap -> (PortInstDescr, PortInstDescr) -> Maybe JSValue
renderLink nmap (srcport, dstport) =
    if isPort ?r $ pdescPort dstport
       then if (srcndname, srcpnum) < (dstndname, dstpnum)
               then Just $ JSObject $ toJSObject attrs
               else Nothing
       else Nothing
    where dstnode   = nodeFromPort ?r dstport
          srcnode   = nodeFromPort ?r srcport
          -- NOTE(review): `fromJust` assumes both endpoints were rendered
          -- into nmap by generateMininetTopology — confirm all linked
          -- nodes appear in the topology.
          srcndname = fromJust $ lookup srcnode nmap
          dstndname = fromJust $ lookup dstnode nmap
          dstpnum   = phyPortNum ?t dstnode (pdescPort dstport) (fromInteger $ exprIVal $ last $ pdescKeys dstport)
          srcpnum   = phyPortNum ?t srcnode (pdescPort srcport) (fromInteger $ exprIVal $ last $ pdescKeys srcport)
          attrs = [ ("src"     , JSString $ toJSString srcndname)
                  , ("srcport" , JSRational False $ fromIntegral $ srcpnum)
                  , ("dest"    , JSString $ toJSString dstndname)
                  , ("destport", JSRational False $ fromIntegral dstpnum)
                  , ("opts"    , JSObject $ toJSObject ([]::[(String, JSValue)]))]
ryzhyk/cocoon
cocoon/MiniNet/MiniNet.hs
apache-2.0
5,845
0
23
2,053
1,680
909
771
92
6
-----------------------------------------------------------------------------
-- Copyright 2019, Ideas project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer  :  bastiaan.heeren@ou.nl
-- Stability   :  provisional
-- Portability :  portable (depends on ghc)
--
-- Run a feedbackscript
--
-----------------------------------------------------------------------------

module Ideas.Service.FeedbackScript.Run
   ( Script
   , Environment(..), newEnvironment
   , feedbackDiagnosis, feedbackHint, feedbackHints
   , ruleToString, feedbackIds, attributeIds, conditionIds
   , eval
   ) where

import Data.List
import Data.Maybe
import Ideas.Common.Library hiding (ready, Environment)
import Ideas.Service.BasicServices
import Ideas.Service.Diagnose
import Ideas.Service.FeedbackScript.Syntax
import Ideas.Service.State

-- | Everything a script may refer to while producing feedback text.
data Environment a = Env
   { oldReady   :: Bool
   , expected   :: Maybe (Rule (Context a))
   , recognized :: Maybe (Rule (Context a))
   , motivation :: Maybe (Rule (Context a))
   , diffPair   :: Maybe (String, String)
   , before     :: Maybe Term
   , after      :: Maybe Term
   , afterText  :: Maybe String
   }

-- | Build an environment from a state, using the first applicable step
-- (if any) as the expected rule.
newEnvironment :: State a -> Maybe (Rule (Context a)) -> Environment a
newEnvironment st motivationRule = newEnvironmentFor st motivationRule step
 where
   step = case onefirst st of
             Left _  -> Nothing
             Right x -> Just x

-- | Build an environment from a state and an explicitly supplied step.
newEnvironmentFor :: State a -> Maybe (Rule (Context a))
                  -> Maybe ((Rule (Context a), b, c), State a) -> Environment a
newEnvironmentFor st motivationRule step = Env
   { oldReady   = finished st
   , expected   = fmap (\((x, _, _), _) -> x) step
   , motivation = motivationRule
   , recognized = Nothing
   , diffPair   = Nothing
   , before     = termOf st
   , after      = fmap snd step >>= termOf
   , afterText  = fmap snd step >>= textOf
   }
 where
   termOf s = fmap (`build` stateTerm s) (hasTermView (exercise s))
   textOf s = return $ prettyPrinter (exercise s) (stateTerm s)

-- | Evaluate a piece of text within a script.
toText :: Environment a -> Script -> Text -> Maybe Text
toText env script t = eval env script (Right t)

-- | Render a rule through the script, falling back to its identifier.
ruleToString :: Environment a -> Script -> Rule b -> String
ruleToString env script r =
   case eval env script (Left (getId r)) of
      Just txt -> show txt
      Nothing  -> showId r

-- | Core evaluator: resolve either an identifier or a text fragment
-- against the script's declarations and the current environment.
eval :: Environment a -> Script -> Either Id Text -> Maybe Text
eval env script = either (return . findIdRef) evalText
 where
   evalText :: Text -> Maybe Text
   evalText = fmap mconcat . mapM resolve . textItems
    where
      -- Attribute references are looked up in the environment; any
      -- other reference is resolved against the script's declarations.
      resolve (TextRef a)
         | a == expectedId   = fmap (findIdRef . getId) (expected env)
         | a == recognizedId = fmap (findIdRef . getId) (recognized env)
         | a == diffbeforeId = fmap (TextString . fst) (diffPair env)
         | a == diffafterId  = fmap (TextString . snd) (diffPair env)
         | a == beforeId     = fmap TextTerm (before env)
         | a == afterId      = fmap TextTerm (after env)
         | a == afterTextId  = fmap TextString (afterText env)
         | a == motivationId = fmap (findIdRef . getId) (motivation env)
         | otherwise         = findRef (== a)
      resolve t = Just t

   -- Evaluate a script condition against the environment.
   evalBool :: Condition -> Bool
   evalBool (RecognizedIs a) = maybe False (eqId a . getId) (recognized env)
   evalBool (MotivationIs a) = maybe False (eqId a . getId) (motivation env)
   evalBool (CondNot c)      = not (evalBool c)
   evalBool (CondConst b)    = b
   evalBool (CondRef a)
      | a == oldreadyId        = oldReady env
      | a == hasexpectedId     = isJust (expected env)
      | a == hasrecognizedId   = isJust (recognized env)
      | a == hasmotivationId   = isJust (motivation env)
      | a == recognizedbuggyId = maybe False isBuggy (recognized env)
      | otherwise              = False

   -- The empty namespace plus all namespaces declared in the script.
   namespaces = nub $ mempty : [ a | NameSpace as <- scriptDecls script, a <- as ]

   -- equality with namespaces
   eqId :: Id -> Id -> Bool
   eqId a b = any (\n -> n # a == b) namespaces

   -- Resolve an identifier, falling back to showing it verbatim.
   findIdRef :: Id -> Text
   findIdRef x = fromMaybe (TextString (showId x)) (findRef (`eqId` x))

   -- First declaration whose name matches and whose guard holds.
   findRef :: (Id -> Bool) -> Maybe Text
   findRef p = listToMaybe $ catMaybes
      [ evalText t | (as, c, t) <- allDecls, any p as && evalBool c ]

   -- Flatten declarations into (names, guard, text) triples.
   allDecls = concatMap triples (scriptDecls script)
    where
      triples (Simple _ as t)   = [ (as, CondConst True, t) ]
      triples (Guarded _ as xs) = [ (as, c, t) | (c, t) <- xs ]
      triples _                 = []

-- | Turn a diagnosis into feedback text, dispatching on the kind of
-- diagnosis and recording the recognized rule where one is available.
feedbackDiagnosis :: Diagnosis a -> Environment a -> Script -> Text
feedbackDiagnosis diagnosis env =
   case diagnosis of
      SyntaxError s    -> const (makeText s)
      Buggy _ r        -> makeWrong "buggy"     env {recognized = Just r}
      NotEquivalent s  -> makeNotEq s "noteq"   env
      Expected _ _ r   -> makeOk    "ok"        env {recognized = Just r}
      WrongRule _ _ mr -> makeWrong "wrongrule" env {recognized = mr}
      Similar _ _ mr   -> makeOk    "same"      env {recognized = mr}
      Detour _ _ _ r   -> makeOk    "detour"    env {recognized = Just r}
      Correct _ _      -> makeOk    "correct"   env
      Unknown _ _      -> makeOk    "unknown"   env
 where
   makeOk      = makeDefault "Well done!"
   makeWrong   = makeDefault "This is incorrect."
   makeNotEq s = if null s then makeWrong else makeDefault s
   -- Use the script's text when it produces one, the default otherwise.
   makeDefault dt s e = fromMaybe (TextString dt) . make (newId s) e

-- | Hint text for the given feedback category, with a fallback.
feedbackHint :: Id -> Environment a -> Script -> Text
feedbackHint feedbackId env script =
   fromMaybe (defaultHint env script) (make feedbackId env script)

-- | Hint texts for each of the given next steps.
feedbackHints :: Id -> [((Rule (Context a), b, c), State a)] -> State a
              -> Maybe (Rule (Context a)) -> Script -> [Text]
feedbackHints feedbackId nexts state motivationRule script =
   [ fromMaybe (defaultHint env script) (make feedbackId env script)
   | env <- map (newEnvironmentFor state motivationRule . Just) nexts
   ]

-- | Fallback hint: name the expected rule, or apologise.
defaultHint :: Environment a -> Script -> Text
defaultHint env script = makeText $
   maybe "Sorry, no hint available." (ruleToString env script) (expected env)

-- | Evaluate a single feedback reference through the script.
make :: Id -> Environment a -> Script -> Maybe Text
make feedbackId env script = toText env script (TextRef feedbackId)

feedbackIds :: [Id]
feedbackIds = map newId
   ["same", "noteq", "correct", "unknown", "ok", "buggy", "detour",
    "wrongrule", "hint", "step", "label"]

attributeIds :: [Id]
attributeIds =
   [expectedId, recognizedId, diffbeforeId, diffafterId, beforeId,
    afterId, afterTextId, motivationId]

conditionIds :: [Id]
conditionIds =
   [oldreadyId, hasexpectedId, hasrecognizedId, hasmotivationId,
    recognizedbuggyId]

expectedId, recognizedId, diffbeforeId, diffafterId, beforeId,
   afterId, afterTextId, motivationId :: Id
expectedId   = newId "expected"
recognizedId = newId "recognized"
diffbeforeId = newId "diffbefore"
diffafterId  = newId "diffafter"
beforeId     = newId "before"
afterId      = newId "after"
afterTextId  = newId "aftertext"
motivationId = newId "motivation"

oldreadyId, hasexpectedId, hasrecognizedId, hasmotivationId,
   recognizedbuggyId :: Id
oldreadyId        = newId "oldready"
hasexpectedId     = newId "hasexpected"
hasrecognizedId   = newId "hasrecognized"
hasmotivationId   = newId "hasmotivation"
recognizedbuggyId = newId "recognizedbuggy"
ideas-edu/ideas
src/Ideas/Service/FeedbackScript/Run.hs
apache-2.0
7,515
0
14
1,890
2,465
1,278
1,187
139
10
-------------------------------------------------------------------------------
-- Experimental test for evaluating Queues performance
--
-- Data Structures. Grado en Informática. UMA.
-- Pepe Gallardo, 2012
-------------------------------------------------------------------------------

module Demos.Queue.QueuesPerformance where

import DataStructures.Util.Random
import DataStructures.Queue.TwoListsQueue -- LinearQueue
import System.CPUTime

data Operation = Enqueue | Dequeue

-- | Infinite stream of pseudo-random operations for a given seed.
-- on average, do 2 enqueues for each dequeue
randomOperations :: Seed -> [Operation]
randomOperations s = randomsIn [Enqueue, Enqueue, Dequeue] s

-- forces queue evaluation by summing its elements
sumQ :: (Num a) => Queue a -> a
sumQ q
  | isEmpty q = 0
  | otherwise = first q + sumQ (dequeue q)

-- | Run @n@ random operations for seed @s@ on an initially empty queue
-- and force the result.
-- NOTE(review): foldr consumes the operation list right-to-left; a left
-- fold would replay the operations in generation order — confirm intent
-- (for this benchmark the workload size is the same either way).
test :: Seed -> Int -> Int
test s n = sumQ (foldr simulate empty (take n (randomOperations s)))

-- | Apply one operation to the queue; dequeue on an empty queue is a no-op.
simulate :: Operation -> Queue Int -> Queue Int
simulate Enqueue q = enqueue 0 q
simulate Dequeue q = if isEmpty q then q else dequeue q

-- | Run the benchmark and report the average CPU time per test.
-- (Type signature added; it was missing on this top-level binding.)
main :: IO ()
main = do
  let tests = 10
  let numOperations = 10000
  t0 <- getCPUTime
  let xs = [ test s numOperations | s <- [0..tests-1] ]
  print (sum xs) -- force evaluation
  t1 <- getCPUTime
  let average = toSecs (t1-t0) / fromIntegral tests
  putStrLn ("Tests took "++ show average ++ " secs on average")

-- | Convert picoseconds (the unit of 'getCPUTime') to seconds.
toSecs :: Integer -> Double
toSecs x = fromIntegral x / 10^12
Saeron/haskell
data.structures/haskell/Demos/Queue/QueuesPerformance.hs
apache-2.0
1,417
0
14
268
406
207
199
27
2
----------------------------------------------------------------------------- -- | -- Module : Text.PrettyPrint.HughesPJ -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : provisional -- Portability : portable -- -- John Hughes's and Simon Peyton Jones's Pretty Printer Combinators -- -- Based on /The Design of a Pretty-printing Library/ -- in Advanced Functional Programming, -- Johan Jeuring and Erik Meijer (eds), LNCS 925 -- <http://www.cs.chalmers.se/~rjmh/Papers/pretty.ps> -- -- Heavily modified by Simon Peyton Jones, Dec 96 -- ----------------------------------------------------------------------------- {- Version 3.0 28 May 1997 * Cured massive performance bug. If you write foldl <> empty (map (text.show) [1..10000]) you get quadratic behaviour with V2.0. Why? For just the same reason as you get quadratic behaviour with left-associated (++) chains. This is really bad news. One thing a pretty-printer abstraction should certainly guarantee is insensivity to associativity. It matters: suddenly GHC's compilation times went up by a factor of 100 when I switched to the new pretty printer. I fixed it with a bit of a hack (because I wanted to get GHC back on the road). I added two new constructors to the Doc type, Above and Beside: <> = Beside $$ = Above Then, where I need to get to a "TextBeside" or "NilAbove" form I "force" the Doc to squeeze out these suspended calls to Beside and Above; but in so doing I re-associate. It's quite simple, but I'm not satisfied that I've done the best possible job. I'll send you the code if you are interested. * Added new exports: punctuate, hang int, integer, float, double, rational, lparen, rparen, lbrack, rbrack, lbrace, rbrace, * fullRender's type signature has changed. 
Rather than producing a string it now takes an extra couple of arguments that tells it how to glue fragments of output together: fullRender :: Mode -> Int -- Line length -> Float -- Ribbons per line -> (TextDetails -> a -> a) -- What to do with text -> a -- What to do at the end -> Doc -> a -- Result The "fragments" are encapsulated in the TextDetails data type: data TextDetails = Chr Char | Str String | PStr FAST_STRING The Chr and Str constructors are obvious enough. The PStr constructor has a packed string (FAST_STRING) inside it. It's generated by using the new "ptext" export. An advantage of this new setup is that you can get the renderer to do output directly (by passing in a function of type (TextDetails -> IO () -> IO ()), rather than producing a string that you then print. Version 2.0 24 April 1997 * Made empty into a left unit for <> as well as a right unit; it is also now true that nest k empty = empty which wasn't true before. * Fixed an obscure bug in sep that occassionally gave very weird behaviour * Added $+$ * Corrected and tidied up the laws and invariants ====================================================================== Relative to John's original paper, there are the following new features: 1. There's an empty document, "empty". It's a left and right unit for both <> and $$, and anywhere in the argument list for sep, hcat, hsep, vcat, fcat etc. It is Really Useful in practice. 2. There is a paragraph-fill combinator, fsep, that's much like sep, only it keeps fitting things on one line until it can't fit any more. 3. Some random useful extra combinators are provided. 
<+> puts its arguments beside each other with a space between them, unless either argument is empty in which case it returns the other hcat is a list version of <> hsep is a list version of <+> vcat is a list version of $$ sep (separate) is either like hsep or like vcat, depending on what fits cat behaves like sep, but it uses <> for horizontal conposition fcat behaves like fsep, but it uses <> for horizontal conposition These new ones do the obvious things: char, semi, comma, colon, space, parens, brackets, braces, quotes, doubleQuotes 4. The "above" combinator, $$, now overlaps its two arguments if the last line of the top argument stops before the first line of the second begins. For example: text "hi" $$ nest 5 (text "there") lays out as hi there rather than hi there There are two places this is really useful a) When making labelled blocks, like this: Left -> code for left Right -> code for right LongLongLongLabel -> code for longlonglonglabel The block is on the same line as the label if the label is short, but on the next line otherwise. b) When laying out lists like this: [ first , second , third ] which some people like. But if the list fits on one line you want [first, second, third]. You can't do this with John's original combinators, but it's quite easy with the new $$. The combinator $+$ gives the original "never-overlap" behaviour. 5. Several different renderers are provided: * a standard one * one that uses cut-marks to avoid deeply-nested documents simply piling up in the right-hand margin * one that ignores indentation (fewer chars output; good for machines) * one that ignores indentation and newlines (ditto, only more so) 6. 
Numerous implementation tidy-ups Use of unboxed data types to speed up the implementation -} module Text.PrettyPrint.HughesPJ ( -- * The document type Doc, -- Abstract -- * Constructing documents -- ** Converting values into documents char, text, ptext, int, integer, float, double, rational, -- ** Simple derived documents semi, comma, colon, space, equals, lparen, rparen, lbrack, rbrack, lbrace, rbrace, -- ** Wrapping documents in delimiters parens, brackets, braces, quotes, doubleQuotes, -- ** Combining documents empty, (<>), (<+>), hcat, hsep, ($$), ($+$), vcat, sep, cat, fsep, fcat, nest, hang, punctuate, -- * Predicates on documents isEmpty, -- * Rendering documents -- ** Default rendering render, -- ** Rendering with a particular style Style(..), style, renderStyle, -- ** General rendering fullRender, Mode(..), TextDetails(..), ) where import Prelude infixl 6 <> infixl 6 <+> infixl 5 $$, $+$ -- --------------------------------------------------------------------------- -- The interface -- The primitive Doc values isEmpty :: Doc -> Bool; -- ^ Returns 'True' if the document is empty -- | The empty document, with no height and no width. -- 'empty' is the identity for '<>', '<+>', '$$' and '$+$', and anywhere -- in the argument list for 'sep', 'hcat', 'hsep', 'vcat', 'fcat' etc. empty :: Doc semi :: Doc; -- ^ A ';' character comma :: Doc; -- ^ A ',' character colon :: Doc; -- ^ A ':' character space :: Doc; -- ^ A space character equals :: Doc; -- ^ A '=' character lparen :: Doc; -- ^ A '(' character rparen :: Doc; -- ^ A ')' character lbrack :: Doc; -- ^ A '[' character rbrack :: Doc; -- ^ A ']' character lbrace :: Doc; -- ^ A '{' character rbrace :: Doc; -- ^ A '}' character -- | A document of height and width 1, containing a literal character. char :: Char -> Doc -- | A document of height 1 containing a literal string. 
-- 'text' satisfies the following laws: -- -- * @'text' s '<>' 'text' t = 'text' (s'++'t)@ -- -- * @'text' \"\" '<>' x = x@, if @x@ non-empty -- -- The side condition on the last law is necessary because @'text' \"\"@ -- has height 1, while 'empty' has no height. text :: String -> Doc -- | An obsolete function, now identical to 'text'. ptext :: String -> Doc int :: Int -> Doc; -- ^ @int n = text (show n)@ integer :: Integer -> Doc; -- ^ @integer n = text (show n)@ float :: Float -> Doc; -- ^ @float n = text (show n)@ double :: Double -> Doc; -- ^ @double n = text (show n)@ rational :: Rational -> Doc; -- ^ @rational n = text (show n)@ parens :: Doc -> Doc; -- ^ Wrap document in @(...)@ brackets :: Doc -> Doc; -- ^ Wrap document in @[...]@ braces :: Doc -> Doc; -- ^ Wrap document in @{...}@ quotes :: Doc -> Doc; -- ^ Wrap document in @\'...\'@ doubleQuotes :: Doc -> Doc; -- ^ Wrap document in @\"...\"@ -- Combining @Doc@ values -- | Beside. -- '<>' is associative, with identity 'empty'. (<>) :: Doc -> Doc -> Doc -- | Beside, separated by space, unless one of the arguments is 'empty'. -- '<+>' is associative, with identity 'empty'. (<+>) :: Doc -> Doc -> Doc -- | Above, except that if the last line of the first argument stops -- at least one position before the first line of the second begins, -- these two lines are overlapped. For example: -- -- > text "hi" $$ nest 5 (text "there") -- -- lays out as -- -- > hi there -- -- rather than -- -- > hi -- > there -- -- '$$' is associative, with identity 'empty', and also satisfies -- -- * @(x '$$' y) '<>' z = x '$$' (y '<>' z)@, if @y@ non-empty. -- ($$) :: Doc -> Doc -> Doc -- | Above, with no overlapping. -- '$+$' is associative, with identity 'empty'. ($+$) :: Doc -> Doc -> Doc hcat :: [Doc] -> Doc; -- ^List version of '<>'. hsep :: [Doc] -> Doc; -- ^List version of '<+>'. vcat :: [Doc] -> Doc; -- ^List version of '$$'. cat :: [Doc] -> Doc; -- ^ Either 'hcat' or 'vcat'. sep :: [Doc] -> Doc; -- ^ Either 'hsep' or 'vcat'. 
fcat :: [Doc] -> Doc; -- ^ \"Paragraph fill\" version of 'cat'. fsep :: [Doc] -> Doc; -- ^ \"Paragraph fill\" version of 'sep'. -- | Nest (or indent) a document by a given number of positions -- (which may also be negative). 'nest' satisfies the laws: -- -- * @'nest' 0 x = x@ -- -- * @'nest' k ('nest' k' x) = 'nest' (k+k') x@ -- -- * @'nest' k (x '<>' y) = 'nest' k z '<>' 'nest' k y@ -- -- * @'nest' k (x '$$' y) = 'nest' k x '$$' 'nest' k y@ -- -- * @'nest' k 'empty' = 'empty'@ -- -- * @x '<>' 'nest' k y = x '<>' y@, if @x@ non-empty -- -- The side condition on the last law is needed because -- 'empty' is a left identity for '<>'. nest :: Int -> Doc -> Doc -- GHC-specific ones. -- | @hang d1 n d2 = sep [d1, nest n d2]@ hang :: Doc -> Int -> Doc -> Doc -- | @punctuate p [d1, ... dn] = [d1 \<> p, d2 \<> p, ... dn-1 \<> p, dn]@ punctuate :: Doc -> [Doc] -> [Doc] -- Displaying @Doc@ values. instance Show Doc where showsPrec prec doc cont = showDoc doc cont -- | Renders the document as a string using the default 'style'. render :: Doc -> String -- | The general rendering interface. fullRender :: Mode -- ^Rendering mode -> Int -- ^Line length -> Float -- ^Ribbons per line -> (TextDetails -> a -> a) -- ^What to do with text -> a -- ^What to do at the end -> Doc -- ^The document -> a -- ^Result -- | Render the document as a string using a specified style. renderStyle :: Style -> Doc -> String -- | A rendering style. data Style = Style { mode :: Mode -- ^ The rendering mode , lineLength :: Int -- ^ Length of line, in chars , ribbonsPerLine :: Float -- ^ Ratio of ribbon length to line length } -- | The default style (@mode=PageMode, lineLength=100, ribbonsPerLine=1.5@). style :: Style style = Style { lineLength = 100, ribbonsPerLine = 1.5, mode = PageMode } -- | Rendering mode. 
data Mode = PageMode -- ^Normal | ZigZagMode -- ^With zig-zag cuts | LeftMode -- ^No indentation, infinitely long lines | OneLineMode -- ^All on one line -- --------------------------------------------------------------------------- -- The Doc calculus -- The Doc combinators satisfy the following laws: {- Laws for $$ ~~~~~~~~~~~ <a1> (x $$ y) $$ z = x $$ (y $$ z) <a2> empty $$ x = x <a3> x $$ empty = x ...ditto $+$... Laws for <> ~~~~~~~~~~~ <b1> (x <> y) <> z = x <> (y <> z) <b2> empty <> x = empty <b3> x <> empty = x ...ditto <+>... Laws for text ~~~~~~~~~~~~~ <t1> text s <> text t = text (s++t) <t2> text "" <> x = x, if x non-empty Laws for nest ~~~~~~~~~~~~~ <n1> nest 0 x = x <n2> nest k (nest k' x) = nest (k+k') x <n3> nest k (x <> y) = nest k z <> nest k y <n4> nest k (x $$ y) = nest k x $$ nest k y <n5> nest k empty = empty <n6> x <> nest k y = x <> y, if x non-empty ** Note the side condition on <n6>! It is this that ** makes it OK for empty to be a left unit for <>. Miscellaneous ~~~~~~~~~~~~~ <m1> (text s <> x) $$ y = text s <> ((text "" <> x)) $$ nest (-length s) y) <m2> (x $$ y) <> z = x $$ (y <> z) if y non-empty Laws for list versions ~~~~~~~~~~~~~~~~~~~~~~ <l1> sep (ps++[empty]++qs) = sep (ps ++ qs) ...ditto hsep, hcat, vcat, fill... <l2> nest k (sep ps) = sep (map (nest k) ps) ...ditto hsep, hcat, vcat, fill... 
Laws for oneLiner ~~~~~~~~~~~~~~~~~ <o1> oneLiner (nest k p) = nest k (oneLiner p) <o2> oneLiner (x <> y) = oneLiner x <> oneLiner y You might think that the following verion of <m1> would be neater: <3 NO> (text s <> x) $$ y = text s <> ((empty <> x)) $$ nest (-length s) y) But it doesn't work, for if x=empty, we would have text s $$ y = text s <> (empty $$ nest (-length s) y) = text s <> nest (-length s) y -} -- --------------------------------------------------------------------------- -- Simple derived definitions semi = char ';' colon = char ':' comma = char ',' space = char ' ' equals = char '=' lparen = char '(' rparen = char ')' lbrack = char '[' rbrack = char ']' lbrace = char '{' rbrace = char '}' int n = text (show n) integer n = text (show n) float n = text (show n) double n = text (show n) rational n = text (show n) -- SIGBJORN wrote instead: -- rational n = text (show (fromRationalX n)) quotes p = char '\'' <> p <> char '\'' doubleQuotes p = char '"' <> p <> char '"' parens p = char '(' <> p <> char ')' brackets p = char '[' <> p <> char ']' braces p = char '{' <> p <> char '}' hcat = foldr (<>) empty hsep = foldr (<+>) empty vcat = foldr ($$) empty hang d1 n d2 = sep [d1, nest n d2] punctuate p [] = [] punctuate p (d:ds) = go d ds where go d [] = [d] go d (e:es) = (d <> p) : go e es -- --------------------------------------------------------------------------- -- The Doc data type -- A Doc represents a *set* of layouts. A Doc with -- no occurrences of Union or NoDoc represents just one layout. -- | The abstract type of documents. -- The 'Show' instance is equivalent to using 'render'. 
data Doc = Empty -- empty | NilAbove Doc -- text "" $$ x | TextBeside TextDetails !Int Doc -- text s <> x | Nest !Int Doc -- nest k x | Union Doc Doc -- ul `union` ur | NoDoc -- The empty set of documents | Beside Doc Bool Doc -- True <=> space between | Above Doc Bool Doc -- True <=> never overlap type RDoc = Doc -- RDoc is a "reduced Doc", guaranteed not to have a top-level Above or Beside reduceDoc :: Doc -> RDoc reduceDoc (Beside p g q) = beside p g (reduceDoc q) reduceDoc (Above p g q) = above p g (reduceDoc q) reduceDoc p = p data TextDetails = Chr Char | Str String | PStr String space_text = Chr ' ' nl_text = Chr '\n' {- Here are the invariants: * The argument of NilAbove is never Empty. Therefore a NilAbove occupies at least two lines. * The arugment of @TextBeside@ is never @Nest@. * The layouts of the two arguments of @Union@ both flatten to the same string. * The arguments of @Union@ are either @TextBeside@, or @NilAbove@. * The right argument of a union cannot be equivalent to the empty set (@NoDoc@). If the left argument of a union is equivalent to the empty set (@NoDoc@), then the @NoDoc@ appears in the first line. * An empty document is always represented by @Empty@. It can't be hidden inside a @Nest@, or a @Union@ of two @Empty@s. * The first line of every layout in the left argument of @Union@ is longer than the first line of any layout in the right argument. (1) ensures that the left argument has a first line. In view of (3), this invariant means that the right argument must have at least two lines. 
-} -- Arg of a NilAbove is always an RDoc nilAbove_ p = NilAbove p -- Arg of a TextBeside is always an RDoc textBeside_ s sl p = TextBeside s sl p -- Arg of Nest is always an RDoc nest_ k p = Nest k p -- Args of union are always RDocs union_ p q = Union p q -- Notice the difference between -- * NoDoc (no documents) -- * Empty (one empty document; no height and no width) -- * text "" (a document containing the empty string; -- one line high, but has no width) -- --------------------------------------------------------------------------- -- @empty@, @text@, @nest@, @union@ empty = Empty isEmpty Empty = True isEmpty _ = False char c = textBeside_ (Chr c) 1 Empty text s = case length s of {sl -> textBeside_ (Str s) sl Empty} ptext s = case length s of {sl -> textBeside_ (PStr s) sl Empty} nest k p = mkNest k (reduceDoc p) -- Externally callable version -- mkNest checks for Nest's invariant that it doesn't have an Empty inside it mkNest k _ | k `seq` False = undefined mkNest k (Nest k1 p) = mkNest (k + k1) p mkNest k NoDoc = NoDoc mkNest k Empty = Empty mkNest 0 p = p -- Worth a try! 
mkNest k p = nest_ k p -- mkUnion checks for an empty document mkUnion Empty q = Empty mkUnion p q = p `union_` q -- --------------------------------------------------------------------------- -- Vertical composition @$$@ above_ :: Doc -> Bool -> Doc -> Doc above_ p _ Empty = p above_ Empty _ q = q above_ p g q = Above p g q p $$ q = above_ p False q p $+$ q = above_ p True q above :: Doc -> Bool -> RDoc -> RDoc above (Above p g1 q1) g2 q2 = above p g1 (above q1 g2 q2) above p@(Beside _ _ _) g q = aboveNest (reduceDoc p) g 0 (reduceDoc q) above p g q = aboveNest p g 0 (reduceDoc q) aboveNest :: RDoc -> Bool -> Int -> RDoc -> RDoc -- Specfication: aboveNest p g k q = p $g$ (nest k q) aboveNest _ _ k _ | k `seq` False = undefined aboveNest NoDoc g k q = NoDoc aboveNest (p1 `Union` p2) g k q = aboveNest p1 g k q `union_` aboveNest p2 g k q aboveNest Empty g k q = mkNest k q aboveNest (Nest k1 p) g k q = nest_ k1 (aboveNest p g (k - k1) q) -- p can't be Empty, so no need for mkNest aboveNest (NilAbove p) g k q = nilAbove_ (aboveNest p g k q) aboveNest (TextBeside s sl p) g k q = k1 `seq` textBeside_ s sl rest where k1 = k - sl rest = case p of Empty -> nilAboveNest g k1 q other -> aboveNest p g k1 q nilAboveNest :: Bool -> Int -> RDoc -> RDoc -- Specification: text s <> nilaboveNest g k q -- = text s <> (text "" $g$ nest k q) nilAboveNest _ k _ | k `seq` False = undefined nilAboveNest g k Empty = Empty -- Here's why the "text s <>" is in the spec! 
nilAboveNest g k (Nest k1 q) = nilAboveNest g (k + k1) q nilAboveNest g k q | (not g) && (k > 0) -- No newline if no overlap = textBeside_ (Str (spaces k)) k q | otherwise -- Put them really above = nilAbove_ (mkNest k q) -- --------------------------------------------------------------------------- -- Horizontal composition @<>@ beside_ :: Doc -> Bool -> Doc -> Doc beside_ p _ Empty = p beside_ Empty _ q = q beside_ p g q = Beside p g q p <> q = beside_ p False q p <+> q = beside_ p True q beside :: Doc -> Bool -> RDoc -> RDoc -- Specification: beside g p q = p <g> q beside NoDoc g q = NoDoc beside (p1 `Union` p2) g q = (beside p1 g q) `union_` (beside p2 g q) beside Empty g q = q beside (Nest k p) g q = nest_ k (beside p g q) -- p non-empty beside p@(Beside p1 g1 q1) g2 q2 {- (A `op1` B) `op2` C == A `op1` (B `op2` C) iff op1 == op2 [ && (op1 == <> || op1 == <+>) ] -} | g1 == g2 = beside p1 g1 (beside q1 g2 q2) | otherwise = beside (reduceDoc p) g2 q2 beside p@(Above _ _ _) g q = beside (reduceDoc p) g q beside (NilAbove p) g q = nilAbove_ (beside p g q) beside (TextBeside s sl p) g q = textBeside_ s sl rest where rest = case p of Empty -> nilBeside g q other -> beside p g q nilBeside :: Bool -> RDoc -> RDoc -- Specification: text "" <> nilBeside g p -- = text "" <g> p nilBeside g Empty = Empty -- Hence the text "" in the spec nilBeside g (Nest _ p) = nilBeside g p nilBeside g p | g = textBeside_ space_text 1 p | otherwise = p -- --------------------------------------------------------------------------- -- Separate, @sep@, Hughes version -- Specification: sep ps = oneLiner (hsep ps) -- `union` -- vcat ps sep = sepX True -- Separate with spaces cat = sepX False -- Don't sepX x [] = empty sepX x (p:ps) = sep1 x (reduceDoc p) 0 ps -- Specification: sep1 g k ys = sep (x : map (nest k) ys) -- = oneLiner (x <g> nest k (hsep ys)) -- `union` x $$ nest k (vcat ys) sep1 :: Bool -> RDoc -> Int -> [Doc] -> RDoc sep1 g _ k ys | k `seq` False = undefined sep1 g NoDoc k ys = 
NoDoc sep1 g (p `Union` q) k ys = sep1 g p k ys `union_` (aboveNest q False k (reduceDoc (vcat ys))) sep1 g Empty k ys = mkNest k (sepX g ys) sep1 g (Nest n p) k ys = nest_ n (sep1 g p (k - n) ys) sep1 g (NilAbove p) k ys = nilAbove_ (aboveNest p False k (reduceDoc (vcat ys))) sep1 g (TextBeside s sl p) k ys = textBeside_ s sl (sepNB g p (k - sl) ys) -- Specification: sepNB p k ys = sep1 (text "" <> p) k ys -- Called when we have already found some text in the first item -- We have to eat up nests sepNB g (Nest _ p) k ys = sepNB g p k ys sepNB g Empty k ys = oneLiner (nilBeside g (reduceDoc rest)) `mkUnion` nilAboveNest False k (reduceDoc (vcat ys)) where rest | g = hsep ys | otherwise = hcat ys sepNB g p k ys = sep1 g p k ys -- --------------------------------------------------------------------------- -- @fill@ fsep = fill True fcat = fill False -- Specification: -- fill [] = empty -- fill [p] = p -- fill (p1:p2:ps) = oneLiner p1 <#> nest (length p1) -- (fill (oneLiner p2 : ps)) -- `union` -- p1 $$ fill ps fill g [] = empty fill g (p:ps) = fill1 g (reduceDoc p) 0 ps fill1 :: Bool -> RDoc -> Int -> [Doc] -> Doc fill1 g _ k ys | k `seq` False = undefined fill1 g NoDoc k ys = NoDoc fill1 g (p `Union` q) k ys = fill1 g p k ys `union_` (aboveNest q False k (fill g ys)) fill1 g Empty k ys = mkNest k (fill g ys) fill1 g (Nest n p) k ys = nest_ n (fill1 g p (k - n) ys) fill1 g (NilAbove p) k ys = nilAbove_ (aboveNest p False k (fill g ys)) fill1 g (TextBeside s sl p) k ys = textBeside_ s sl (fillNB g p (k - sl) ys) fillNB g _ k ys | k `seq` False = undefined fillNB g (Nest _ p) k ys = fillNB g p k ys fillNB g Empty k [] = Empty fillNB g Empty k (y:ys) = nilBeside g (fill1 g (oneLiner (reduceDoc y)) k1 ys) `mkUnion` nilAboveNest False k (fill g (y:ys)) where k1 | g = k - 1 | otherwise = k fillNB g p k ys = fill1 g p k ys -- --------------------------------------------------------------------------- -- Selecting the best layout best :: Mode -> Int -- Line length -> Int -- 
Ribbon length -> RDoc -> RDoc -- No unions in here! best OneLineMode w r p = get p where get Empty = Empty get NoDoc = NoDoc get (NilAbove p) = nilAbove_ (get p) get (TextBeside s sl p) = textBeside_ s sl (get p) get (Nest k p) = get p -- Elide nest get (p `Union` q) = first (get p) (get q) best mode w r p = get w p where get :: Int -- (Remaining) width of line -> Doc -> Doc get w _ | w==0 && False = undefined get w Empty = Empty get w NoDoc = NoDoc get w (NilAbove p) = nilAbove_ (get w p) get w (TextBeside s sl p) = textBeside_ s sl (get1 w sl p) get w (Nest k p) = nest_ k (get (w - k) p) get w (p `Union` q) = nicest w r (get w p) (get w q) get1 :: Int -- (Remaining) width of line -> Int -- Amount of first line already eaten up -> Doc -- This is an argument to TextBeside => eat Nests -> Doc -- No unions in here! get1 w _ _ | w==0 && False = undefined get1 w sl Empty = Empty get1 w sl NoDoc = NoDoc get1 w sl (NilAbove p) = nilAbove_ (get (w - sl) p) get1 w sl (TextBeside t tl p) = textBeside_ t tl (get1 w (sl + tl) p) get1 w sl (Nest k p) = get1 w sl p get1 w sl (p `Union` q) = nicest1 w r sl (get1 w sl p) (get1 w sl q) nicest w r p q = nicest1 w r 0 p q nicest1 w r sl p q | fits ((w `minn` r) - sl) p = p | otherwise = q fits :: Int -- Space available -> Doc -> Bool -- True if *first line* of Doc fits in space available fits n p | n < 0 = False fits n NoDoc = False fits n Empty = True fits n (NilAbove _) = True fits n (TextBeside _ sl p) = fits (n - sl) p minn x y | x < y = x | otherwise = y -- @first@ and @nonEmptySet@ are similar to @nicest@ and @fits@, only simpler. -- @first@ returns its first argument if it is non-empty, otherwise its second. 
first p q | nonEmptySet p = p | otherwise = q nonEmptySet NoDoc = False nonEmptySet (p `Union` q) = True nonEmptySet Empty = True nonEmptySet (NilAbove p) = True -- NoDoc always in first line nonEmptySet (TextBeside _ _ p) = nonEmptySet p nonEmptySet (Nest _ p) = nonEmptySet p -- @oneLiner@ returns the one-line members of the given set of @Doc@s. oneLiner :: Doc -> Doc oneLiner NoDoc = NoDoc oneLiner Empty = Empty oneLiner (NilAbove p) = NoDoc oneLiner (TextBeside s sl p) = textBeside_ s sl (oneLiner p) oneLiner (Nest k p) = nest_ k (oneLiner p) oneLiner (p `Union` q) = oneLiner p -- --------------------------------------------------------------------------- -- Displaying the best layout renderStyle style doc = fullRender (mode style) (lineLength style) (ribbonsPerLine style) string_txt "" doc render doc = showDoc doc "" showDoc doc rest = fullRender PageMode 100 1.5 string_txt rest doc string_txt (Chr c) s = c:s string_txt (Str s1) s2 = s1 ++ s2 string_txt (PStr s1) s2 = s1 ++ s2 fullRender OneLineMode _ _ txt end doc = easy_display space_text txt end (reduceDoc doc) fullRender LeftMode _ _ txt end doc = easy_display nl_text txt end (reduceDoc doc) fullRender mode line_length ribbons_per_line txt end doc = display mode line_length ribbon_length txt end best_doc where best_doc = best mode hacked_line_length ribbon_length (reduceDoc doc) hacked_line_length, ribbon_length :: Int ribbon_length = round (fromIntegral line_length / ribbons_per_line) hacked_line_length = case mode of { ZigZagMode -> maxBound; other -> line_length } display mode page_width ribbon_width txt end doc = case page_width - ribbon_width of { gap_width -> case gap_width `quot` 2 of { shift -> let lay k _ | k `seq` False = undefined lay k (Nest k1 p) = lay (k + k1) p lay k Empty = end lay k (NilAbove p) = nl_text `txt` lay k p lay k (TextBeside s sl p) = case mode of ZigZagMode | k >= gap_width -> nl_text `txt` ( Str (multi_ch shift '/') `txt` ( nl_text `txt` ( lay1 (k - shift) s sl p))) | k < 0 -> 
nl_text `txt` ( Str (multi_ch shift '\\') `txt` ( nl_text `txt` ( lay1 (k + shift) s sl p ))) other -> lay1 k s sl p lay1 k _ sl _ | k+sl `seq` False = undefined lay1 k s sl p = Str (indent k) `txt` (s `txt` lay2 (k + sl) p) lay2 k _ | k `seq` False = undefined lay2 k (NilAbove p) = nl_text `txt` lay k p lay2 k (TextBeside s sl p) = s `txt` (lay2 (k + sl) p) lay2 k (Nest _ p) = lay2 k p lay2 k Empty = end in lay 0 doc }} cant_fail = error "easy_display: NoDoc" easy_display nl_text txt end doc = lay doc cant_fail where lay NoDoc no_doc = no_doc lay (Union p q) no_doc = {- lay p -} (lay q cant_fail) -- Second arg can't be NoDoc lay (Nest k p) no_doc = lay p no_doc lay Empty no_doc = end lay (NilAbove p) no_doc = nl_text `txt` lay p cant_fail -- NoDoc always on first line lay (TextBeside s sl p) no_doc = s `txt` lay p no_doc -- OLD version: we shouldn't rely on tabs being 8 columns apart in the output. -- indent n | n >= 8 = '\t' : indent (n - 8) -- | otherwise = spaces n indent n = spaces n multi_ch 0 ch = "" multi_ch n ch = ch : multi_ch (n - 1) ch -- (spaces n) generates a list of n spaces -- -- It should never be called with 'n' < 0, but that can happen for reasons I don't understand -- Here's a test case: -- ncat x y = nest 4 $ cat [ x, y ] -- d1 = foldl1 ncat $ take 50 $ repeat $ char 'a' -- d2 = parens $ sep [ d1, text "+" , d1 ] -- main = print d2 -- I don't feel motivated enough to find the Real Bug, so meanwhile we just test for n<=0 spaces n | n <= 0 = "" | otherwise = ' ' : spaces (n - 1) {- Comments from Johannes Waldmann about what the problem might be: In the example above, d2 and d1 are deeply nested, but `text "+"' is not, so the layout function tries to "out-dent" it. when I look at the Doc values that are generated, there are lots of Nest constructors with negative arguments. 
see this sample output of d1 (obtained with hugs, :s -u) tBeside (TextDetails_Chr 'a') 1 Doc_Empty) (Doc_NilAbove (Doc_Nest (-241) (Doc_TextBeside (TextDetails_Chr 'a') 1 Doc_Empty))))) (Doc_NilAbove (Doc_Nest (-236) (Doc_TextBeside (TextDetails_Chr 'a') 1 (Doc_NilAbove (Doc_Nest (-5) (Doc_TextBeside (TextDetails_Chr 'a') 1 Doc_Empty)))))))) (Doc_NilAbove (Doc_Nest (-231) (Doc_TextBeside (TextDetails_Chr 'a') 1 (Doc_NilAbove (Doc_Nest (-5) (Doc_TextBeside (TextDetails_Chr 'a') 1 (Doc_NilAbove (Doc_Nest (-5) (Doc_TextBeside (TextDetails_Chr 'a') 1 Doc_Empty))))))))))) (Doc_NilAbove (Doc_Nest -}
lwchkg/sunlight-x
test/code-snippets/haskell.hs
apache-2.0
34,428
0
27
12,005
6,308
3,370
2,938
384
18
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Openshift.V1.SecurityContext where import GHC.Generics import Openshift.V1.Capabilities import Openshift.V1.SELinuxOptions import qualified Data.Aeson -- | SecurityContext holds security configuration that will be applied to a container. Some fields are present in both SecurityContext and PodSecurityContext. When both are set, the values in SecurityContext take precedence. data SecurityContext = SecurityContext { capabilities :: Maybe Capabilities -- ^ The capabilities to add/drop when running containers. Defaults to the default set of capabilities granted by the container runtime. , privileged :: Maybe Bool -- ^ Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. , seLinuxOptions :: Maybe SELinuxOptions -- ^ The SELinux context to be applied to the container. If unspecified, the container runtime will allocate a random SELinux context for each container. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. , runAsUser :: Maybe Integer -- ^ The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. , runAsNonRoot :: Maybe Bool -- ^ Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. 
If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. } deriving (Show, Eq, Generic) instance Data.Aeson.FromJSON SecurityContext instance Data.Aeson.ToJSON SecurityContext
minhdoboi/deprecated-openshift-haskell-api
openshift/lib/Openshift/V1/SecurityContext.hs
apache-2.0
2,184
0
9
343
130
79
51
19
0
module Ticket75 where data a :- b = Q -- | A reference to ':-' f :: Int f = undefined
nominolo/haddock2
tests/golden-tests/tests/Ticket75.hs
bsd-2-clause
88
0
5
23
25
16
9
-1
-1
{-Joseph Eremondi UU# 4229924 Utrecht University, APA 2015 Project one: dataflow analysis March 17, 2015 -} {-# LANGUAGE RecordWildCards #-} {-| General framework for constructing lattices and finding fixpoints of monotone functions. |-} module Optimize.MonotoneFramework ( AnalysisDirection(..), ProgramInfo(..), Lattice(..), joinAll, minFP, printGraph )where import qualified Data.HashMap.Strict as Map import qualified Data.Graph.Inductive.Graph as Graph import qualified Data.Graph.Inductive.PatriciaTree as Gr import qualified Data.GraphViz as Viz import qualified Data.GraphViz.Attributes.Complete as VA import Data.GraphViz.Printing (renderDot) import Data.List (foldl') import Data.Hashable import Data.Text.Lazy (pack, unpack) newtype FlowEdge label = FlowEdge (label, label) data AnalysisDirection = ForwardAnalysis | BackwardAnalysis data ProgramInfo label = ProgramInfo { edgeMap :: label -> [label], --labelRange :: (label, label), allLabels :: [label], labelPairs :: [(label, label)], isExtremal :: label -> Bool } {-| Useful for debugging. 
Prints the graphviz string to render a representation of a control-flow graph |-} printGraph :: (Ord label) => (label -> Int) -> (label -> String) -> ProgramInfo label -> String printGraph intMap strMap pInfo = let nodes = map (\n -> (intMap n, strMap n)) $ allLabels pInfo grWithNodes = (Graph.insNodes nodes Graph.empty) :: (Gr.Gr String ()) edges = map (\(n1, n2) -> (intMap n1, intMap n2, () ) ) (labelPairs pInfo) theGraph = (Graph.insEdges edges grWithNodes) :: (Gr.Gr String () ) defaultParams = Viz.defaultParams :: (Viz.GraphvizParams Graph.Node String () () String ) ourParams = defaultParams {Viz.fmtNode = \(_,s) -> [VA.Label $ VA.StrLabel $ pack s]} in unpack $ renderDot $ Viz.toDot $ Viz.graphToDot ourParams theGraph {-| Either reverse and edge, or don't, depending on whether we are doing forwards or backwards analysis |-} getFlowEdge :: AnalysisDirection -> (label,label) -> FlowEdge label getFlowEdge ForwardAnalysis e = FlowEdge e getFlowEdge BackwardAnalysis (l1, l2) = FlowEdge (l2, l1) {-| Abstract type representing a lattice and the operations that can be performed on it. |-} data Lattice a = Lattice { --latticeTop :: a latticeBottom :: a, latticeJoin :: a -> a -> a, iota :: a, --Extremal value for our analysis lleq :: a -> a -> Bool, flowDirection :: AnalysisDirection } {-| Iteratively join all the lattice elements in a list |-} joinAll :: (Lattice a) -> [a] -> a joinAll Lattice{..} = foldl' latticeJoin latticeBottom {-| Given a Lattice, a transfer which takes current stored values, a block label, and a payload and produces a new payload, and the flow information for our program, generate the dictionaries representing the open and closed fix-points of the given transfer function. 
|-} minFP :: (Hashable label, Eq label, Show label, Show payload) => Lattice payload -> (Map.HashMap label payload -> label -> payload -> payload) -> ProgramInfo label -> (Map.HashMap label payload, Map.HashMap label payload) minFP lat@(Lattice{..}) f info = (mfpOpen, mfpClosed) where mfpClosed = Map.mapWithKey (f mfpOpen) mfpOpen --stResult :: ST s [(label, payload)] initialSolns = foldr (\l solnsSoFar -> if isExtremal info l then Map.insert l iota solnsSoFar else Map.insert l latticeBottom solnsSoFar ) Map.empty (allLabels info) mfpOpen = iterateSolns initialSolns (labelPairs info) iterateSolns currentSolns [] = currentSolns iterateSolns currentSolns (cfgEdge:rest) = let flowEdge = getFlowEdge flowDirection cfgEdge (FlowEdge (l,l')) = flowEdge al = currentSolns Map.! l al' = currentSolns Map.! l' fal = f currentSolns l al (newPairs, newSolns) = if ( not $ fal `lleq` al') then let theMap = Map.insert l' (latticeJoin fal al') currentSolns thePairs = map (\lNeighbour -> (l', lNeighbour) ) $ edgeMap info l' in (thePairs, theMap) else ([], currentSolns) in iterateSolns newSolns (newPairs ++ rest)
JoeyEremondi/utrecht-apa-p1
src/Optimize/MonotoneFramework.hs
bsd-3-clause
4,433
0
20
1,113
1,112
626
486
78
4
{-# LANGUAGE NoMonomorphismRestriction, ScopedTypeVariables#-} module Scan where import Obsidian import Data.Word import Data.Bits import Control.Monad import Prelude hiding (map,zipWith,zip,sum,replicate,take,drop,iterate,last) import qualified Prelude as P --------------------------------------------------------------------------- -- --------------------------------------------------------------------------- --------------------------------------------------------------------------- -- Kernel1 (Thread acceses element tid and tid+1 --------------------------------------------------------------------------- -- Kernel1 is just a reduction! kernel1 :: Storable a => (a -> a -> a) -> SPull a -> BProgram (SPush Block a) kernel1 f arr | len arr == 1 = return (push arr) | otherwise = do let (a1,a2) = evenOdds arr arr' <- forcePull (zipWith f a1 a2) kernel1 f arr' mapKernel1 :: Storable a => (a -> a -> a) -> DPull (SPull a) -> DPush Grid a mapKernel1 f arr = pConcat (fmap body arr) where body arr = runPush (kernel1 f arr) --------------------------------------------------------------------------- -- Sklansky --------------------------------------------------------------------------- sklansky :: (Choice a, Storable a) => Int -> (a -> a -> a) -> Pull Word32 a -> Program Block (Push Block Word32 a) sklansky 0 op arr = return (push arr) sklansky n op arr = do let arr1 = binSplit (n-1) (fan op) arr arr2 <- forcePull arr1 sklansky (n-1) op arr2 fan :: Choice a => (a -> a -> a) -> SPull a -> SPull a fan op arr = a1 `append` fmap (op c) a2 where (a1,a2) = halve arr c = a1 ! 
fromIntegral (len a1 - 1) pushM = liftM push mapScan1 :: (Choice a, Storable a) => Int -> (a -> a -> a) -> DPull (SPull a) -> DPush Grid a mapScan1 n f arr = pConcat (fmap body arr) where body arr = runPush (sklansky n f arr) --------------------------------------------------------------------------- -- Pushy phases for Sklansky --------------------------------------------------------------------------- phase :: Int -> (a -> a -> a) -> Pull Word32 a -> Push Block Word32 a phase i f arr = mkPush l (\wf -> forAll sl2 (\tid -> do let ix1 = insertZero i tid ix2 = flipBit i ix1 ix3 = zeroBits i ix2 - 1 wf (arr ! ix1) ix1 wf (f (arr ! ix3) (arr ! ix2) ) ix2)) where l = len arr l2 = l `div` 2 sl2 = fromIntegral l2 sklansky2 :: Storable a => Int -> (a -> a -> a) -> Pull Word32 a -> Program Block (Push Block Word32 a) sklansky2 l f = compose [phase i f | i <- [0..(l-1)]] compose :: Storable a => [Pull Word32 a -> Push Block Word32 a] -> Pull Word32 a -> Program Block (Push Block Word32 a) compose [f] arr = return (f arr) compose (f:fs) arr = do let arr1 = f arr arr2 <- force arr1 compose fs arr2 insertZero :: Int -> Exp Word32 -> Exp Word32 insertZero 0 a = a `shiftL` 1 insertZero i a = a + zeroBits i a zeroBits :: Int -> EWord32 -> EWord32 zeroBits i a = a .&. fromIntegral (complement (oneBits i :: Word32)) flipBit :: (Num a, Bits a) => Int -> a -> a flipBit i = (`xor` (1 `shiftL` i)) oneBits :: (Num a, Bits a) => Int -> a oneBits i = (2^i) - 1 mapScan2 :: (Choice a, Storable a) => Int -> (a -> a -> a) -> DPull (SPull a) -> DPush Grid a mapScan2 n f arr = pConcat $ fmap body arr -- sklansky2 n f where body arr = runPush (sklansky2 n f arr) -- getScan2 n = namedPrint ("scanB" ++ show (2^n)) (mapScan2 n (+) . 
splitUp (2^n)) (input :- ()) ---------------------------------------------------------------------------- -- TWEAK LOADS ---------------------------------------------------------------------------- sklansky3 :: Storable a => Int -> (a -> a -> a) -> Pull Word32 a -> Program Block (Push Block Word32 a) sklansky3 l f arr = do im <- force (load 2 arr) compose [phase i f | i <- [0..(l-1)]] im mapScan3 :: (Choice a, Storable a) => Int -> (a -> a -> a) -> DPull (SPull a) -> DPush Grid a mapScan3 n f arr = pConcat (fmap body arr) where body arr = runPush (sklansky3 n f arr) --getScan3 n = namedPrint ("scanC" ++ show (2^n)) (mapScan3 n (+) . splitUp (2^n)) (input :- ())
svenssonjoel/ObsidianGFX
Examples/ScanBench/Scan.hs
bsd-3-clause
4,464
0
17
1,166
1,599
812
787
100
1
{-# LANGUAGE TemplateHaskell #-} module Client.MenuLayerT where import Control.Lens (makeLenses) import Types makeLenses ''MenuLayerT newMenuLayerT :: MenuLayerT newMenuLayerT = MenuLayerT { _mlDraw = Nothing , _mlKey = Nothing }
ksaveljev/hake-2
src/Client/MenuLayerT.hs
bsd-3-clause
267
0
6
66
52
31
21
9
1
module Proper.TestUtils(testFunction) where import Test.HUnit testFunction func cases = runTestTT $ makeTestCases func cases makeTestCases func cases = TestList $ map (\(input, expected) -> testCase func input expected) cases testCase func input expected = TestCase (assertEqual ("Input: " ++ show input) expected (func input))
dillonhuff/Proper
test/Proper/TestUtils.hs
bsd-3-clause
335
0
10
51
116
60
56
7
1
main :: IO () main = putStrLn "Test suite not yet implemented."
denibertovic/watcher
test/Spec.hs
bsd-3-clause
64
0
6
12
19
9
10
2
1
import Intake.Core import Intake.Job import System.Exit main = defaultMain' [ -- Run `intake run a` and capture its output as SHORT_ID and ID. defaultJob { jobCommand = "./dist/build/intake/intake" , jobArguments = words "run a" , jobStderr = Just "" , jobStdout = Just "{{SHORT_ID}} {{ID}}\n" } -- Wait a bit. , defaultJob { jobCommand = "sleep" , jobArguments = words "0.1" } -- Run `intake status $SHORT_ID` with the captured SHORT_ID. , defaultJob { jobCommand = "./dist/build/intake/intake" , jobArguments = words "status {{$SHORT_ID}}" , jobStderr = Just "" , jobStdout = Just "WCompleted\n" } -- Run `intake show $SHORT_ID` with the captured SHORT_ID. -- This checks the stdout by comparing the captured ID. , defaultJob { jobCommand = "./dist/build/intake/intake" , jobArguments = words "show {{$SHORT_ID}}" , jobStderr = Just "" , jobStdout = Just "workflow: Right a\n\ \id: {{$ID}}\n\ \arguments: []\n\ \state: #0 echo [\"a\"](Completed)\n\n" } ]
noteed/intake
tests/intake-run-00.hs
bsd-3-clause
1,024
0
9
214
163
96
67
24
1
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE InstanceSigs #-} {-# LANGUAGE ParallelListComp #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} -- | Definition of the MonetDB5/SQL backend for DSH: SQL code generation and -- execution of SQL queries. module Database.DSH.Backend.Sql.M5 where import Data.Aeson import Database.Algebra.SQL.Dialect import Database.Algebra.SQL.Materialization.CTE import Database.Algebra.SQL.Util import Database.DSH.Backend.Sql.Common -------------------------------------------------------------------------------- -- | SQL code generated for MonetDB5/SQL newtype M5Code = M5Code { unM5 :: String } instance Show M5Code where show = unM5 instance SqlCode M5Code where genSqlCode dag = (M5Code <$> prelude, M5Code <$> queries) where (prelude, queries) = renderOutputDSHWith MonetDB materialize dag instance ToJSON M5Code where toJSON (M5Code sql) = toJSON sql -- | A data vector described by MonetDB5 SQL type M5Vector = SqlVector M5Code --------------------------------------------------------------------------------
ulricha/dsh-sql
src/Database/DSH/Backend/Sql/M5.hs
bsd-3-clause
1,237
0
8
248
168
105
63
22
0
module Main where import Test.Framework import qualified TestTerm import qualified TestTheta import qualified TestASUP main :: IO () main = defaultMain tests tests :: [Test] tests = [ TestTerm.tests , TestTheta.tests , TestASUP.tests ]
projedi/type-inference-rank2
tests/TestMain.hs
bsd-3-clause
264
0
6
62
67
41
26
11
1
module Arcade.Sequence ( Sequence(..) ) where import Data.Word -- wrapping 16 bit sequence numbers newtype Sequence = Sequence Word16 instance Eq Sequence where Sequence a == Sequence b = a == b instance Ord Sequence where compare (Sequence s1) (Sequence s2) = case compare s1 s2 of LT | s2 - s1 <= 32768 -> GT GT | s1 - s2 <= 32768 -> LT x -> x
ekmett/arcade
src/Arcade/Sequence.hs
bsd-3-clause
370
0
13
92
143
72
71
11
0
-- Example of Container for Multiple Types {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TypeFamilies #-} module Container4 where import Control.Applicative (Applicative, WrappedMonad (..)) import Control.Applicative ((<$>), (<*>)) import Data.Foldable (Foldable) import Data.Maybe (fromMaybe, listToMaybe, maybeToList) import Data.Traversable (Traversable, traverse) import qualified Data.Set as Set type family L p (t :: * -> *) class PackLift p where pup :: (forall a. a -> t a ) -> p -> L p t pdown :: (forall a. t a -> a ) -> L p t -> p class PackMap p where pmap :: (forall a. t a -> t' a) -> L p t -> L p t' pmapA :: Applicative f => (forall a. t a -> f (t' a)) -> L p t -> f (L p t') -- pmapM :: Monad m => -- (forall a. t a -> m (t' a)) -> L p t -> m (L p t') -- pmapM f = unwrapMonad . pmapA (WrapMonad . f) toList :: (forall a. t a -> t') -> L p t -> [t'] class (PackMap p, PackLift p) => Pack p newtype Pair a b = Pair ( a, b) deriving (Show, Eq) -- newtype Pair' a b t = Pair' (t a, t b) deriving (Show, Eq) type Pair' a b f = Pair (f a) (f b) type instance L (Pair a b) t = Pair (t a) (t b) instance PackLift (Pair a b) where pup f (Pair (a, b)) = Pair (f a, f b) pdown f (Pair (a, b)) = Pair (f a, f b) instance PackMap (Pair a b) where pmap f (Pair (a, b)) = Pair (f a, f b) pmapA f (Pair (a, b)) = (\x y -> Pair (x, y)) <$> (f a) <*> (f b) toList f (Pair (a, b)) = [f a, f b] testUp :: Pair' Int Bool Maybe testUp = pup Just (Pair (1 :: Int, True)) testDown :: Pair Int Bool testDown = pdown (head . 
maybeToList) (Pair (Just (1 :: Int), Just True)) -- NG: The type variable ‘p1’ is ambiguous -- testMap :: Pair' Int Bool [] -- testMap = pmap maybeToList (Pair (Just 1, Just True)) -- testMap = (pmap :: (Maybe a -> [a]) -> Pair' Int Bool Maybe -> Pair' Int Bool []) maybeToList (Pair (Just 1, Just True)) -- data Showable = forall a. Show a => S a data Showable = forall a. S a -- deriving instance Show Showable -- NG: The type variables ‘p0’, ‘t0’ are ambiguous -- testToList :: [Showable] -- testToList = toList S (Pair (Just (1 :: Int), Just True)) {- instance PackLift (a, b) ( a b) where pup f (Pair (a, b)) = Pair' (f a, f b) pdown f (Pair' (a, b)) = Pair (f a, f b) instance PackMap (Pair' a b) where pmap f (Pair' (a, b)) = Pair' (f a, f b) pmapA f (Pair' (a, b)) = (\x y -> Pair' (x, y)) <$> (f a) <*> (f b) toList f (Pair' (a, b)) = [f a, f b] -} newtype PTraversable t' v t = PTraversable { unPTraversable :: t' (t v) } deriving (Show, Eq) {- instance Traversable t' => P (PTraversable t' v) where pmapA f (PTraversable ts) = PTraversable <$> Traversable.traverse f ts fromContainer f (PTraversable ts) = Foldable.toList $ fmap f ts instance Traversable t' => ContainerLift (PTraversable t' v) (t' v) where pup f ts = PTraversable $ fmap f ts pdown f (PTraversable ts) = fmap f ts -}
notae/haskell-exercise
pack/Container4.hs
bsd-3-clause
3,207
0
14
814
834
463
371
-1
-1
module Data.Int.Dom.Common ( Mask , mkMask , negative , zero , Depth , depth , Prefix , mkPrefix ) where import Control.Monad ((<=<)) import Data.Bits import Data.Function (on) import Data.Functor ((<$>)) import Data.Word (Word) import Prelude hiding (init, last) newtype Mask = Mask Int mkMask :: Prefix -> Prefix -> Mask mkMask = curry $ last . next 32 . next 16 . next 8 . next 4 . next 2 . next 1 . init where init = uncurry (xor `on` toWord) next = (.|.) <=< flip (.>>.) last = Mask . fromWord <$> (xor =<< (.>>. 1)) (.>>.) = shiftR {-# INLINE mkMask #-} negative :: Mask -> Bool negative (Mask m) = m < 0 {-# INLINE negative #-} zero :: Int -> Mask -> Bool zero i (Mask m) = toWord i .&. toWord m == 0 {-# INLINE zero #-} newtype Depth = Depth Mask instance Eq Depth where Depth (Mask m1) == Depth (Mask m2) = m1 == m2 Depth (Mask m1) /= Depth (Mask m2) = m1 /= m2 depth :: Mask -> Depth depth = Depth {-# INLINE depth #-} instance Ord Depth where Depth (Mask m1) <= Depth (Mask m2) = toWord m1 >= toWord m2 {-# INLINE (<=) #-} type Prefix = Int mkPrefix :: Int -> Mask -> Prefix mkPrefix i (Mask m) = fromWord $ toWord i .&. (complement (m' - 1) `xor` m') where m' = toWord m {-# INLINE mkPrefix #-} toWord :: Int -> Word toWord = fromIntegral {-# INLINE toWord #-} fromWord :: Word -> Int fromWord = fromIntegral {-# INLINE fromWord #-}
sonyandy/fd
src/Data/Int/Dom/Common.hs
bsd-3-clause
1,456
0
12
380
561
306
255
51
1
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} module Pack2 where import Control.Applicative import Control.Monad import Data.Dynamic import Data.Foldable import Data.Functor.Identity import Data.Maybe import Data.Monoid import Data.Traversable -- -- New Pack -- class PackLift b p where pLiftUp :: Applicative f => b -> p f pLiftDown :: Applicative f => p f -> f b class PackMap p where pNTM :: Applicative f => (forall x. g x -> f (h x)) -> p g -> f (p h) pNT :: (forall x. g x -> h x) -> p g -> p h pNT f = runIdentity . pNTM (Identity . f) -- TBD: put x under constraints pToList :: Applicative f => (forall x. f x -> y) -> p f -> [y] pToList' :: (Applicative f, Typeable f) => p f -> [Dynamic] pFoldM :: Applicative f => (forall x. s -> f x -> s) -> s -> p f -> s type Pack b p = (PackLift b p, PackMap p) -- Instances for Traversable newtype WrappedTraversable t a f = WrapTraversable { unwrapTraversable :: t (f a) } deriving Show instance Traversable t => PackLift (t a) (WrappedTraversable t a) where pLiftUp = WrapTraversable . 
fmap pure pLiftDown (WrapTraversable t) = traverse id t instance (Traversable t, Typeable a) => PackMap (WrappedTraversable t a) where pNTM f (WrapTraversable t) = WrapTraversable <$> traverse f t pToList f (WrapTraversable t) = fmap f (toList t) pToList' (WrapTraversable t) = fmap toDyn (toList t) -- Instances for (a, b) newtype PairL a b f = PairL (f a, f b) deriving (Show, Eq) instance PackLift (a, b) (PairL a b) where pLiftUp (a, b) = PairL (pure a, pure b) pLiftDown (PairL (a, b)) = (,) <$> a <*> b instance (Typeable a, Typeable b) => PackMap (PairL a b) where pNTM f (PairL (a, b)) = PairL <$> ((,) <$> f a <*> f b) pToList f (PairL (a, b)) = [f a, f b] pToList' (PairL (a, b)) = [toDyn a, toDyn b] pFoldM f s0 (PairL (a, b)) = f (f s0 a) b pl0 :: (Int, Bool) pl0 = (1, True) pl1 :: PairL Int Bool [] pl1 = PairL ([1], [True]) pl2 :: PairL Int Bool [] pl2 = PairL ([1, 2], [True]) pl3 :: PairL Int Bool [] pl3 = PairL ([1, 2], [True, False]) {-| >>> testPLU PairL ([1],[True]) -} testPLU :: PairL Int Bool [] testPLU = pLiftUp pl0 {-| >>> testPLD [(1,True)] >>> testPLD2 [(1,True),(1,False),(2,True),(2,False)] -} testPLD :: [(Int, Bool)] testPLD = pLiftDown pl1 testPLD2 :: [(Int, Bool)] testPLD2 = pLiftDown pl3 {-| >>> testPNTM PairL (Just 1,Just True) -} testPNTM :: PairL Int Bool Maybe testPNTM = runIdentity $ pNTM (Identity . listToMaybe) pl1 {-| >>> testFM [1,2] -} testFM :: [Int] testFM = pFoldM f mempty pl2 where f s x = length x : s testPack :: Pack b p => b -> p [] testPack = pLiftUp -- -- Experimental code -- {- traverse :: (Traversable t, Applicative f) => (a -> f b) -> t a -> f (t b) usecase: forall a. 
t a -> t (Dom a) -- (1) Create domain with single value t (Dom a) -> m (t (Var a)) -- (2) Create new variables t (Var a) -> m (t (Dom a)) -- (3) Read values of variables t (Dom a) -> t (Maybe a) -- (4) Convert Domain to Maybe t (Maybe a) -> Maybe (t a) -- (5) Convert Maybe to single value -} -- single type: {-| >>> st5 $ st4 $ st1 [1,2,3] Just [1,2,3] >>> st45 $ st1 [1,2,3] Just [1,2,3] -} -- (1) st1 :: (Traversable t, Applicative f) => t a -> t (f a) st1 = runIdentity . traverse (Identity . pure) -- (2)(3) st2 :: (Traversable t, Applicative f) => (a -> f b) -> t a -> f (t b) st2 = traverse -- (4) st4 :: (Traversable t) => t [a] -> t (Maybe a) st4 = runIdentity . traverse (Identity . listToMaybe) -- (5) st5 :: (Traversable t) => t (Maybe a) -> Maybe (t a) st5 = traverse id -- (4)+(5) st45 :: (Traversable t) => t [a] -> Maybe (t a) st45 = traverse listToMaybe -- ex.: t = (a, b), f = [] {-| >>> plLiftDown' $ plTranslate' $ plLiftUp' (1,True) Just (1,True) >>> plLiftDown'' $ plLiftUp' (1,True) Just (1,True) -} -- (1) plLiftUp' :: (a, b) -> ([a], [b]) plLiftUp' (a, b) = ([a], [b]) -- (4) plTranslate' :: ([a], [b]) -> (Maybe a, Maybe b) plTranslate' (a, b) = (listToMaybe a, listToMaybe b) -- (5) plLiftDown' :: (Maybe a, Maybe b) -> Maybe (a, b) plLiftDown' (a, b) = (,) <$> a <*> b -- (4)+(5) plLiftDown'' :: ([a], [b]) -> Maybe (a, b) plLiftDown'' (a, b) = (,) <$> listToMaybe a <*> listToMaybe b {-| >>> pgLiftDown $ pgNT listToMaybe $ pgLiftUp (1,True) Just (1,True) >>> pgLiftDown $ pgNT' listToMaybe $ pgLiftUp (1,True) Just (1,True) >>> pgNTM (Just . listToMaybe) ([1,2],[True,False]) Just (Just 1,Just True) >>> runIdentity $ pgNTM (Identity . listToMaybe) ([1,2],[True,False]) (Just 1,Just True) -} -- (1) primitive pgLiftUp :: Applicative f => (a, b) -> (f a, f b) pgLiftUp (a, b) = (pure a, pure b) -- (2)(3) primitive pgNTM :: Applicative f => (forall x. g x -> f (h x)) -> (g a, g b) -> f (h a, h b) pgNTM f (a, b) = (,) <$> f a <*> f b -- (4) pgNT :: (forall x. 
f x -> g x) -> (f a, f b) -> (g a, g b) pgNT f (a, b) = (f a, f b) -- rewritten with primitive pgNT' :: (forall x. f x -> g x) -> (f a, f b) -> (g a, g b) pgNT' f = runIdentity . pgNTM (Identity . f) -- (5) primitive pgLiftDown :: Applicative f => (f a, f b) -> f (a, b) pgLiftDown (a, b) = (,) <$> a <*> b -- TBD: generalize on element type -- multi type: -- (1) -- mt1 :: (forall a. a -> f a) -> f a -- (forall a. f a -> g a) -> -- (forall a. f a -> a) -> t f -> f t -- (2)(3) primitive mNTM :: Applicative f => (forall x. g x -> f (h x)) -> t g -> f (t h) mNTM = undefined
notae/haskell-exercise
pack/Pack2.hs
bsd-3-clause
5,650
0
14
1,319
2,124
1,142
982
99
1
-- example2 import Data.IORef import Control.Applicative import Control.Monad import Control.Exception (SomeException) import qualified Control.Exception as E import Control.Concurrent (threadDelay) import System.IO.Unsafe (unsafePerformIO) import System.Timeout (timeout) {-# NOINLINE numHandles #-} numHandles :: IORef Int numHandles = unsafePerformIO $ newIORef 0 {-# NOINLINE dataWritten #-} dataWritten :: IORef [String] dataWritten = unsafePerformIO $ newIORef [] test :: IO () -> IO () test action = do action `E.catch` \e -> do putStrLn $ "exception: " ++ show (e :: SomeException) readIORef numHandles >>= putStrLn . ("Number of open handles: " ++) . show readIORef dataWritten >>= putStrLn . ("Data writtern to file: " ++) . show data Handle = Handle (IORef (Maybe String)) openFile :: FilePath -> IO Handle openFile _ = do modifyIORef' numHandles succ Handle <$> newIORef Nothing hClose :: Handle -> IO () hClose h = hFlushFailing h `E.finally` modifyIORef numHandles pred hFlushFailing :: Handle -> IO () hFlushFailing _ = error "hFlush failed" hFlush :: Handle -> IO () hFlush (Handle ref) = do val <- readIORef ref case val of Just str -> modifyIORef dataWritten (str :) _ -> return () writeIORef ref Nothing hPutStr :: Handle -> String -> IO () hPutStr h@(Handle ref) str = do hFlush h writeIORef ref (Just str) bracket :: IO a -> (a -> IO ()) -> (a -> IO b) -> IO b bracket allocate release use = E.mask $ \restore -> do resource <- allocate restore (use resource) `E.finally` release resource example :: IO () example = void $ timeout (1 * 1000 * 1000) $ bracket (openFile "path") hClose $ \h -> do hPutStr h "Hello" hPutStr h "World" threadDelay (2 * 1000 * 1000) main :: IO () main = test example
Yuras/io-region
misc/imask/example2.hs
bsd-3-clause
1,794
0
13
363
687
344
343
55
2
module PrettyPrinter where import Syntax import Text.PrettyPrint pprExpr :: CoreExpr -> Doc pprExpr (ENum n) = int n pprExpr (EVar v) = text v pprExpr (EBinApp binOp e1 e2) = hcat [ pprExpr e1 <+> text (show binOp) <+> pprExpr e2] pprExpr (ECase e alts) = empty --TODO pprExpr (EConstr n1 n2) = empty --TODO pprExpr (ELam vs e) = hcat [ text "\\" , foldl (\acc v -> acc <+> (text v)) empty vs , text " . " , pprExpr e ] pprExpr (EAp e1 e2) = hsep [pprExpr e1, pprExpr e2] pprExpr (ELet is_rec defns expr) = vcat [ text keyword , vcat (map pprDefn defns) , text "in" , pprExpr expr ] where keyword | is_rec = "letrec" | not is_rec = "let" pprDefn :: (Name, CoreExpr) -> Doc pprDefn (name, expr) = hsep [text name, text "=", pprExpr expr] pprScDefn :: CoreScDefn -> Doc pprScDefn (name, vars, expr) = hsep [ text name , foldl (\acc v -> acc <+> (text v)) empty vars , text "=" , pprExpr expr ] pprProg :: CoreProgram -> Doc pprProg scDefns = vcat (map pprScDefn scDefns) pprint :: CoreProgram -> IO () pprint = print . pprProg
MarkX95/TinyHask
PrettyPrinter.hs
bsd-3-clause
1,145
0
12
319
490
249
241
40
1
module Network.TigHTTP.Token ( isTokenChar, isTextChar, isQdtextChar, ) where import Data.Char (isAscii) isCtl, isSeparator, isTokenChar, isTextChar, isQdtextChar :: Char -> Bool isCtl = (`elem` (['\0' .. '\31'] ++ "\DEL")) isSeparator = (`elem` "()<>@,;:\\\"/[]?={} \t") isTokenChar = (&&) <$> not . isCtl <*> not . isSeparator isTextChar = (&&) <$> isAscii <*> not . isCtl isQdtextChar = (&&) <$> isTextChar <*> (/= '"')
YoshikuniJujo/tighttp
src/Network/TigHTTP/Token.hs
bsd-3-clause
433
8
8
71
150
96
54
11
1
-- | Render an abstract shell script as a bash script module Shell.Formatter.Bash ( runBash, bashFormatter ) where import qualified Text.PrettyPrint.Mainland as PP import qualified Shell.Diagnostic as D import qualified Shell.Formatter.Base as F import qualified Shell.Internal as I import qualified Shell.Optimize as O import qualified Shell.Render as R -- | A formatter for bash scripts bashFormatter :: F.Formatter bashFormatter = F.defaultFormatter { F.fmtPreamble = \_ -> preamble } preamble :: PP.Doc preamble = PP.stack [ PP.string "#!/bin/bash" , PP.string "set -e" , PP.string "set -u" , PP.line ] -- | Turn an abstract shell script specification into a bash script. runBash :: I.ShellM () -> IO (Maybe String, [D.Diagnostic]) runBash st = do shell <- I.flattenShell st let (sh, odiags) = O.optimize O.defaultOptimizer shell case R.renderScript bashFormatter sh of Left errs -> return (Nothing, errs ++ odiags) Right script -> return (Just script, odiags)
travitch/shellDSL
src/Shell/Formatter/Bash.hs
bsd-3-clause
1,071
0
12
250
278
157
121
23
2
module Trainer.Internal where boxMueller :: Double -> Double -> Double -> Double -> Double boxMueller μ σ r1 r2 = μ + σ * sqrt (-2*log r1) * cos (2*pi*r2) positiveStdNormal :: Double -> Double -> Double -> Double positiveStdNormal hi r1 r2 = min hi (abs bm) where bm = boxMueller 0 (hi/25) r1 r2
epsilonhalbe/VocabuLambda
Trainer/Internal.hs
bsd-3-clause
330
0
10
86
145
74
71
6
1
------------------------------------------------------------------------------ -- | -- Module : Data.Datamining.Clustering.Gsom -- Copyright : (c) 2009 Stephan Günther -- License : BSD3 -- -- Maintainer : gnn.github@gmail.com -- Stability : experimental -- Portability : portable -- -- The network created by the GSOM algorithm is layed out in two dimensions. -- Thus one needs to assign two dimensional coordinates to the nodes of the -- network and for a clustering to the clusters. -- -- The types defining these coordinates and the functions to handle them belong -- into this module. ------------------------------------------------------------------------------ module Data.Datamining.Clustering.Gsom.Coordinates where type Coordinates = (Int, Int) type Direction = Int type Directions = [Int] -- | The list of supported directions. Since we are only dealing with -- hexagonal lattices, there are only six possible directions. directions :: Directions directions = [0..5] -- | @'neighbour' location direction@ calculates the coordinates of -- the neighbour of node with location @location@ in direction -- @direction@. neighbour :: Coordinates -> Direction -> Coordinates neighbour coordinates direction | direction > 5 = error $ "in neighbour: direction to big " ++ show direction ++ " (not in [0,5])." | otherwise = map ((\p1 p2 -> (fst p1 + fst p2, snd p1 + snd p2)) coordinates) [(2, 0), (1, 1), (-1, 1), (-2, 0), (-1, -1), (1, -1)] !! direction -- | @'neighbourCoordinates' point@ calculates the list of -- coordinates which are directly adjacent to @point@. neighbourCoordinates :: Coordinates -> [Coordinates] neighbourCoordinates cs = map (neighbour cs) directions
gnn/hsgsom
Data/Datamining/Clustering/Gsom/coordinates.hs
bsd-3-clause
1,733
0
14
297
278
167
111
16
1
{-# LANGUAGE PatternGuards #-} -- | -- Module : Scion.Types.Notes -- Copyright : (c) Thomas Schilling 2009 -- License : BSD-style -- -- Maintainer : nominolo@googlemail.com -- Stability : experimental -- Portability : portable -- -- Notes, i.e., warnings, errors, etc. -- module Scion.Types.Notes ( Location, LocSource(..), mkLocation, mkNoLoc , locSource, isValidLoc, noLocText, viewLoc , locStartCol, locEndCol, locStartLine, locEndLine , AbsFilePath(toFilePath), mkAbsFilePath , Note(..), NoteKind(..), Notes , ghcSpanToLocation, ghcErrMsgToNote, ghcWarnMsgToNote , ghcMessagesToNotes ) where import qualified ErrUtils as GHC ( ErrMsg(..), WarnMsg, Messages ) import qualified SrcLoc as GHC import qualified FastString as GHC ( unpackFS ) import qualified Outputable as GHC ( showSDoc, ppr, showSDocForUser ) import qualified Bag ( bagToList ) import qualified Data.MultiSet as MS import System.FilePath infixr 9 `thenCmp` -- * Notes -- | A note from the compiler or some other tool. data Note = Note { noteKind :: NoteKind , noteLoc :: Location , noteMessage :: String } deriving (Eq, Ord, Show) -- | Classifies the kind (or severity) of a note. data NoteKind = ErrorNote | WarningNote | InfoNote | OtherNote deriving (Eq, Ord, Show) type Notes = MS.MultiSet Note -- * Absolute File Paths -- | Represents a 'FilePath' which we know is absolute. -- -- Since relative 'FilePath's depend on the a current working directory we -- normalise all paths to absolute paths. Use 'mkAbsFilePath' to create -- absolute file paths. newtype AbsFilePath = AFP { toFilePath :: FilePath } deriving (Eq, Ord) instance Show AbsFilePath where show (AFP s) = show s -- | Create an absolute file path given a base directory. -- -- Throws an error if the first argument is not an absolute path. 
mkAbsFilePath :: FilePath -- ^ base directory (must be absolute) -> FilePath -- ^ absolute or relative -> AbsFilePath mkAbsFilePath baseDir dir | isAbsolute baseDir = AFP $ normalise $ baseDir </> dir | otherwise = error "mkAbsFilePath: first argument must be an absolute path" -- * Scion's 'Location' data type -- | Scion's type for source code locations (regions). -- -- We use a custom location type for two reasons: -- -- 1. We enforce the invariant, that the file path of the location is an -- absolute path. -- -- 2. Independent evolution from the GHC API. -- -- To save space, the 'Location' type is kept abstract and uses special -- cases for notes that span only one line or are only one character wide. -- Use 'mkLocation' and 'viewLoc' as well as the respective accessor -- functions to construct and destruct nodes. -- -- If no reasonable can be given, use the 'mkNoLoc' function, but be careful -- not to call 'viewLoc' or any other accessor function on such a -- 'Location'. -- data Location = LocOneLine { locSource :: LocSource, locLine :: {-# UNPACK #-} !Int, locSCol :: {-# UNPACK #-} !Int, locECol :: {-# UNPACK #-} !Int } | LocMultiLine { locSource :: LocSource, locSLine :: {-# UNPACK #-} !Int, locELine :: {-# UNPACK #-} !Int, locSCol :: {-# UNPACK #-} !Int, locECol :: {-# UNPACK #-} !Int } | LocPoint { locSource :: LocSource, locLine :: {-# UNPACK #-} !Int, locCol :: {-# UNPACK #-} !Int } | LocNone { noLocText :: String } deriving (Eq, Show) -- | The \"source\" of a location. data LocSource = FileSrc AbsFilePath -- ^ The location refers to a position in a file. | OtherSrc String -- ^ The location refers to something else, e.g., the command line, or -- stdin. deriving (Eq, Ord, Show) instance Ord Location where compare = cmpLoc -- | Construct a source code location from start and end point. -- -- If the start point is after the end point, they are swapped -- automatically. 
mkLocation :: LocSource -> Int -- ^ start line -> Int -- ^ start column -> Int -- ^ end line -> Int -- ^ end column -> Location mkLocation file l0 c0 l1 c1 | l0 > l1 = mkLocation file l1 c0 l0 c1 | l0 == l1 && c0 > c1 = mkLocation file l0 c1 l1 c0 | l0 == l1 = if c0 == c1 then LocPoint file l0 c0 else LocOneLine file l0 c0 c1 | otherwise = LocMultiLine file l0 l1 c0 c1 -- | Construct a source location that does not specify a region. The -- argument can be used to give some hint as to why there is no location -- available. (E.g., \"File not found\"). mkNoLoc :: String -> Location mkNoLoc msg = LocNone msg -- | Test whether a location is valid, i.e., not constructed with 'mkNoLoc'. isValidLoc :: Location -> Bool isValidLoc (LocNone _) = False isValidLoc _ = True noLocError :: String -> a noLocError f = error $ f ++ ": argument must not be a noLoc" -- | Return the start column. Only defined on valid locations. locStartCol :: Location -> Int locStartCol l@LocPoint{} = locCol l locStartCol LocNone{} = noLocError "locStartCol" locStartCol l = locSCol l -- | Return the end column. Only defined on valid locations. locEndCol :: Location -> Int locEndCol l@LocPoint{} = locCol l locEndCol LocNone{} = noLocError "locEndCol" locEndCol l = locECol l -- | Return the start line. Only defined on valid locations. locStartLine :: Location -> Int locStartLine l@LocMultiLine{} = locSLine l locStartLine LocNone{} = noLocError "locStartLine" locStartLine l = locLine l -- | Return the end line. Only defined on valid locations. locEndLine :: Location -> Int locEndLine l@LocMultiLine{} = locELine l locEndLine LocNone{} = noLocError "locEndLine" locEndLine l = locLine l {-# INLINE viewLoc #-} -- | View on a (valid) location. -- -- It holds the property: -- -- > prop_viewLoc_mkLoc s l0 c0 l1 c1 = -- > viewLoc (mkLocation s l0 c0 l1 c1) == (s, l0, c0, l1, c1) -- viewLoc :: Location -> (LocSource, Int, Int, Int, Int) -- ^ source, start line, start column, end line, end column. 
viewLoc l = (locSource l, locStartLine l, locStartCol l, locEndLine l, locEndCol l) -- | Comparison function for two 'Location's. cmpLoc :: Location -> Location -> Ordering cmpLoc LocNone{} _ = LT cmpLoc _ LocNone{} = GT cmpLoc l1 l2 = (f1 `compare` f2) `thenCmp` (sl1 `compare` sl2) `thenCmp` (sc1 `compare` sc2) `thenCmp` (el1 `compare` el2) `thenCmp` (ec1 `compare` ec2) where (f1, sl1, sc1, el1, ec1) = viewLoc l1 (f2, sl2, sc2, el2, ec2) = viewLoc l2 -- | Lexicographic composition two orderings. Compare using the first -- ordering, use the second to break ties. thenCmp :: Ordering -> Ordering -> Ordering thenCmp EQ x = x thenCmp x _ = x {-# INLINE thenCmp #-} -- * Converting from GHC types. -- | Convert a 'GHC.SrcSpan' to a 'Location'. -- -- The first argument is used to normalise relative source locations to an -- absolute file path. ghcSpanToLocation :: FilePath -- ^ Base directory -> GHC.SrcSpan -> Location ghcSpanToLocation baseDir sp | GHC.isGoodSrcSpan sp = mkLocation mkLocFile (GHC.srcSpanStartLine sp) (GHC.srcSpanStartCol sp) (GHC.srcSpanEndLine sp) (GHC.srcSpanEndCol sp) | otherwise = mkNoLoc (GHC.showSDoc (GHC.ppr sp)) where mkLocFile = case GHC.unpackFS (GHC.srcSpanFile sp) of s@('<':_) -> OtherSrc s p -> FileSrc $ mkAbsFilePath baseDir p ghcErrMsgToNote :: FilePath -> GHC.ErrMsg -> Note ghcErrMsgToNote = ghcMsgToNote ErrorNote ghcWarnMsgToNote :: FilePath -> GHC.WarnMsg -> Note ghcWarnMsgToNote = ghcMsgToNote WarningNote -- Note that we don *not* include the extra info, since that information is -- only useful in the case where we don not show the error location directly -- in the source. 
ghcMsgToNote :: NoteKind -> FilePath -> GHC.ErrMsg -> Note ghcMsgToNote note_kind base_dir msg = Note { noteLoc = ghcSpanToLocation base_dir loc , noteKind = note_kind , noteMessage = show_msg (GHC.errMsgShortDoc msg) } where loc | (s:_) <- GHC.errMsgSpans msg = s | otherwise = GHC.noSrcSpan unqual = GHC.errMsgContext msg show_msg = GHC.showSDocForUser unqual -- | Convert 'GHC.Messages' to 'Notes'. -- -- This will mix warnings and errors, but you can split them back up -- by filtering the 'Notes' based on the 'noteKind'. ghcMessagesToNotes :: FilePath -- ^ Base path for normalising paths. -- See 'mkAbsFilePath'. -> GHC.Messages -> Notes ghcMessagesToNotes base_dir (warns, errs) = MS.union (map_bag2ms (ghcWarnMsgToNote base_dir) warns) (map_bag2ms (ghcErrMsgToNote base_dir) errs) where map_bag2ms f = MS.fromList . map f . Bag.bagToList
nominolo/scion
lib/Scion/Types/Notes.hs
bsd-3-clause
8,890
0
12
2,118
1,764
1,003
761
155
2
-------------------------------------------------------------------------------- -- -- Copyright (c) 2011 - 2014 Tad Doxsee -- All rights reserved. -- -- Author: Tad Doxsee -- -------------------------------------------------------------------------------- module Main where import RegTesterLib (CmdMaker, regTesterMain) main :: IO () main = regTesterMain mkCmd mkCmd :: CmdMaker mkCmd program auxDir stdInDir testOutDir _ = program ++ " " ++ auxDir ++ " " ++ stdInDir ++ " " ++ testOutDir
tdox/regTester
src/regTester2.hs
bsd-3-clause
500
0
10
72
86
50
36
7
1
{-# LANGUAGE OverloadedStrings #-} -- Module : Network.AWS.Data.Internal.JSON -- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : experimental -- Portability : non-portable (GHC extensions) module Network.AWS.Data.Internal.JSON ( -- * FromJSON FromJSON (..) , parseJSONText , eitherDecode' -- ** Parser a , withObject , (.:) , (.:?) , (.!=) -- ** Either String a , (.:>) , (.:?>) -- * ToJSON , ToJSON (..) , toJSONText , Object , object , (.=) ) where import Data.Aeson (eitherDecode') import Data.Aeson.Types import qualified Data.HashMap.Strict as Map import Data.Text (Text) import Network.AWS.Data.Internal.Text parseJSONText :: FromText a => String -> Value -> Parser a parseJSONText n = withText n (either fail return . fromText) toJSONText :: ToText a => a -> Value toJSONText = String . toText (.:>) :: FromJSON a => Object -> Text -> Either String a (.:>) o k = case Map.lookup k o of Nothing -> Left $ "key " ++ show k ++ " not present" Just v -> parseEither parseJSON v (.:?>) :: FromJSON a => Object -> Text -> Either String (Maybe a) (.:?>) o k = case Map.lookup k o of Nothing -> Right Nothing Just v -> parseEither parseJSON v
romanb/amazonka
core/src/Network/AWS/Data/Internal/JSON.hs
mpl-2.0
1,719
0
10
525
367
214
153
36
2
{-# LANGUAGE RecordWildCards #-} -- Chi square tests for random generators module MWC.ChiSquare ( tests ) where import Control.Applicative import Control.Monad import Data.Typeable import Data.Word import Data.List (find) import qualified Data.Vector.Unboxed as U import qualified Data.Vector.Unboxed.Mutable as M import qualified System.Random.MWC as MWC import qualified System.Random.MWC.Distributions as MWC import qualified System.Random.MWC.CondensedTable as MWC import Statistics.Types import Statistics.Test.ChiSquared import Statistics.Distribution import Statistics.Distribution.Poisson import Statistics.Distribution.Binomial import Statistics.Distribution.Geometric import Test.HUnit hiding (Test) import Test.Framework import Test.Framework.Providers.HUnit ---------------------------------------------------------------- tests :: MWC.GenIO -> Test.Framework.Test tests g = testGroup "Chi squared tests" -- Word8 tests [ uniformRTest (0,255 :: Word8) g , uniformRTest (0,254 :: Word8) g , uniformRTest (0,129 :: Word8) g , uniformRTest (0,126 :: Word8) g , uniformRTest (0,10 :: Word8) g -- * Tables , ctableTest [1] g , ctableTest [0.5, 0.5] g , ctableTest [0.25, 0.25, 0.25, 0.25] g , ctableTest [0.25, 0.5, 0.25] g , ctableTest [1/3 , 1/3, 1/3] g , ctableTest [0.1, 0.9] g , ctableTest (replicate 10 0.1) g -- ** Poisson , poissonTest 0.2 g , poissonTest 1.32 g , poissonTest 6.8 g , poissonTest 100 g -- ** Binomial , binomialTest 4 0.5 g , binomialTest 10 0.1 g , binomialTest 10 0.6 g , binomialTest 10 0.8 g , binomialTest 100 0.3 g -- ** Geometric , geometricTest 0.1 g , geometricTest 0.5 g , geometricTest 0.9 g ] ---------------------------------------------------------------- -- | RNG and corresonding distribution data Generator = Generator { generator :: MWC.GenIO -> IO Int , probabilites :: U.Vector Double } -- | Apply chi square test for a distribution sampleTest :: String -- ^ Name of test -> Generator -- ^ Generator to test -> Int -- ^ N of events -> MWC.GenIO -- 
^ PRNG state -> Test.Framework.Test sampleTest nm (Generator{..}) n g = testCase nm $ do let size = U.length $ probabilites h <- histogram (generator g) size n let w = U.map (* fromIntegral n) probabilites Just t = chi2test 0 $ U.zip h w case isSignificant (mkPValue 0.01) t of Significant -> assertFailure ("Significant: " ++ show t) NotSignificant -> return () {-# INLINE sampleTest #-} -- | Fill histogram using supplied generator histogram :: IO Int -- ^ Rangom generator -> Int -- ^ N of outcomes -> Int -- ^ N of events -> IO (U.Vector Int) histogram gen size n = do arr <- M.replicate size 0 replicateM_ n $ do i <- gen when (i < size) $ M.write arr i . (+1) =<< M.read arr i U.unsafeFreeze arr {-# INLINE histogram #-} -- | Test uniformR uniformRTest :: (MWC.Variate a, Typeable a, Show a, Integral a) => (a,a) -> MWC.GenIO -> Test.Framework.Test uniformRTest (a,b) = sampleTest ("uniformR: " ++ show (a,b) ++ " :: " ++ show (typeOf a)) gen (10^5) where n = fromIntegral b - fromIntegral a + 1 gen = Generator { generator = \g -> fromIntegral . subtract a <$> MWC.uniformR (a,b) g , probabilites = U.replicate n (1 / fromIntegral n) } {-# INLINE uniformRTest #-} -- | Test for condensed tables ctableTest :: [Double] -> MWC.GenIO -> Test.Framework.Test ctableTest ps = sampleTest ("condensedTable: " ++ show ps) gen (10^4) where gen = Generator { generator = MWC.genFromTable $ MWC.tableFromProbabilities $ U.fromList $ zip [0..] ps , probabilites = U.fromList ps } -- | Test for condensed table for poissson distribution poissonTest :: Double -> MWC.GenIO -> Test.Framework.Test poissonTest lam = sampleTest ("poissonTest: " ++ show lam) gen (10^4) where pois = poisson lam Just nMax = find (\n -> probability pois n < 2**(-33)) [floor lam ..] 
gen = Generator { generator = MWC.genFromTable (MWC.tablePoisson lam) , probabilites = U.generate nMax (probability pois) } -- | Test for condensed table for binomial distribution binomialTest :: Int -> Double -> MWC.GenIO -> Test.Framework.Test binomialTest n p = sampleTest ("binomialTest: " ++ show p ++ " " ++ show n) gen (10^4) where binom = binomial n p gen = Generator { generator = MWC.genFromTable (MWC.tableBinomial n p) , probabilites = U.generate (n+1) (probability binom) } -- | Test for geometric distribution geometricTest :: Double -> MWC.GenIO -> Test.Framework.Test geometricTest gd = sampleTest ("geometricTest: " ++ show gd) gen (10^4) where n = 1000 gen = Generator { generator = MWC.geometric1 gd , probabilites = U.generate (n+1) (probability $ geometric gd) }
bos/mwc-random
mwc-random-bench/test/MWC/ChiSquare.hs
bsd-2-clause
5,175
0
15
1,353
1,552
843
709
110
2
{-# LANGUAGE TypeOperators, GADTs, KindSignatures, ConstraintKinds #-} {-# LANGUAGE FlexibleContexts, PatternGuards, ViewPatterns, ScopedTypeVariables #-} {-# LANGUAGE CPP #-} {-# OPTIONS_GHC -Wall #-} -- {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP -- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP ---------------------------------------------------------------------- -- | -- Module : LambdaCCC.ToCircuit -- Copyright : (c) 2013 Tabula, Inc. -- LICENSE : BSD3 -- -- Maintainer : conal@tabula.com -- Stability : experimental -- -- Convert from CCC form to a circuit ---------------------------------------------------------------------- module LambdaCCC.ToCircuit ( expToCircuit, cccToCircuit ) where import Prelude hiding (id,(.),curry,uncurry) import Data.Constraint (Dict(..)) import Circat.Prim hiding (xor) import LambdaCCC.CCC import LambdaCCC.Lambda (E) import LambdaCCC.ToCCC (toCCC) import Circat.Circuit import Circat.Category import Circat.Classes expToCircuit :: E Prim (a -> b) -> (a :> b) expToCircuit = cccToCircuit . toCCC #define TS (tyPSource -> Dict) #define CP (cccPS -> (Dict, Dict)) #define TC (tyHasCond -> Dict) #define LS (litSS -> Dict) cccToCircuit :: (a :-> b) -> (a :> b) -- Category cccToCircuit Id = id cccToCircuit (g :. f) = cccToCircuit g . cccToCircuit f -- Primitives cccToCircuit (Prim p) = primToSource p cccToCircuit (Lit l@LS) = constC (eval l) -- Product cccToCircuit Exl = exl cccToCircuit Exr = exr cccToCircuit (f :&&& g) = cccToCircuit f &&& cccToCircuit g -- Coproduct cccToCircuit Inl = inl cccToCircuit Inr = inr -- cccToCircuit k@(f :||| g) = cccToCircuit f |||* cccToCircuit g -- Exponential cccToCircuit Apply = apply cccToCircuit (Curry h) = curry (cccToCircuit h) cccToCircuit (Uncurry h) = uncurry (cccToCircuit h) cccToCircuit ccc = error $ "cccToCircuit: not yet handled: " ++ show ccc #define TH (tyHasTy -> HasTy) -- TODO: I don't know whether to keep add. We'll probably want to build it from -- simpler pieces. 
-- -- TODO: Maybe implement all primitives (other than exl & exr) with namedC. I -- could even use this PrimC type in circat, though it'd be the first dependency -- of circat on lambda-ccc. {-------------------------------------------------------------------- Prim conversion --------------------------------------------------------------------} primToSource :: Prim t -> Pins t primToSource NotP = not primToSource AndP = curry and primToSource OrP = curry or primToSource XorP = curry xor primToSource ExlP = exl primToSource ExrP = exr primToSource PairP = curry id primToSource InlP = inl primToSource InrP = inr -- primToSource CondP = condC -- primToSource AddP = curry (namedC "add") primToSource p = error $ "primToSource: not yet handled: " ++ show p #if 0 -- Prove that IsSource (Pins a), IsSource (Pins b) cccPS :: (a :-> b) -> (PSourceJt a, PSourceJt b) cccPS = tyPSource2 . cccTys {-------------------------------------------------------------------- Proofs --------------------------------------------------------------------} type PSourceJt a = Dict (IsSourceP a) -- | Proof of @'IsSource' ('Pins' a)@ from @'Ty' a@ tyPSource :: Ty a -> PSourceJt a tyPSource Unit = Dict tyPSource Bool = Dict tyPSource (TS :* TS) = Dict -- still needed? tyPSource ty = error $ "tyPSource: Oops -- not yet handling " ++ show ty -- That product case gets used for my CRC example when I turn off the -- xor/constant rewrite rules. tyPSource2 :: (Ty a,Ty b) -> (PSourceJt a, PSourceJt b) tyPSource2 (a,b) = (tyPSource a,tyPSource b) -- tyPSource2 = tyPSource *** tyPSource -- | Proof of @'HasCond t@ from @'Ty' t@ tyHasCond :: Ty t -> Dict (HasCond t) tyHasCond Unit = Dict tyHasCond Bool = Dict tyHasCond (TC :* TC) = Dict tyHasCond (_ :+ _ ) = Dict tyHasCond (_ :=> TC) = Dict tyHasCond Int = error "tyHasCond: Int not yet handled." #endif
conal/lambda-ccc
src/LambdaCCC/Unused/ToCircuit.hs
bsd-3-clause
4,036
0
8
785
800
438
362
42
1
{-# LANGUAGE TypeFamilies, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, GADTs, ScopedTypeVariables, FunctionalDependencies, RecursiveDo, UndecidableInstances, GeneralizedNewtypeDeriving, StandaloneDeriving, EmptyDataDecls, NoMonomorphismRestriction, TypeOperators, DeriveDataTypeable, PackageImports, TemplateHaskell, LambdaCase, DataKinds, PolyKinds #-} module Reflex.Dynamic ( Dynamic -- Abstract so we can preserve the law that the current value is always equal to the most recent update , current , updated , constDyn , holdDyn , nubDyn , count , toggle , switchPromptlyDyn , tagDyn , attachDyn , attachDynWith , attachDynWithMaybe , mapDyn , forDyn , mapDynM , foldDyn , foldDynM , foldDynMaybe , foldDynMaybeM , combineDyn , collectDyn , mconcatDyn , distributeDMapOverDyn , joinDyn , joinDynThroughMap , traceDyn , traceDynWith , splitDyn , Demux , demux , getDemuxed -- Things that probably aren't very useful: , HList (..) , FHList (..) , distributeFHListOverDyn -- Unsafe , unsafeDynamic ) where import Prelude hiding (mapM, mapM_) import Reflex.Class import Data.Functor.Misc import Control.Monad hiding (mapM, mapM_, forM, forM_) import Control.Monad.Fix import Control.Monad.Identity hiding (mapM, mapM_, forM, forM_) import Data.These import Data.Traversable (mapM, forM) import Data.Align import Data.Map (Map) import qualified Data.Map as Map import Data.Dependent.Map (DMap) import qualified Data.Dependent.Map as DMap import Data.Dependent.Sum (DSum (..)) import Data.GADT.Compare (GCompare (..), GEq (..), (:~:) (..), GOrdering (..)) import Data.Monoid --import Data.HList (HList (..), hBuild) data HList (l::[*]) where HNil :: HList '[] HCons :: e -> HList l -> HList (e ': l) infixr 2 `HCons` type family HRevApp (l1 :: [k]) (l2 :: [k]) :: [k] type instance HRevApp '[] l = l type instance HRevApp (e ': l) l' = HRevApp l (e ': l') hRevApp :: HList l1 -> HList l2 -> HList (HRevApp l1 l2) hRevApp HNil l = l hRevApp (HCons x l) l' = hRevApp l (HCons x 
l') hReverse :: HList l -> HList (HRevApp l '[]) hReverse l = hRevApp l HNil hBuild :: (HBuild' '[] r) => r hBuild = hBuild' HNil class HBuild' l r where hBuild' :: HList l -> r instance (l' ~ HRevApp l '[]) => HBuild' l (HList l') where hBuild' l = hReverse l instance HBuild' (a ': l) r => HBuild' l (a->r) where hBuild' l x = hBuild' (HCons x l) -- | A container for a value that can change over time and allows notifications on changes. -- Basically a combination of a 'Behavior' and an 'Event', with a rule that the Behavior will -- change if and only if the Event fires. data Dynamic t a = Dynamic (Behavior t a) (Event t a) unsafeDynamic :: Behavior t a -> Event t a -> Dynamic t a unsafeDynamic = Dynamic -- | Extract the 'Behavior' of a 'Dynamic'. current :: Dynamic t a -> Behavior t a current (Dynamic b _) = b -- | Extract the 'Event' of the 'Dynamic'. updated :: Dynamic t a -> Event t a updated (Dynamic _ e) = e -- | 'Dynamic' with the constant supplied value. constDyn :: Reflex t => a -> Dynamic t a constDyn x = Dynamic (constant x) never -- | Create a 'Dynamic' using the initial value that changes every -- time the 'Event' occurs. holdDyn :: MonadHold t m => a -> Event t a -> m (Dynamic t a) holdDyn v0 e = do b <- hold v0 e return $ Dynamic b e -- | Create a new 'Dynamic' that only signals changes if the values -- actually changed. nubDyn :: (Reflex t, Eq a) => Dynamic t a -> Dynamic t a nubDyn d = let e' = attachWithMaybe (\x x' -> if x' == x then Nothing else Just x') (current d) (updated d) in Dynamic (current d) e' --TODO: Avoid invalidating the outgoing Behavior {- instance Reflex t => Functor (Dynamic t) where fmap f d = let e' = fmap f $ updated d eb' = push (\b' -> liftM Just $ constant b') e' b0 = fmap f $ current d -} -- | Map a function over a 'Dynamic'. mapDyn :: (Reflex t, MonadHold t m) => (a -> b) -> Dynamic t a -> m (Dynamic t b) mapDyn f = mapDynM $ return . f -- | Flipped version of 'mapDyn'. 
forDyn :: (Reflex t, MonadHold t m) => Dynamic t a -> (a -> b) -> m (Dynamic t b) forDyn = flip mapDyn -- | Map a monadic function over a 'Dynamic'. The only monadic action that the given function can -- perform is 'sample'. {-# INLINE mapDynM #-} mapDynM :: forall t m a b. (Reflex t, MonadHold t m) => (forall m'. MonadSample t m' => a -> m' b) -> Dynamic t a -> m (Dynamic t b) mapDynM f d = do let e' = push (liftM Just . f :: a -> PushM t (Maybe b)) $ updated d eb' = fmap constant e' v0 = pull $ f =<< sample (current d) bb' :: Behavior t (Behavior t b) <- hold v0 eb' let b' = pull $ sample =<< sample bb' return $ Dynamic b' e' -- | Create a 'Dynamic' using the initial value and change it each -- time the 'Event' occurs using a folding function on the previous -- value and the value of the 'Event'. foldDyn :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> b) -> b -> Event t a -> m (Dynamic t b) foldDyn f = foldDynMaybe $ \o v -> Just $ f o v -- | Create a 'Dynamic' using the initial value and change it each -- time the 'Event' occurs using a monadic folding function on the -- previous value and the value of the 'Event'. foldDynM :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> PushM t b) -> b -> Event t a -> m (Dynamic t b) foldDynM f = foldDynMaybeM $ \o v -> liftM Just $ f o v foldDynMaybe :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> Maybe b) -> b -> Event t a -> m (Dynamic t b) foldDynMaybe f = foldDynMaybeM $ \o v -> return $ f o v foldDynMaybeM :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> PushM t (Maybe b)) -> b -> Event t a -> m (Dynamic t b) foldDynMaybeM f z e = do rec let e' = flip push e $ \o -> do v <- sample b' f o v b' <- hold z e' return $ Dynamic b' e' -- | Create a new 'Dynamic' that counts the occurences of the 'Event'. 
count :: (Reflex t, MonadHold t m, MonadFix m, Num b) => Event t a -> m (Dynamic t b) count e = holdDyn 0 =<< zipListWithEvent const (iterate (+1) 1) e -- | Create a new 'Dynamic' using the initial value that flips its -- value every time the 'Event' occurs. toggle :: (Reflex t, MonadHold t m, MonadFix m) => Bool -> Event t a -> m (Dynamic t Bool) toggle = foldDyn (const not) -- | Switches to the new 'Event' whenever it receives one. Switching -- occurs *before* the inner 'Event' fires - so if the 'Dynamic' changes and both the old and new -- inner Events fire simultaneously, the output will fire with the value of the *new* 'Event'. switchPromptlyDyn :: forall t a. Reflex t => Dynamic t (Event t a) -> Event t a switchPromptlyDyn de = let eLag = switch $ current de eCoincidences = coincidence $ updated de in leftmost [eCoincidences, eLag] {- mergeEventsWith :: Reflex t m => (a -> a -> a) -> Event t a -> Event t a -> m (Event t a) mergeEventsWith f ea eb = mapE (mergeThese f) =<< alignEvents ea eb firstE :: (Reflex t m) => [Event t a] -> m (Event t a) firstE [] = return never firstE (h:t) = mergeEventsLeftBiased h =<< firstE t concatEventsWith :: (Reflex t m) => (a -> a -> a) -> [Event t a] -> m (Event t a) concatEventsWith _ [] = return never concatEventsWith _ [e] = return e concatEventsWith f es = mapEM (liftM (foldl1 f . map (\(Const2 _ :=> v) -> v) . DMap.toList) . sequenceDmap) <=< mergeEventDMap $ DMap.fromList $ map (\(k, v) -> WrapArg (Const2 k) :=> v) $ zip [0 :: Int ..] es --concatEventsWith f (h:t) = mergeEventsWith f h =<< concatEventsWith f t mconcatE :: (Reflex t m, Monoid a) => [Event t a] -> m (Event t a) mconcatE = concatEventsWith mappend -} -- | Split the 'Dynamic' into two 'Dynamic's, each taking the -- respective value of the tuple. splitDyn :: (Reflex t, MonadHold t m) => Dynamic t (a, b) -> m (Dynamic t a, Dynamic t b) splitDyn d = liftM2 (,) (mapDyn fst d) (mapDyn snd d) -- | Merge the 'Dynamic' values using their 'Monoid' instance. 
mconcatDyn :: forall t m a. (Reflex t, MonadHold t m, Monoid a) => [Dynamic t a] -> m (Dynamic t a) mconcatDyn es = do ddm :: Dynamic t (DMap (Const2 Int a)) <- distributeDMapOverDyn $ DMap.fromList $ map (\(k, v) -> WrapArg (Const2 k) :=> v) $ zip [0 :: Int ..] es mapDyn (mconcat . map (\(Const2 _ :=> v) -> v) . DMap.toList) ddm -- | Create a 'Dynamic' with a 'DMap' of values out of a 'DMap' of -- Dynamic values. distributeDMapOverDyn :: forall t m k. (Reflex t, MonadHold t m, GCompare k) => DMap (WrapArg (Dynamic t) k) -> m (Dynamic t (DMap k)) distributeDMapOverDyn dm = case DMap.toList dm of [] -> return $ constDyn DMap.empty [WrapArg k :=> v] -> mapDyn (DMap.singleton k) v _ -> do let edmPre = merge $ rewrapDMap updated dm edm :: Event t (DMap k) = flip push edmPre $ \o -> return . Just =<< do let f _ = \case This origDyn -> sample $ current origDyn That _ -> error "distributeDMapOverDyn: should be impossible to have an event occurring that is not present in the original DMap" These _ (Identity newVal) -> return newVal sequenceDmap $ combineDMapsWithKey f dm (wrapDMap Identity o) dm0 :: Behavior t (DMap k) = pull $ do liftM DMap.fromList $ forM (DMap.toList dm) $ \(WrapArg k :=> dv) -> liftM (k :=>) $ sample $ current dv bbdm :: Behavior t (Behavior t (DMap k)) <- hold dm0 $ fmap constant edm let bdm = pull $ sample =<< sample bbdm return $ Dynamic bdm edm -- | Merge two 'Dynamic's into a new one using the provided -- function. The new 'Dynamic' changes its value each time one of the -- original 'Dynamic's changes its value. combineDyn :: forall t m a b c. 
(Reflex t, MonadHold t m) => (a -> b -> c) -> Dynamic t a -> Dynamic t b -> m (Dynamic t c) combineDyn f da db = do let eab = align (updated da) (updated db) ec = flip push eab $ \o -> do (a, b) <- case o of This a -> do b <- sample $ current db return (a, b) That b -> do a <- sample $ current da return (a, b) These a b -> return (a, b) return $ Just $ f a b c0 :: Behavior t c = pull $ liftM2 f (sample $ current da) (sample $ current db) bbc :: Behavior t (Behavior t c) <- hold c0 $ fmap constant ec let bc :: Behavior t c = pull $ sample =<< sample bbc return $ Dynamic bc ec {- tagInnerDyn :: Reflex t => Event t (Dynamic t a) -> Event t a tagInnerDyn e = let eSlow = push (liftM Just . sample . current) e eFast = coincidence $ fmap updated e in leftmost [eFast, eSlow] -} -- | Join a nested 'Dynamic' into a new 'Dynamic' that has the value -- of the inner 'Dynamic'. joinDyn :: forall t a. (Reflex t) => Dynamic t (Dynamic t a) -> Dynamic t a joinDyn dd = let b' = pull $ sample . current =<< sample (current dd) eOuter :: Event t a = pushAlways (sample . current) $ updated dd eInner :: Event t a = switch $ fmap updated (current dd) eBoth :: Event t a = coincidence $ fmap updated (updated dd) e' = leftmost [eBoth, eOuter, eInner] in Dynamic b' e' --TODO: Generalize this to functors other than Maps -- | Combine a 'Dynamic' of a 'Map' of 'Dynamic's into a 'Dynamic' -- with the current values of the 'Dynamic's in a map. joinDynThroughMap :: forall t k a. (Reflex t, Ord k) => Dynamic t (Map k (Dynamic t a)) -> Dynamic t (Map k a) joinDynThroughMap dd = let b' = pull $ mapM (sample . current) =<< sample (current dd) eOuter :: Event t (Map k a) = pushAlways (mapM (sample . current)) $ updated dd eInner :: Event t (Map k a) = attachWith (flip Map.union) b' $ switch $ fmap (mergeMap . 
fmap updated) (current dd) --Note: the flip is important because Map.union is left-biased readNonFiring :: MonadSample t m => These (Dynamic t a) a -> m a readNonFiring = \case This d -> sample $ current d That a -> return a These _ a -> return a eBoth :: Event t (Map k a) = coincidence $ fmap (\m -> pushAlways (mapM readNonFiring . align m) $ mergeMap $ fmap updated m) (updated dd) e' = leftmost [eBoth, eOuter, eInner] in Dynamic b' e' -- | Print the value of the 'Dynamic' on each change and prefix it -- with the provided string. This should /only/ be used for debugging. -- -- Note: Just like Debug.Trace.trace, the value will only be shown if something -- else in the system is depending on it. traceDyn :: (Reflex t, Show a) => String -> Dynamic t a -> Dynamic t a traceDyn s = traceDynWith $ \x -> s <> ": " <> show x -- | Print the result of applying the provided function to the value -- of the 'Dynamic' on each change. This should /only/ be used for -- debugging. -- -- Note: Just like Debug.Trace.trace, the value will only be shown if something -- else in the system is depending on it. traceDynWith :: Reflex t => (a -> String) -> Dynamic t a -> Dynamic t a traceDynWith f d = let e' = traceEventWith f $ updated d in Dynamic (current d) e' -- | Replace the value of the 'Event' with the current value of the 'Dynamic' -- each time the 'Event' occurs. -- -- Note: `tagDyn d e` differs from `tag (current d) e` in the case that `e` is firing -- at the same time that `d` is changing. With `tagDyn d e`, the *new* value of `d` -- will replace the value of `e`, whereas with `tag (current d) e`, the *old* value -- will be used, since the 'Behavior' won't be updated until the end of the frame. -- Additionally, this means that the output 'Event' may not be used to directly change -- the input 'Dynamic', because that would mean its value depends on itself. When creating -- cyclic data flows, generally `tag (current d) e` is preferred. 
tagDyn :: Reflex t => Dynamic t a -> Event t b -> Event t a tagDyn = attachDynWith const -- | Attach the current value of the 'Dynamic' to the value of the -- 'Event' each time it occurs. -- -- Note: `attachDyn d` is not the same as `attach (current d)`. See 'tagDyn' for details. attachDyn :: Reflex t => Dynamic t a -> Event t b -> Event t (a, b) attachDyn = attachDynWith (,) -- | Combine the current value of the 'Dynamic' with the value of the -- 'Event' each time it occurs. -- -- Note: `attachDynWith f d` is not the same as `attachWith f (current d)`. See 'tagDyn' for details. attachDynWith :: Reflex t => (a -> b -> c) -> Dynamic t a -> Event t b -> Event t c attachDynWith f = attachDynWithMaybe $ \a b -> Just $ f a b -- | Create a new 'Event' by combining the value at each occurence -- with the current value of the 'Dynamic' value and possibly -- filtering if the combining function returns 'Nothing'. -- -- Note: `attachDynWithMaybe f d` is not the same as `attachWithMaybe f (current d)`. See 'tagDyn' for details. attachDynWithMaybe :: Reflex t => (a -> b -> Maybe c) -> Dynamic t a -> Event t b -> Event t c attachDynWithMaybe f d e = let e' = attach (current d) e in fforMaybe (align e' $ updated d) $ \case This (a, b) -> f a b -- Only the tagging event is firing, so use that These (_, b) a -> f a b -- Both events are firing, so use the newer value That _ -> Nothing -- The tagging event isn't firing, so don't fire -------------------------------------------------------------------------------- -- Demux -------------------------------------------------------------------------------- -- | Represents a time changing value together with an 'EventSelector' -- that can efficiently detect when the underlying Dynamic has a particular value. -- This is useful for representing data like the current selection of a long list. 
-- -- Semantically, -- > getDemuxed (demux d) k === mapDyn (== k) d -- However, the when getDemuxed is used multiple times, the complexity is only /O(log(n))/, -- rather than /O(n)/ for mapDyn. data Demux t k = Demux { demuxValue :: Behavior t k , demuxSelector :: EventSelector t (Const2 k Bool) } -- | Demultiplex an input value to a 'Demux' with many outputs. At any given time, whichever output is indicated by the given 'Dynamic' will be 'True'. demux :: (Reflex t, Ord k) => Dynamic t k -> Demux t k demux k = Demux (current k) (fan $ attachWith (\k0 k1 -> if k0 == k1 then DMap.empty else DMap.fromList [Const2 k0 :=> False, Const2 k1 :=> True]) (current k) (updated k)) --TODO: The pattern of using hold (sample b0) can be reused in various places as a safe way of building certain kinds of Dynamics; see if we can factor this out -- | Select a particular output of the 'Demux'; this is equivalent to (but much faster than) -- mapping over the original 'Dynamic' and checking whether it is equal to the given key. 
getDemuxed :: (Reflex t, MonadHold t m, Eq k) => Demux t k -> k -> m (Dynamic t Bool) getDemuxed d k = do let e = select (demuxSelector d) (Const2 k) bb <- hold (liftM (==k) $ sample $ demuxValue d) $ fmap return e let b = pull $ join $ sample bb return $ Dynamic b e -------------------------------------------------------------------------------- -- collectDyn -------------------------------------------------------------------------------- --TODO: This whole section is badly in need of cleanup data FHList f l where FHNil :: FHList f '[] FHCons :: f e -> FHList f l -> FHList f (e ': l) instance GEq (HListPtr l) where HHeadPtr `geq` HHeadPtr = Just Refl HHeadPtr `geq` HTailPtr _ = Nothing HTailPtr _ `geq` HHeadPtr = Nothing HTailPtr a `geq` HTailPtr b = a `geq` b instance GCompare (HListPtr l) where -- Warning: This ordering can't change, dmapTo*HList will break HHeadPtr `gcompare` HHeadPtr = GEQ HHeadPtr `gcompare` HTailPtr _ = GLT HTailPtr _ `gcompare` HHeadPtr = GGT HTailPtr a `gcompare` HTailPtr b = a `gcompare` b data HListPtr l a where HHeadPtr :: HListPtr (h ': t) h HTailPtr :: HListPtr t a -> HListPtr (h ': t) a fhlistToDMap :: forall f l. FHList f l -> DMap (WrapArg f (HListPtr l)) fhlistToDMap = DMap.fromList . go where go :: forall l'. 
FHList f l' -> [DSum (WrapArg f (HListPtr l'))] go = \case FHNil -> [] FHCons h t -> (WrapArg HHeadPtr :=> h) : map (\(WrapArg p :=> v) -> WrapArg (HTailPtr p) :=> v) (go t) class RebuildSortedHList l where rebuildSortedFHList :: [DSum (WrapArg f (HListPtr l))] -> FHList f l rebuildSortedHList :: [DSum (HListPtr l)] -> HList l instance RebuildSortedHList '[] where rebuildSortedFHList l = case l of [] -> FHNil _ : _ -> error "rebuildSortedFHList{'[]}: empty list expected" rebuildSortedHList l = case l of [] -> HNil _ : _ -> error "rebuildSortedHList{'[]}: empty list expected" instance RebuildSortedHList t => RebuildSortedHList (h ': t) where rebuildSortedFHList l = case l of ((WrapArg HHeadPtr :=> h) : t) -> FHCons h $ rebuildSortedFHList $ map (\(WrapArg (HTailPtr p) :=> v) -> WrapArg p :=> v) t _ -> error "rebuildSortedFHList{h':t}: non-empty list with HHeadPtr expected" rebuildSortedHList l = case l of ((HHeadPtr :=> h) : t) -> HCons h $ rebuildSortedHList $ map (\(HTailPtr p :=> v) -> p :=> v) t _ -> error "rebuildSortedHList{h':t}: non-empty list with HHeadPtr expected" dmapToHList :: forall l. RebuildSortedHList l => DMap (HListPtr l) -> HList l dmapToHList = rebuildSortedHList . DMap.toList distributeFHListOverDyn :: forall t m l. (Reflex t, MonadHold t m, RebuildSortedHList l) => FHList (Dynamic t) l -> m (Dynamic t (HList l)) distributeFHListOverDyn l = mapDyn dmapToHList =<< distributeDMapOverDyn (fhlistToDMap l) {- distributeFHListOverDyn l = do let ec = undefined c0 = pull $ sequenceFHList $ natMap (sample . 
current) l bbc <- hold c0 $ fmap constant ec let bc = pull $ sample =<< sample bbc return $ Dynamic bc ec -} class AllAreFunctors (f :: a -> *) (l :: [a]) where type FunctorList f l :: [*] toFHList :: HList (FunctorList f l) -> FHList f l fromFHList :: FHList f l -> HList (FunctorList f l) instance AllAreFunctors f '[] where type FunctorList f '[] = '[] toFHList l = case l of HNil -> FHNil _ -> error "toFHList: impossible" -- Otherwise, GHC complains of a non-exhaustive pattern match; see https://ghc.haskell.org/trac/ghc/ticket/4139 fromFHList FHNil = HNil instance AllAreFunctors f t => AllAreFunctors f (h ': t) where type FunctorList f (h ': t) = f h ': FunctorList f t toFHList l = case l of a `HCons` b -> a `FHCons` toFHList b _ -> error "toFHList: impossible" -- Otherwise, GHC complains of a non-exhaustive pattern match; see https://ghc.haskell.org/trac/ghc/ticket/4139 fromFHList (a `FHCons` b) = a `HCons` fromFHList b collectDyn :: ( RebuildSortedHList (HListElems b) , IsHList a, IsHList b , AllAreFunctors (Dynamic t) (HListElems b) , Reflex t, MonadHold t m , HListElems a ~ FunctorList (Dynamic t) (HListElems b) ) => a -> m (Dynamic t b) collectDyn ds = mapDyn fromHList =<< distributeFHListOverDyn (toFHList $ toHList ds) -- Poor man's Generic class IsHList a where type HListElems a :: [*] toHList :: a -> HList (HListElems a) fromHList :: HList (HListElems a) -> a instance IsHList (a, b) where type HListElems (a, b) = [a, b] toHList (a, b) = hBuild a b fromHList l = case l of a `HCons` b `HCons` HNil -> (a, b) _ -> error "fromHList: impossible" -- Otherwise, GHC complains of a non-exhaustive pattern match; see https://ghc.haskell.org/trac/ghc/ticket/4139 instance IsHList (a, b, c, d) where type HListElems (a, b, c, d) = [a, b, c, d] toHList (a, b, c, d) = hBuild a b c d fromHList l = case l of a `HCons` b `HCons` c `HCons` d `HCons` HNil -> (a, b, c, d) _ -> error "fromHList: impossible" -- Otherwise, GHC complains of a non-exhaustive pattern match; see 
https://ghc.haskell.org/trac/ghc/ticket/4139 instance IsHList (a, b, c, d, e, f) where type HListElems (a, b, c, d, e, f) = [a, b, c, d, e, f] toHList (a, b, c, d, e, f) = hBuild a b c d e f fromHList l = case l of a `HCons` b `HCons` c `HCons` d `HCons` e `HCons` f `HCons` HNil -> (a, b, c, d, e, f) _ -> error "fromHList: impossible" -- Otherwise, GHC complains of a non-exhaustive pattern match; see https://ghc.haskell.org/trac/ghc/ticket/4139
k0001/reflex
src/Reflex/Dynamic.hs
bsd-3-clause
22,941
0
26
5,743
6,593
3,425
3,168
307
5
import qualified Data.Text.IO as T import qualified Data.Text as T import qualified Data.Text.Lazy.IO as TL import qualified Data.Text.Lazy as TL import Pipes import qualified Pipes.Text as TP import qualified Pipes.ByteString as BP import Pipes.Safe main = textaction big = "../../examples/txt/words2.txt" textaction = T.readFile big >>= T.putStrLn pipeaction = runEffect $ for ((TP.readFile big) >> return ()) (lift . T.putStrLn)
bitemyapp/text-pipes
bench/IO.hs
bsd-3-clause
441
0
11
68
130
81
49
12
1
{-# LANGUAGE GADTs #-} module Syntax.Tree where -------------------------------------------------------------------------------- -- Identifiers -------------------------------------------------------------------------------- type Identifier = String type SnakeId = Identifier type CamelId = Identifier -- Used to refer to variables, functions, structs, etc. type VarName = SnakeId -- Represents how to 'reach' a variable, e.g. ["x", "m_x"] would be the -- variable m_x, via the object x. The code would be: x.m_x type VarPath = [VarName] -------------------------------------------------------------------------------- -- Types -------------------------------------------------------------------------------- -- Types associated with data stored in variables. data DataType = SymType | TapeType | CustomType StructName deriving (Eq, Show) -- Returns whether the data type is a custom type. Says nothing about which -- custom type it is. isCustomType :: DataType -> Bool isCustomType (CustomType _) = True isCustomType _ = False -- Types that have a data type, e.g. Symbol, Tape, Struct, etc. class Typed a where typeOf :: a -> DataType -------------------------------------------------------------------------------- -- Tape Symbols -------------------------------------------------------------------------------- -- Tape symbol, i.e. a symbol contained in a cell of the machine's tape. type TapeSymbol = Char -- Values that evaluate to tape symbols. data SymExpr = Read TapeExpr | SymLit TapeSymbol | SymVar VarPath deriving (Eq, Show) instance Typed SymExpr where typeOf _ = SymType -------------------------------------------------------------------------------- -- Tape -------------------------------------------------------------------------------- -- Values that evaluate to tape references. 
data TapeExpr = TapeLit String | TapeVar VarPath deriving (Eq, Show) instance Typed TapeExpr where typeOf _ = TapeType -------------------------------------------------------------------------------- -- Objects -------------------------------------------------------------------------------- -- Values that evaluate to structure instances. data ObjExpr = NewObj StructName [NewObjArg] | ObjVar StructName VarPath deriving (Eq, Show) instance Typed ObjExpr where typeOf (NewObj structName _) = CustomType structName typeOf (ObjVar structName _) = CustomType structName -------------------------------------------------------------------------------- -- Variables -------------------------------------------------------------------------------- -- Values that evaluate to either a symbol or tape. data AnyValExpr = S SymExpr | T TapeExpr | C ObjExpr deriving (Eq, Show) instance Typed AnyValExpr where typeOf (S s) = typeOf s typeOf (T t) = typeOf t typeOf (C c) = typeOf c -------------------------------------------------------------------------------- -- Functions -------------------------------------------------------------------------------- -- Name of a function. type FuncName = SnakeId -- Name of an argument to a function. type ArgName = SnakeId -- Argument supplied when defining a function. type FuncDeclArg = (ArgName, DataType) -- Argument supplied when invoking a function. type FuncCallArg = AnyValExpr -- Returns the type of an argument to a function invocation. argType :: FuncDeclArg -> DataType argType = snd -------------------------------------------------------------------------------- -- Structs -------------------------------------------------------------------------------- -- Name of a structure. type StructName = CamelId -- Variable contained within a struct. type StructMemberVar = (VarName, DataType) -- Argument supplied when creating an object. type NewObjArg = AnyValExpr -- Returns the type of the variable in the struct. 
memberVarType :: StructMemberVar -> DataType memberVarType = snd -------------------------------------------------------------------------------- -- Bexp -------------------------------------------------------------------------------- -- Syntax tree for boolean expressions. data Bexp = TRUE | FALSE | Not Bexp | And Bexp Bexp | Or Bexp Bexp | Eq SymExpr SymExpr | Le SymExpr SymExpr | Ne SymExpr SymExpr deriving (Eq, Show) -------------------------------------------------------------------------------- -- Stm -------------------------------------------------------------------------------- -- Syntax tree for statements. data Stm = MoveLeft TapeExpr | MoveRight TapeExpr | Write TapeExpr SymExpr | Accept | Reject | If Bexp Stm [(Bexp, Stm)] (Maybe Stm) | While Bexp Stm | VarDecl VarName AnyValExpr | FuncDecl FuncName [FuncDeclArg] Stm | Call FuncName [AnyValExpr] | StructDecl StructName [StructMemberVar] | Comp Stm Stm | Print SymExpr | PrintLn (Maybe SymExpr) | DebugPrintTape TapeExpr deriving (Eq, Show) -------------------------------------------------------------------------------- -- Program -------------------------------------------------------------------------------- -- Path of a Metal file to be imported. type ImportPath = String -- The contents of a metal file. type FileContents = String
BakerSmithA/Turing
src/Syntax/Tree.hs
bsd-3-clause
5,552
0
8
1,059
726
436
290
80
1
{- | Module : $Header$ Description : intermediate calculus table Copyright : (c) Uni Bremen 2005 License : GPLv2 or higher, see LICENSE.txt Maintainer : Christian.Maeder@dfki.de Stability : provisional Portability : portable -} module CASL.CompositionTable.ModelTable where import CASL.CompositionTable.CompositionTable import Common.Utils import qualified Data.IntSet as IntSet import qualified Data.IntMap as IntMap import qualified Data.Map as Map import Data.List data Table2 = Table2 String Int (IntMap.IntMap Baserel) BSet CmpTbl ConTables type BSet = IntSet.IntSet type CmpTbl = IntMap.IntMap (IntMap.IntMap IntSet.IntSet) type ConTable = IntMap.IntMap IntSet.IntSet type ConTables = (ConTable, ConTable, ConTable, ConTable) lkup :: (Show a, Ord a) => a -> Map.Map a Int -> Int lkup i = Map.findWithDefault (error $ "CASL.CompositionTable.ModelTable.lkup" ++ show i) i toTable2 :: Table -> Table2 toTable2 (Table (Table_Attrs name id_ baserels) (Compositiontable comptbl) convtbl _ _) = let ns = number baserels m = Map.fromList ns in Table2 name (lkup id_ m) (IntMap.fromList $ map (\ (a, b) -> (b, a)) ns) (IntSet.fromAscList [1 .. 
Map.size m]) (toCmpTbl m comptbl) $ toConTables m convtbl toCmpTbl :: Map.Map Baserel Int -> [Cmptabentry] -> CmpTbl toCmpTbl m = foldl' (\ t (Cmptabentry (Cmptabentry_Attrs rel1 rel2) bs) -> IntMap.insertWith IntMap.union (lkup rel1 m) (IntMap.insertWith IntSet.union (lkup rel2 m) (IntSet.fromList $ map (`lkup` m) bs) IntMap.empty) t) IntMap.empty toConTab :: Map.Map Baserel Int -> (a -> Baserel) -> (a -> [Baserel]) -> [a] -> ConTable toConTab m s1 s2 = foldl' (\ t a -> IntMap.insertWith IntSet.union (lkup (s1 a) m) (IntSet.fromList $ map (`lkup` m) $ s2 a) t) IntMap.empty toConTab2 :: Map.Map Baserel Int -> [Contabentry_Ternary] -> ConTable toConTab2 m = toConTab m contabentry_TernaryArgBaseRel contabentry_TernaryConverseBaseRels toConTables :: Map.Map Baserel Int -> Conversetable -> ConTables toConTables m c = case c of Conversetable l -> (toConTab m contabentryArgBaseRel contabentryConverseBaseRel l , IntMap.empty, IntMap.empty, IntMap.empty) Conversetable_Ternary l1 l2 l3 -> (IntMap.empty, toConTab2 m l1, toConTab2 m l2, toConTab2 m l3)
mariefarrell/Hets
CASL/CompositionTable/ModelTable.hs
gpl-2.0
2,353
0
14
465
769
409
360
47
2
import Control.Monad as CM(forM,filterM) yes = flip mapM
bitemyapp/apply-refact
tests/examples/Default123.hs
bsd-3-clause
57
0
5
8
24
14
10
2
1
module B2 where data Data1 a = C1 a Int Int | C4 Float | C2 Int | C3 Float addedC4 = error "added C4 Float to Data1" g (C1 x y z) (C1 n m o) = y + m g (C4 a) b = addedC4 g a (C4 b) = addedC4 g (C2 x) (C2 y) = x - y g (C3 x) (C3 k) = 42
kmate/HaRe
old/testing/addCon/B2AST.hs
bsd-3-clause
245
0
7
78
155
81
74
9
1
module Syntax (module S) where import BaseSyntax as S import SyntaxRec as S import SyntaxRecPretty as S
forste/haReFork
tools/base/syntax/Syntax.hs
bsd-3-clause
106
0
4
19
25
19
6
4
0
module HAD.Y2014.M03.D13.Solution where import Control.Applicative -- | pairToList Trnsform a pair of same type elements in a list of two -- elements. -- -- Of course, the major challenge is to find a point free function -- (without lambda) -- -- prop> replicate 2 (x :: Int) == pairToList (x,x) -- -- prop> (\(f,s) -> [f,s]) x == pairToList x -- pairToList :: (a,a) -> [a] pairToList = (:) <$> fst <*> ((:[]) . snd)
1HaskellADay/1HAD
exercises/HAD/Y2014/M03/D13/Solution.hs
mit
421
0
9
79
70
48
22
4
1
{-# LANGUAGE NoImplicitPrelude #-}

-- | Description: interpret flags parsed by "IHaskell.Flags"
module IHaskell.Convert.Args (ConvertSpec(..), fromJustConvertSpec, toConvertSpec) where

import IHaskellPrelude

import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as CBS

import Control.Applicative ((<$>))
import Control.Monad.Identity (Identity(Identity))
import Data.Char (toLower)
import Data.List (partition)
import Data.Maybe (fromMaybe)
-- merges lazy-Text names into the same @T@ namespace as Data.Text above
import qualified Data.Text.Lazy as T (pack, Text)
import IHaskell.Flags (Argument(..), LhsStyle, lhsStyleBird, NotebookFormat(..))
import System.FilePath ((<.>), dropExtension, takeExtension)
import Text.Printf (printf)

-- | ConvertSpec is the accumulator for command line arguments.
-- The functor parameter @f@ is 'Maybe' while options are still being
-- collected and 'Identity' once every required field is known.
data ConvertSpec f =
       ConvertSpec
         { convertToIpynb :: f Bool            -- ^ True: to .ipynb; False: to .lhs
         , convertInput :: f FilePath          -- ^ source file
         , convertOutput :: f FilePath         -- ^ destination file
         , convertLhsStyle :: f (LhsStyle LT.Text)  -- ^ literate-Haskell style
         , convertOverwriteFiles :: Bool       -- ^ clobber existing output?
         }

-- | Convert a possibly-incomplete specification for what to convert into one
-- which can be executed.  Calls error when data is missing.
-- A missing input/output file is derived from the other one by swapping
-- the extension (.ipynb <-> .lhs) according to the conversion direction.
fromJustConvertSpec :: ConvertSpec Maybe -> ConvertSpec Identity
fromJustConvertSpec convertSpec = convertSpec
  { convertToIpynb = Identity toIpynb
  , convertInput = Identity inputFile
  , convertOutput = Identity outputFile
  , convertLhsStyle = Identity $ fromMaybe (LT.pack <$> lhsStyleBird)
                                   (convertLhsStyle convertSpec)
  }
  where
    toIpynb = fromMaybe (error "Error: direction for conversion unknown")
                (convertToIpynb convertSpec)
    (inputFile, outputFile) =
      case (convertInput convertSpec, convertOutput convertSpec) of
        (Nothing, Nothing) -> error "Error: no files specified for conversion"
        (Just i, Nothing)
          | toIpynb -> (i, dropExtension i <.> "ipynb")
          | otherwise -> (i, dropExtension i <.> "lhs")
        (Nothing, Just o)
          | toIpynb -> (dropExtension o <.> "lhs", o)
          | otherwise -> (dropExtension o <.> "ipynb", o)
        (Just i, Just o) -> (i, o)

-- | Does this @Argument@ explicitly request a file format?
isFormatSpec :: Argument -> Bool
isFormatSpec (ConvertToFormat _) = True
isFormatSpec (ConvertFromFormat _) = True
isFormatSpec _ = False

-- | Fold the command-line arguments into a (possibly incomplete) spec.
-- Explicit format requests are merged first so that later file arguments
-- cannot override them (see the @(prev\@(Just _), _)@ cases in 'mergeArg').
toConvertSpec :: [Argument] -> ConvertSpec Maybe
toConvertSpec args = mergeArgs otherArgs (mergeArgs formatSpecArgs initialConvertSpec)
  where
    (formatSpecArgs, otherArgs) = partition isFormatSpec args
    initialConvertSpec = ConvertSpec Nothing Nothing Nothing Nothing False

-- | Right fold of 'mergeArg' over an argument list.
mergeArgs :: [Argument] -> ConvertSpec Maybe -> ConvertSpec Maybe
mergeArgs args initialConvertSpec = foldr mergeArg initialConvertSpec args

-- | Merge a single argument into the spec, calling 'error' on conflicting
-- duplicates (two different styles, inputs, or outputs).
mergeArg :: Argument -> ConvertSpec Maybe -> ConvertSpec Maybe
mergeArg OverwriteFiles convertSpec = convertSpec { convertOverwriteFiles = True }
mergeArg (ConvertLhsStyle lhsStyle) convertSpec
  | Just previousLhsStyle <- convertLhsStyle convertSpec,
    previousLhsStyle /= fmap LT.pack lhsStyle
  = error $ printf "Conflicting lhs styles requested: <%s> and <%s>"
              (show lhsStyle) (show previousLhsStyle)
  | otherwise = convertSpec { convertLhsStyle = Just (LT.pack <$> lhsStyle) }
mergeArg (ConvertFrom inputFile) convertSpec
  | Just previousInputFile <- convertInput convertSpec,
    previousInputFile /= inputFile
  = error $ printf "Multiple input files specified: <%s> and <%s>"
              inputFile previousInputFile
  | otherwise = convertSpec
      { convertInput = Just inputFile
        -- infer the direction from the input extension, but only when no
        -- explicit direction was given earlier
      , convertToIpynb = case (convertToIpynb convertSpec, fromExt inputFile) of
          (prev, Nothing) -> prev
          (prev@(Just _), _) -> prev
          (Nothing, format) -> fmap (== LhsMarkdown) format
      }
mergeArg (ConvertTo outputFile) convertSpec
  | Just previousOutputFile <- convertOutput convertSpec,
    previousOutputFile /= outputFile
  = error $ printf "Multiple output files specified: <%s> and <%s>"
              outputFile previousOutputFile
  | otherwise = convertSpec
      { convertOutput = Just outputFile
        -- analogous inference from the output extension
      , convertToIpynb = case (convertToIpynb convertSpec, fromExt outputFile) of
          (prev, Nothing) -> prev
          (prev@(Just _), _) -> prev
          (Nothing, format) -> fmap (== IpynbFile) format
      }
mergeArg unexpectedArg _ = error $
  "IHaskell.Convert.mergeArg: impossible argument: " ++ show unexpectedArg

-- | Guess the format based on the file extension.
fromExt :: FilePath -> Maybe NotebookFormat
fromExt s =
  case map toLower (takeExtension s) of
    ".lhs"   -> Just LhsMarkdown
    ".ipynb" -> Just IpynbFile
    _        -> Nothing
artuuge/IHaskell
src/IHaskell/Convert/Args.hs
mit
4,837
0
14
1,045
1,214
656
558
88
5
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998

This is useful, general stuff for the Native Code Generator.

Provide trees (of instructions), so that lists of instructions
can be appended in linear time.
-}

{-# LANGUAGE CPP #-}

module OrdList (
        OrdList,
        nilOL, isNilOL, unitOL, appOL, consOL, snocOL, concatOL, lastOL,
        mapOL, fromOL, toOL, foldrOL, foldlOL
) where

import Outputable

#if __GLASGOW_HASKELL__ > 710
import Data.Semigroup ( Semigroup )
import qualified Data.Semigroup as Semigroup
#endif

infixl 5 `appOL`
infixl 5 `snocOL`
infixr 5 `consOL`

-- | A sequence supporting O(1) append ('Two'), cons and snoc; flattened
-- to a plain list only on demand by 'fromOL'.
data OrdList a
  = None
  | One a
  | Many [a]            -- Invariant: non-empty
  | Cons a (OrdList a)
  | Snoc (OrdList a) a
  | Two (OrdList a)     -- Invariant: non-empty
        (OrdList a)     -- Invariant: non-empty

instance Outputable a => Outputable (OrdList a) where
  ppr ol = ppr (fromOL ol)  -- Convert to list and print that

#if __GLASGOW_HASKELL__ > 710
instance Semigroup (OrdList a) where
  (<>) = appOL
#endif

instance Monoid (OrdList a) where
  mempty = nilOL
  mappend = appOL
  mconcat = concatOL

nilOL    :: OrdList a
isNilOL  :: OrdList a -> Bool

unitOL   :: a -> OrdList a
snocOL   :: OrdList a -> a -> OrdList a
consOL   :: a -> OrdList a -> OrdList a
appOL    :: OrdList a -> OrdList a -> OrdList a
concatOL :: [OrdList a] -> OrdList a
lastOL   :: OrdList a -> a

nilOL = None
unitOL as = One as
snocOL as b = Snoc as b
consOL a bs = Cons a bs
concatOL aas = foldr appOL None aas

-- partial: the empty list has no last element
lastOL None = panic "lastOL"
lastOL (One a) = a
lastOL (Many as) = last as     -- safe: Many is non-empty by invariant
lastOL (Cons _ as) = lastOL as
lastOL (Snoc _ a) = a
lastOL (Two _ as) = lastOL as  -- safe: both halves non-empty by invariant

isNilOL None = True
isNilOL _    = False

-- 'appOL' avoids building 'Two' nodes around empty or singleton operands,
-- which keeps the invariants of 'Two' intact.
None  `appOL` b     = b
a     `appOL` None  = a
One a `appOL` b     = Cons a b
a     `appOL` One b = Snoc a b
a     `appOL` b     = Two a b

-- | Flatten to a list in O(n) using an accumulator (no quadratic '++').
fromOL :: OrdList a -> [a]
fromOL a = go a []
  where go None       acc = acc
        go (One a)    acc = a : acc
        go (Cons a b) acc = a : go b acc
        go (Snoc a b) acc = go a (b:acc)
        go (Two a b)  acc = go a (go b acc)
        go (Many xs)  acc = xs ++ acc

mapOL :: (a -> b) -> OrdList a -> OrdList b
mapOL _ None = None
mapOL f (One x) = One (f x)
mapOL f (Cons x xs) = Cons (f x) (mapOL f xs)
mapOL f (Snoc xs x) = Snoc (mapOL f xs) (f x)
mapOL f (Two x y) = Two (mapOL f x) (mapOL f y)
mapOL f (Many xs) = Many (map f xs)

instance Functor OrdList where
  fmap = mapOL

foldrOL :: (a->b->b) -> b -> OrdList a -> b
foldrOL _ z None        = z
foldrOL k z (One x)     = k x z
foldrOL k z (Cons x xs) = k x (foldrOL k z xs)
foldrOL k z (Snoc xs x) = foldrOL k (k x z) xs
foldrOL k z (Two b1 b2) = foldrOL k (foldrOL k z b2) b1
foldrOL k z (Many xs)   = foldr k z xs

foldlOL :: (b->a->b) -> b -> OrdList a -> b
foldlOL _ z None        = z
foldlOL k z (One x)     = k z x
foldlOL k z (Cons x xs) = foldlOL k (k z x) xs
foldlOL k z (Snoc xs x) = k (foldlOL k z xs) x
foldlOL k z (Two b1 b2) = foldlOL k (foldlOL k z b1) b2
-- NOTE(review): lazy 'foldl' here mirrors the lazy left-fold semantics of
-- the other equations; a strict foldl' would change thunking behaviour.
foldlOL k z (Many xs)   = foldl k z xs

toOL :: [a] -> OrdList a
toOL [] = None
toOL xs = Many xs   -- preserves the Many non-empty invariant
tjakway/ghcjvm
compiler/utils/OrdList.hs
bsd-3-clause
3,188
0
9
929
1,451
740
711
83
6
{-# OPTIONS_GHC -Wunused-binds #-} {-# LANGUAGE PatternSynonyms #-} module Foo (pattern P) where -- x is used!! x :: Int x = 0 pattern P :: Int pattern P <- _ where P = x
ezyang/ghc
testsuite/tests/rename/should_compile/T12548.hs
bsd-3-clause
182
0
5
46
45
28
17
8
1
module TcFail209a where g :: ((Show a, Num a), Eq a) => a -> a g = undefined
urbanslug/ghc
testsuite/tests/typecheck/should_fail/tcfail209a.hs
bsd-3-clause
78
1
8
19
41
23
18
-1
-1