code (stringlengths 5..1.03M) | repo_name (stringlengths 5..90) | path (stringlengths 4..158) | license (stringclasses, 15 values) | size (int64 5..1.03M) | n_ast_errors (int64 0..53.9k) | ast_max_depth (int64 2..4.17k) | n_whitespaces (int64 0..365k) | n_ast_nodes (int64 3..317k) | n_ast_terminals (int64 1..171k) | n_ast_nonterminals (int64 1..146k) | loc (int64 -1..37.3k) | cycloplexity (int64 -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE PackageImports #-}
module Camera where
import "linear" Linear
d0 = V3 0 (-1) 0
u0 = V3 0 0 (-1)
s0 p = (p,zero,zero,(0,0))
calcCam dt (dmx,dmy) (left,up,down,right,turbo) (p0,_,_,(mx,my)) = (p',p' + d,u,(mx',my'))
where
nil c n = if c then n else zero
p' = nil left (v ^* (-t)) + nil up (d ^* t) + nil down (d ^* (-t)) + nil right (v ^* t) + p0
k = if turbo then 500 else 100
t = k * realToFrac dt
mx' = dmx-- + mx
my' = dmy-- + my
rm = rotationEuler $ V3 (-mx' / 100) (-my' / 100) 0
d = rotate rm d0
u = rotate rm u0
v = signorm $ d `cross` u
rotationEuler :: V3 Float -> Quaternion Float
rotationEuler (V3 a b c) = axisAngle (V3 0 0 1) a * axisAngle (V3 1 0 0) b * axisAngle (V3 0 1 0) (c)
| csabahruska/gpipe-quake3 | Camera.hs | bsd-3-clause | 763 | 0 | 15 | 217 | 438 | 242 | 196 | 19 | 3 |
{-# LANGUAGE UnicodeSyntax #-}
import Prelude.Unicode
paths ∷ Eq a ⇒ a → a → [(a, a)] → [[a]]
paths source sink edges
| source ≡ sink = [[sink]]
| otherwise = [[source] ++ path |
edge ← edges, (fst edge) ≡ source,
path ← (paths (snd edge) sink [e | e ← edges, e ≢ edge])
]
| m00nlight/99-problems | haskell/p-81.hs | bsd-3-clause | 362 | 0 | 13 | 125 | 158 | 83 | 75 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module PersistenceSpec (spec) where
import Data.Foldable
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck.Instances ()
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy.Char8 as LBS8
import OrphanInstances ()
import Store (Modification (..))
import qualified Store
spec :: Spec
spec = do
describe "Store.Modification" $ do
prop "does not contain new lines when serialized" $ \op ->
let jsonStr = Aeson.encode (op :: Modification)
in '\n' `LBS8.notElem` jsonStr
prop "round trips serialization" $ \op ->
let jsonStr = Aeson.encode (op :: Modification)
decoded = Aeson.decode jsonStr
in Just op == decoded
describe "Journaling" $ do
prop "journal is idempotent" $ \ops initial ->
let replay value = foldl' (flip Store.applyModification) value (ops :: [Modification])
in replay initial == replay (replay initial)
| channable/icepeak | server/tests/PersistenceSpec.hs | bsd-3-clause | 966 | 0 | 19 | 191 | 280 | 152 | 128 | 25 | 1 |
-----------------------------------------------------------------------------
--
-- Building info tables.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmLayout (
mkArgDescr,
emitCall, emitReturn, adjustHpBackwards,
emitClosureProcAndInfoTable,
emitClosureAndInfoTable,
slowCall, directCall,
mkVirtHeapOffsets, mkVirtConstrOffsets, getHpRelOffset, hpRel,
ArgRep(..), toArgRep, argRepSizeW -- re-exported from StgCmmArgRep
) where
#include "HsVersions.h"
import StgCmmClosure
import StgCmmEnv
import StgCmmArgRep -- notably: ( slowCallPattern )
import StgCmmTicky
import StgCmmMonad
import StgCmmUtils
import StgCmmProf (curCCS)
import MkGraph
import SMRep
import Cmm
import CmmUtils
import CmmInfo
import CLabel
import StgSyn
import Id
import TyCon ( PrimRep(..) )
import BasicTypes ( RepArity )
import DynFlags
import Module
import Util
import Data.List
import Outputable
import FastString
import Control.Monad
------------------------------------------------------------------------
-- Call and return sequences
------------------------------------------------------------------------
-- | Return multiple values to the sequel
--
-- If the sequel is @Return@
--
-- > return (x,y)
--
-- If the sequel is @AssignTo [p,q]@
--
-- > p=x; q=y;
--
emitReturn :: [CmmExpr] -> FCode ReturnKind
emitReturn results
= do { dflags <- getDynFlags
; sequel <- getSequel
; updfr_off <- getUpdFrameOff
; case sequel of
Return _ ->
do { adjustHpBackwards
; let e = CmmLoad (CmmStackSlot Old updfr_off) (gcWord dflags)
; emit (mkReturn dflags (entryCode dflags e) results updfr_off)
}
AssignTo regs adjust ->
do { when adjust adjustHpBackwards
; emitMultiAssign regs results }
; return AssignedDirectly
}
-- | @emitCall conv fun args@ makes a call to the entry-code of @fun@,
-- using the call/return convention @conv@, passing @args@, and
-- returning the results to the current sequel.
--
emitCall :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> FCode ReturnKind
emitCall convs fun args
= emitCallWithExtraStack convs fun args noExtraStack
-- | @emitCallWithExtraStack conv fun args stack@ makes a call to the
-- entry-code of @fun@, using the call/return convention @conv@,
-- passing @args@, pushing some extra stack frames described by
-- @stack@, and returning the results to the current sequel.
--
emitCallWithExtraStack
:: (Convention, Convention) -> CmmExpr -> [CmmExpr]
-> [CmmExpr] -> FCode ReturnKind
emitCallWithExtraStack (callConv, retConv) fun args extra_stack
= do { dflags <- getDynFlags
; adjustHpBackwards
; sequel <- getSequel
; updfr_off <- getUpdFrameOff
; case sequel of
Return _ -> do
emit $ mkJumpExtra dflags callConv fun args updfr_off extra_stack
return AssignedDirectly
AssignTo res_regs _ -> do
k <- newLabelC
let area = Young k
(off, _, copyin) = copyInOflow dflags retConv area res_regs []
copyout = mkCallReturnsTo dflags fun callConv args k off updfr_off
extra_stack
emit (copyout <*> mkLabel k <*> copyin)
return (ReturnedTo k off)
}
adjustHpBackwards :: FCode ()
-- This function adjusts the heap pointer just before a tail call or
-- return. At a call or return, the virtual heap pointer may be less
-- than the real Hp, because the latter was advanced to deal with
-- the worst-case branch of the code, and we may be in a better-case
-- branch. In that case, move the real Hp *back* and retract some
-- ticky allocation count.
--
-- It *does not* deal with high-water-mark adjustment.
-- That's done by functions which allocate heap.
adjustHpBackwards
= do { hp_usg <- getHpUsage
; let rHp = realHp hp_usg
vHp = virtHp hp_usg
adjust_words = vHp -rHp
; new_hp <- getHpRelOffset vHp
; emit (if adjust_words == 0
then mkNop
else mkAssign hpReg new_hp) -- Generates nothing when vHp==rHp
; tickyAllocHeap False adjust_words -- ...ditto
; setRealHp vHp
}
-------------------------------------------------------------------------
-- Making calls: directCall and slowCall
-------------------------------------------------------------------------
-- General plan is:
-- - we'll make *one* fast call, either to the function itself
-- (directCall) or to stg_ap_<pat>_fast (slowCall)
-- Any left-over arguments will be pushed on the stack,
--
-- e.g. Sp[old+8] = arg1
-- Sp[old+16] = arg2
-- Sp[old+32] = stg_ap_pp_info
-- R2 = arg3
-- R3 = arg4
-- call f() return to Nothing updfr_off: 32
directCall :: Convention -> CLabel -> RepArity -> [StgArg] -> FCode ReturnKind
-- (directCall f n args)
-- calls f(arg1, ..., argn), and applies the result to the remaining args
-- The function f has arity n, and there are guaranteed at least n args
-- Both arity and args include void args
directCall conv lbl arity stg_args
= do { argreps <- getArgRepsAmodes stg_args
; direct_call "directCall" conv lbl arity argreps }
slowCall :: CmmExpr -> [StgArg] -> FCode ReturnKind
-- (slowCall fun args) applies fun to args, returning the results to Sequel
slowCall fun stg_args
= do { dflags <- getDynFlags
; argsreps <- getArgRepsAmodes stg_args
; let (rts_fun, arity) = slowCallPattern (map fst argsreps)
; r <- direct_call "slow_call" NativeNodeCall
(mkRtsApFastLabel rts_fun) arity ((P,Just fun):argsreps)
; emitComment $ mkFastString ("slow_call for " ++
showSDoc dflags (ppr fun) ++
" with pat " ++ unpackFS rts_fun)
; return r
}
--------------
direct_call :: String
-> Convention -- e.g. NativeNodeCall or NativeDirectCall
-> CLabel -> RepArity
-> [(ArgRep,Maybe CmmExpr)] -> FCode ReturnKind
direct_call caller call_conv lbl arity args
| debugIsOn && real_arity > length args -- Too few args
  = do -- Caller should ensure that there are enough args!
pprPanic "direct_call" $
text caller <+> ppr arity <+>
ppr lbl <+> ppr (length args) <+>
ppr (map snd args) <+> ppr (map fst args)
| null rest_args -- Precisely the right number of arguments
= emitCall (call_conv, NativeReturn) target (nonVArgs args)
| otherwise -- Note [over-saturated calls]
= do dflags <- getDynFlags
emitCallWithExtraStack (call_conv, NativeReturn)
target
(nonVArgs fast_args)
(nonVArgs (stack_args dflags))
where
target = CmmLit (CmmLabel lbl)
(fast_args, rest_args) = splitAt real_arity args
stack_args dflags = slowArgs dflags rest_args
real_arity = case call_conv of
NativeNodeCall -> arity+1
_ -> arity
-- When constructing calls, it is easier to keep the ArgReps and the
-- CmmExprs zipped together. However, a void argument has no
-- representation, so we need to use Maybe CmmExpr (the alternative of
-- using zeroCLit or even undefined would work, but would be ugly).
--
getArgRepsAmodes :: [StgArg] -> FCode [(ArgRep, Maybe CmmExpr)]
getArgRepsAmodes = mapM getArgRepAmode
where getArgRepAmode arg
| V <- rep = return (V, Nothing)
| otherwise = do expr <- getArgAmode (NonVoid arg)
return (rep, Just expr)
where rep = toArgRep (argPrimRep arg)
nonVArgs :: [(ArgRep, Maybe CmmExpr)] -> [CmmExpr]
nonVArgs [] = []
nonVArgs ((_,Nothing) : args) = nonVArgs args
nonVArgs ((_,Just arg) : args) = arg : nonVArgs args
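-- A small illustration of the two helpers above (the argument names fun and n
-- are made up): nonVArgs [(P, Just fun), (V, Nothing), (N, Just n)] == [fun, n].
-- Void arguments simply disappear, and every other argument keeps the Cmm
-- expression it was paired with by getArgRepsAmodes.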
{-
Note [over-saturated calls]
The natural thing to do for an over-saturated call would be to call
the function with the correct number of arguments, and then apply the
remaining arguments to the value returned, e.g.
f a b c d (where f has arity 2)
-->
r = call f(a,b)
call r(c,d)
but this entails
- saving c and d on the stack
- making a continuation info table
- at the continuation, loading c and d off the stack into regs
- finally, call r
Note that since there are a fixed number of different r's
(e.g. stg_ap_pp_fast), we can also pre-compile continuations
that correspond to each of them, rather than generating a fresh
one for each over-saturated call.
Not only does this generate much less code, it is faster too. We will
generate something like:
Sp[old+16] = c
Sp[old+24] = d
Sp[old+32] = stg_ap_pp_info
call f(a,b) -- usual calling convention
For the purposes of the CmmCall node, we count this extra stack as
just more arguments that we are passing on the stack (cml_args).
-}
-- | 'slowArgs' takes a list of function arguments and prepares them for
-- pushing on the stack for "extra" arguments to a function which requires
-- fewer arguments than we currently have.
slowArgs :: DynFlags -> [(ArgRep, Maybe CmmExpr)] -> [(ArgRep, Maybe CmmExpr)]
slowArgs _ [] = []
slowArgs dflags args -- careful: reps contains voids (V), but args does not
| gopt Opt_SccProfilingOn dflags
= save_cccs ++ this_pat ++ slowArgs dflags rest_args
| otherwise = this_pat ++ slowArgs dflags rest_args
where
(arg_pat, n) = slowCallPattern (map fst args)
(call_args, rest_args) = splitAt n args
stg_ap_pat = mkCmmRetInfoLabel rtsPackageId arg_pat
this_pat = (N, Just (mkLblExpr stg_ap_pat)) : call_args
save_cccs = [(N, Just (mkLblExpr save_cccs_lbl)), (N, Just curCCS)]
save_cccs_lbl = mkCmmRetInfoLabel rtsPackageId (fsLit "stg_restore_cccs")
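-- A hedged example of the slow-call argument layout above (assuming profiling
-- is off and a and b are made-up argument expressions): if rest_args is
-- [(P, Just a), (P, Just b)], then slowCallPattern selects the "stg_ap_pp"
-- pattern with n = 2, so this_pat pushes the stg_ap_pp return-info label
-- followed by a and b, matching the Sp[old+32] = stg_ap_pp_info layout sketched
-- in Note [over-saturated calls].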
-------------------------------------------------------------------------
---- Laying out objects on the heap and stack
-------------------------------------------------------------------------
-- The heap always grows upwards, so hpRel is easy
hpRel :: VirtualHpOffset -- virtual offset of Hp
-> VirtualHpOffset -- virtual offset of The Thing
-> WordOff -- integer word offset
hpRel hp off = off - hp
getHpRelOffset :: VirtualHpOffset -> FCode CmmExpr
getHpRelOffset virtual_offset
= do dflags <- getDynFlags
hp_usg <- getHpUsage
return (cmmRegOffW dflags hpReg (hpRel (realHp hp_usg) virtual_offset))
mkVirtHeapOffsets
:: DynFlags
-> Bool -- True <=> is a thunk
-> [(PrimRep,a)] -- Things to make offsets for
-> (WordOff, -- _Total_ number of words allocated
WordOff, -- Number of words allocated for *pointers*
[(NonVoid a, VirtualHpOffset)])
-- Things with their offsets from start of object in order of
-- increasing offset; BUT THIS MAY BE DIFFERENT TO INPUT ORDER
-- First in list gets lowest offset, which is initial offset + 1.
--
-- Void arguments are removed, so output list may be shorter than
-- input list
--
-- mkVirtHeapOffsets always returns boxed things with smaller offsets
-- than the unboxed things
mkVirtHeapOffsets dflags is_thunk things
= let non_void_things = filterOut (isVoidRep . fst) things
(ptrs, non_ptrs) = partition (isGcPtrRep . fst) non_void_things
(wds_of_ptrs, ptrs_w_offsets) = mapAccumL computeOffset 0 ptrs
(tot_wds, non_ptrs_w_offsets) = mapAccumL computeOffset wds_of_ptrs non_ptrs
in
(tot_wds, wds_of_ptrs, ptrs_w_offsets ++ non_ptrs_w_offsets)
where
hdr_size | is_thunk = thunkHdrSize dflags
| otherwise = fixedHdrSize dflags
computeOffset wds_so_far (rep, thing)
= (wds_so_far + argRepSizeW dflags (toArgRep rep),
(NonVoid thing, hdr_size + wds_so_far))
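-- A hedged example of the reordering described above, assuming one-word IntRep
-- and PtrRep fields and writing hdr for the header size chosen by hdr_size:
--
-- mkVirtHeapOffsets dflags False [(IntRep, i), (PtrRep, p), (VoidRep, v)]
-- == (2, 1, [(NonVoid p, hdr), (NonVoid i, hdr + 1)])
--
-- The pointer field is laid out first, the void field is dropped, and the
-- total/pointer word counts exclude the header itself.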
mkVirtConstrOffsets :: DynFlags -> [(PrimRep,a)] -> (WordOff, WordOff, [(NonVoid a, VirtualHpOffset)])
-- Just like mkVirtHeapOffsets, but for constructors
mkVirtConstrOffsets dflags = mkVirtHeapOffsets dflags False
-------------------------------------------------------------------------
--
-- Making argument descriptors
--
-- An argument descriptor describes the layout of args on the stack,
-- both for * GC (stack-layout) purposes, and
-- * saving/restoring registers when a heap-check fails
--
-- Void arguments aren't important, therefore (contrast constructSlowCall)
--
-------------------------------------------------------------------------
-- bring in ARG_P, ARG_N, etc.
#include "../includes/rts/storage/FunTypes.h"
mkArgDescr :: DynFlags -> [Id] -> ArgDescr
mkArgDescr dflags args
= let arg_bits = argBits dflags arg_reps
arg_reps = filter isNonV (map idArgRep args)
-- Getting rid of voids eases matching of standard patterns
in case stdPattern arg_reps of
Just spec_id -> ArgSpec spec_id
Nothing -> ArgGen arg_bits
argBits :: DynFlags -> [ArgRep] -> [Bool] -- True for non-ptr, False for ptr
argBits _ [] = []
argBits dflags (P : args) = False : argBits dflags args
argBits dflags (arg : args) = take (argRepSizeW dflags arg) (repeat True)
++ argBits dflags args
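-- For instance, assuming an N argument occupies a single word:
-- argBits dflags [P, N] == [False, True], and stdPattern [P, N] (below) is
-- Just ARG_PN, so mkArgDescr would produce ArgSpec ARG_PN rather than the
-- generic ArgGen [False, True].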
----------------------
stdPattern :: [ArgRep] -> Maybe Int
stdPattern reps
= case reps of
[] -> Just ARG_NONE -- just void args, probably
[N] -> Just ARG_N
[P] -> Just ARG_P
[F] -> Just ARG_F
[D] -> Just ARG_D
[L] -> Just ARG_L
[V16] -> Just ARG_V16
[V32] -> Just ARG_V32
[V64] -> Just ARG_V64
[N,N] -> Just ARG_NN
[N,P] -> Just ARG_NP
[P,N] -> Just ARG_PN
[P,P] -> Just ARG_PP
[N,N,N] -> Just ARG_NNN
[N,N,P] -> Just ARG_NNP
[N,P,N] -> Just ARG_NPN
[N,P,P] -> Just ARG_NPP
[P,N,N] -> Just ARG_PNN
[P,N,P] -> Just ARG_PNP
[P,P,N] -> Just ARG_PPN
[P,P,P] -> Just ARG_PPP
[P,P,P,P] -> Just ARG_PPPP
[P,P,P,P,P] -> Just ARG_PPPPP
[P,P,P,P,P,P] -> Just ARG_PPPPPP
_ -> Nothing
-------------------------------------------------------------------------
--
-- Generating the info table and code for a closure
--
-------------------------------------------------------------------------
-- Here we make an info table of type 'CmmInfo'. The concrete
-- representation as a list of 'CmmAddr' is handled later
-- in the pipeline by 'cmmToRawCmm'.
-- When loading the free variables, a function closure pointer may be tagged,
-- so we must take it into account.
emitClosureProcAndInfoTable :: Bool -- top-level?
-> Id -- name of the closure
-> LambdaFormInfo
-> CmmInfoTable
-> [NonVoid Id] -- incoming arguments
-> ((Int, LocalReg, [LocalReg]) -> FCode ()) -- function body
-> FCode ()
emitClosureProcAndInfoTable top_lvl bndr lf_info info_tbl args body
= do { dflags <- getDynFlags
-- Bind the binder itself, but only if it's not a top-level
-- binding. We need non-top let-bindings to refer to the
-- top-level binding, which this binding would incorrectly shadow.
; node <- if top_lvl then return $ idToReg dflags (NonVoid bndr)
else bindToReg (NonVoid bndr) lf_info
; let node_points = nodeMustPointToIt dflags lf_info
; arg_regs <- bindArgsToRegs args
; let args' = if node_points then (node : arg_regs) else arg_regs
conv = if nodeMustPointToIt dflags lf_info then NativeNodeCall
else NativeDirectCall
(offset, _, _) = mkCallEntry dflags conv args' []
; emitClosureAndInfoTable info_tbl conv args' $ body (offset, node, arg_regs)
}
-- Data constructors need closures, but not with all the argument handling
-- needed for functions. The shared part goes here.
emitClosureAndInfoTable ::
CmmInfoTable -> Convention -> [LocalReg] -> FCode () -> FCode ()
emitClosureAndInfoTable info_tbl conv args body
= do { blks <- getCode body
; let entry_lbl = toEntryLbl (cit_lbl info_tbl)
; emitProcWithConvention conv (Just info_tbl) entry_lbl args blks
}
| ekmett/ghc | compiler/codeGen/StgCmmLayout.hs | bsd-3-clause | 16,801 | 0 | 17 | 4,506 | 3,164 | 1,701 | 1,463 | 237 | 25 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
import Data.Functor.Identity
import Data.Proxy
import Data.Vinyl
import Diagrams.Backend.SVG
import Diagrams.Backend.SVG.CmdLine
import Diagrams.Prelude hiding ((:&))
-- import Graphite
-- import Graphite.Diagrams
import Graphite.Types (Graphite)
import qualified Graphite.Combinators as Graphite
import Diagrams.Graph.Combinators
import Control.Arrow
-- import StackedBarGraph
main :: IO ()
-- main = mainWith $ sideBySideAxis
main = mainWith $
(pad 1.2 $ centerXY $ Graphite.build (allAges :& allGenders :& RNil) funcData2 graphC)
-- ===
-- (pad 1.2 $ centerXY $ build (allGenders :& allAges :& RNil) (funcData2 . rcast) graphD)
-- main = mainWith $ pad 1.4 $ centerXY $ showOrigin $ build (allGenders :& RNil) funcDataGender graphB
test :: Diagram B
test = hcat (replicate 5 myRect) # center # showOrigin
myRect :: Diagram B
myRect = rect 44 66 # padX 1.1
-- graphA :: Graphite Int (Diagram B) '[] '[Gender]
-- graphA = id
-- $ sideBySideAxis 1.2 show
-- $ bar 10 fromIntegral
--
-- graphB :: Graphite Int (Diagram B) '[] '[Gender]
-- graphB = id
-- $ sideBySide 0
-- -- $ barTopLabel 10 fromIntegral (show . runIdentity . rget (Proxy :: Proxy Gender))
-- $ liftFigureChange (lineOnBottom . padX 1.1)
-- $ labelTop (show . runIdentity . rget (Proxy :: Proxy Gender))
-- $ bar 10 fromIntegral
graphC :: Graphite Int (Diagram B) '[] '[Age,Gender]
graphC = id
$ Graphite.figure (\_ _ ds -> let d = alignL (hcat ds) in
mappend d $ alignBL $ createYAxis (width d) 0 3 12 fromIntegral show
(lc (blend 0.3 white purple) . lw thin . dashingG [0.7,0.7] 0) id
)
$ Graphite.pop
$ Graphite.figure (\r _ d -> let age = runIdentity $ rget (Proxy :: Proxy Age) r in d
& hsep 0 . map alignB
& center
& padX 1.1
& lineOnBottom
& besideText unit_Y (show age)
)
$ Graphite.pop
$ Graphite.figure (\r _ d -> let gender = runIdentity $ rget (Proxy :: Proxy Gender) r in d
& fc (genderColour gender)
& besideText unitY (show gender)
& center
& padX 1.1
& fontSizeL 2
)
$ Graphite.start' (\res -> rect 10 (fromIntegral res))
-- graphD :: Graphite Int (Diagram B) '[] '[Gender, Age]
-- graphD = id
-- $ liftFigureChange (\fig -> mappend fig $ lw thin $ lineOnRight (createYAxis 1.0 0 3 12 fromIntegral show (lw thin)))
-- -- $ liftFigureChange
-- -- (\f -> mappend f (alignBL $ createYAxis (width f) 0 3 12 fromIntegral show
-- -- (lc (blend 0.3 white purple) . lw thin . dashingG [0.7,0.7] 0)
-- -- ))
-- $ modifyFigureFull
-- (\r l fig -> mappend fig $
-- createXAxis 1.0 10.0 (rget (Proxy :: Proxy Age) l) show (lw thin)
-- )
-- $ modifyFigureFull
-- (\r l fig -> mappend fig
-- $ lw thin
-- $ horizontalLine (10.0 * fromIntegral (length (rget (Proxy :: Proxy Age) l) - 1))
-- )
-- $ overlapped
--
-- $ modifyFigure (\r -> lc $ darken 0.6 $ genderColour $ runIdentity $ rget (Proxy :: Proxy Gender) r)
-- $ connected 10.0
--
-- $ modifyFigure (\r -> first $ fc $ genderColour $ runIdentity $ rget (Proxy :: Proxy Gender) r)
-- $ singlePoint 1 fromIntegral
data Gender = Male | Female | Unknown
deriving (Eq,Ord,Bounded,Enum,Show)
data Age = Young | Middle | Old | VeryOld
deriving (Eq,Ord,Bounded,Enum,Show)
allAges :: [Age]
allAges = enumFromTo minBound maxBound
allGenders :: [Gender]
allGenders = enumFromTo minBound maxBound
genderColour :: Gender -> Colour Double
genderColour g = case g of
Male -> blue
Female -> magenta
Unknown -> grey
-- myGraphA,myGraphB :: Graphite '[Age,Gender] Int
-- myGraphA = intervals 16 allAges $ grouped 1 allGenders $ bar fromIntegral 4 id
-- myGraphB = id
-- $ intervalsWithAxis 16 show (zip allAges (repeat id))
-- -- $ stacked allGenders
-- $ stackedWith [ (Male,fc purple . atop (text "Male" & fontSizeL 2 & fc white))
-- , (Female, fc blue . atop (text "Female" & fontSizeL 2 & fc white))
-- , (Unknown, fc green . atop (text "N/A" & fontSizeL 2 & fc white))
-- ]
-- $ bar fromIntegral 6.0 id
--
-- myGraphC :: Graphite '[Gender] Int
-- myGraphC = intervals 16 allGenders $ bar fromIntegral 4.0 (fc blue)
--
-- myGraphD :: Graphite '[Gender] Int
-- myGraphD = lineGraph fromIntegral 16.0 allGenders (fc blue)
funcDataGender :: Rec Identity '[Gender] -> Int
funcDataGender (Identity gender :& RNil) = case gender of
Male -> 11
Female -> 14
Unknown -> 2
funcData :: Rec Identity '[Age,Gender] -> Int
funcData (Identity age :& Identity gender :& RNil) = fAge age * fGender gender
where
fAge Young = 6
fAge Middle = 7
fAge Old = 8
fGender Male = 4
fGender Female = 5
fGender Unknown = 1
funcData2 :: Rec Identity '[Age,Gender] -> Int
funcData2 (Identity age :& Identity gender :& RNil) = case age of
Young -> case gender of
Male -> 18
Female -> 16
Unknown -> 30
Middle -> case gender of
Male -> 12
Female -> 18
Unknown -> 22
Old -> case gender of
Male -> 4
Female -> 26
Unknown -> 20
VeryOld -> case gender of
Male -> 2
Female -> 22
Unknown -> 10
-- myGraphE :: Graphite (Diagram B) r '[Gender] Int
-- myGraphE = stacked id $ bar 4.0
--
-- myGraphF :: Graphite (Diagram B) r '[Age,Gender] Int
-- myGraphF = paddedBy 12.0 id $ stacked id $ barColoured 4.0 genderColour
--
-- myGraphH :: Graphite (Diagram B) r '[Gender] Int
-- myGraphH = connected 4 $ gvalue
--
-- myGraphI :: Graphite (Diagram B) r '[Age,Gender] Int
-- myGraphI = overlapped $ connected 16.0 $ gvalue
--
-- myGraphJ :: Graphite (Diagram B) r '[Age,Gender] Int
-- myGraphJ = id
-- -- $ overlapped
-- $ connectedFiguresMany 16.0
-- $ figureAt (\val gender -> mconcat
-- [ text (show val)
-- , hexagon 1.0 # fc (genderColour gender)
-- ]
-- )
--
-- myGraphK :: Graphite (Diagram B) r '[Gender,Age] Int
-- myGraphK = id
-- -- $ overlapped
-- $ connectedFiguresMany 16.0
-- $ figureAt (\val _ -> mconcat
-- [ text (show val)
-- , hexagon 1.0
-- ]
-- )
--
-- myGraphL :: Graphite (Diagram B) r '[Age,Gender] Int
-- myGraphL = id
-- $ yAxisFromZero HorizontalRight 10 4 (*) show
-- $ xAxisFromZero HorizontalRight show
-- $ graphiteCast (Proxy :: Proxy '[Gender,Age]) (Proxy :: Proxy '[Age,Gender])
-- $ overlapped
-- $ basicConnectedFigures
-- (\g v a -> mconcat [text (show a) & fc white & fontSize 10, hexagon 2.0 & fc (genderColour g)])
-- (\g -> connectOutside' (with { _arrowHead = noHead, _shaftStyle = lc purple mempty} ))
-- padX :: (Metric v, OrderedField n, Monoid' m, R2 v)
-- => n -> QDiagram b v n m -> QDiagram b v n m
-- padX s d = withEnvelope (d # scaleX s) d
-- main = mainWith $ showOrigin $ runGraphite myGraphH fromIntegral funcDataGender (allGenders :& RNil) ()
-- main = mainWith $ showOrigin $ runGraphite myGraphF fromIntegral funcData2 (allAges :& allGenders :& RNil) ()
-- main = mainWith $ pad 1.3 $ centerXY $ showOrigin
-- $ runGraphite myGraphL (\i -> fromIntegral i * 2) (funcData2 . rcast)
-- (rcast $ allAges :& allGenders :& RNil) ()
-- ( build myGraphA funcData
-- ===
-- axisX 16 (map show allAges)
-- ===
-- square 4
-- ) |||
-- ( axisY 16 ["1","2","3","4","5"] ||| build myGraphB funcData) |||
-- ( build myGraphD funcDataGender
-- ===
-- axisX 16 (map show allGenders)
-- ===
-- square 4
-- )
| andrewthad/graphite | example/main.hs | bsd-3-clause | 7,763 | 0 | 23 | 1,952 | 1,170 | 662 | 508 | -1 | -1 |
module Main where
import System.Console.Haskeline
import System.IO
import System.Environment
import System.Exit
import System.FilePath ((</>), addTrailingPathSeparator)
import Data.Maybe
import Data.Version
import Control.Monad.Trans.Error ( ErrorT(..) )
import Control.Monad.Trans.State.Strict ( execStateT, get, put )
import Control.Monad ( when )
import Idris.Core.TT
import Idris.Core.Typecheck
import Idris.Core.Evaluate
import Idris.Core.Constraints
import Idris.AbsSyntax
import Idris.Parser
import Idris.REPL
import Idris.ElabDecls
import Idris.Primitives
import Idris.Imports
import Idris.Error
import IRTS.System ( getLibFlags, getIdrisLibDir, getIncFlags )
import Util.DynamicLinker
import Pkg.Package
import Paths_idris
-- Main program reads command line options, parses the main program, and gets
-- on with the REPL.
main = do xs <- getArgs
let opts = parseArgs xs
result <- runErrorT $ execStateT (runIdris opts) idrisInit
case result of
Left err -> putStrLn $ "Uncaught error: " ++ show err
Right _ -> return ()
runIdris :: [Opt] -> Idris ()
runIdris [Client c] = do setVerbose False
setQuiet True
runIO $ runClient c
runIdris opts = do
when (Ver `elem` opts) $ runIO showver
when (Usage `elem` opts) $ runIO usage
when (ShowIncs `elem` opts) $ runIO showIncs
when (ShowLibs `elem` opts) $ runIO showLibs
when (ShowLibdir `elem` opts) $ runIO showLibdir
case opt getPkgCheck opts of
[] -> return ()
fs -> do runIO $ mapM_ (checkPkg (WarnOnly `elem` opts)) fs
runIO $ exitWith ExitSuccess
case opt getPkgClean opts of
[] -> return ()
fs -> do runIO $ mapM_ cleanPkg fs
runIO $ exitWith ExitSuccess
case opt getPkg opts of
[] -> idrisMain opts -- in Idris.REPL
fs -> runIO $ mapM_ (buildPkg (WarnOnly `elem` opts)) fs
usage = do putStrLn usagemsg
exitWith ExitSuccess
showver = do putStrLn $ "Idris version " ++ ver
exitWith ExitSuccess
showLibs = do libFlags <- getLibFlags
putStrLn libFlags
exitWith ExitSuccess
showLibdir = do dir <- getIdrisLibDir
putStrLn dir
exitWith ExitSuccess
showIncs = do incFlags <- getIncFlags
putStrLn incFlags
exitWith ExitSuccess
usagemsghdr = "Idris version " ++ ver ++ ", (C) The Idris Community 2014"
usagemsg = usagemsghdr ++ "\n" ++
map (\x -> '-') usagemsghdr ++ "\n" ++
"idris [OPTIONS] [FILE]\n\n" ++
"Common flags:\n" ++
"\t --install=IPKG Install package\n" ++
"\t --clean=IPKG Clean package\n" ++
"\t --build=IPKG Build package\n" ++
"\t --exec=EXPR Execute as idris\n" ++
"\t --libdir Display library directory\n" ++
"\t --link Display link directory\n" ++
"\t --include Display the includes directory\n" ++
"\t --nobanner Suppress the banner\n" ++
"\t --color, --colour Force coloured output\n" ++
"\t --nocolor, --nocolour Disable coloured output\n" ++
"\t --errorcontent Undocumented\n" ++
"\t --nocoverage Undocumented\n" ++
"\t -o --output=FILE Specify output file\n" ++
"\t --check Undocumented\n" ++
"\t --total Require functions to be total by default\n" ++
"\t --partial Undocumented\n" ++
"\t --warnpartial Warn about undeclared partial functions.\n" ++
"\t --warn Undocumented\n" ++
"\t --typecase Undocumented\n" ++
"\t --typeintype Undocumented\n" ++
"\t --nobasepkgs Undocumented\n" ++
"\t --noprelude Undocumented\n" ++
"\t --nobuiltins Undocumented\n" ++
"\t -O --level=LEVEL Undocumented\n" ++
"\t -i --idrispath=DIR Add directory to the list of import paths\n" ++
"\t --package=ITEM Undocumented\n" ++
"\t --ibcsubdir=FILE Write IBC files into sub directory\n" ++
"\t --codegen=TARGET Select code generator: C, Java, bytecode,\n" ++
"\t javascript, node, or llvm\n" ++
"\t --mvn Create a maven project (for Java codegen)\n" ++
"\t --cpu=CPU Select tartget CPU e.g. corei7 or cortex-m3\n" ++
"\t (for LLVM codegen)\n" ++
"\t --target=TRIPLE Select target triple (for llvm codegen)\n" ++
"\t -S --codegenonly Do no further compilation of code generator output\n" ++
"\t -c --compileonly Compile to object files rather than an executable\n" ++
"\t -X --extension=ITEM Undocumented\n" ++
"\t --dumpdefuns Undocumented\n" ++
"\t --dumpcases Undocumented\n" ++
"\t --log=LEVEL --loglevel Debugging log level\n" ++
"\t --ideslave Undocumented\n" ++
"\t --client Undocumented\n" ++
"\t -h --help Display help message\n" ++
"\t -v --version Print version information\n" ++
"\t -V --verbose Loud verbosity\n" ++
"\t -q --quiet Quiet verbosity\n"
| ctford/Idris-Elba-dev | main/Main.hs | bsd-3-clause | 5,886 | 0 | 56 | 2,190 | 960 | 487 | 473 | 116 | 4 |
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-type-defaults #-}
module Main where
import Prelude hiding ( null, lookup, max )
import qualified Prelude
import Control.Applicative ( (<$>), (<*>) )
import qualified Data.Array as A
import Data.Binary ( encode, decode )
import Data.Maybe ( fromJust )
import Data.Monoid
import Data.VectorClock
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit
import Test.QuickCheck
import Text.Printf ( printf )
type VC = VectorClock Char Int
instance (Arbitrary a, Ord a, Arbitrary b) => Arbitrary (VectorClock a b) where
arbitrary = arbitrary >>= return . fromList
data Mutation = Increment Int
| Add Char Int
deriving ( Show )
instance Arbitrary Mutation where
arbitrary = do
x <- choose (0, 5)
if x < (5 :: Int)
then Increment <$> (choose (0, 100))
else Add <$> arbitrary <*> (choose (0, 100))
main :: IO ()
main = defaultMainWithOpts
[ testCase "size" testSize
, testCase "size2" testSize2
, testCase "member" testMember
, testCase "lookup" testLookup
, testCase "insert" testInsert
, testCase "inc" testInc
, testCase "delete" testDelete
, testCase "combine" testCombine
, testCase "relation" testRelation
, testCase "diff" testDiff
, testProperty "fromList" propFromList
, testProperty "binaryId" propBinaryId
, testProperty "maxNotCauses" propMaxNotCauses
, testProperty "relationInverse" propRelationInverse
, testProperty "maxCommutative" propMaxCommutative
, testProperty "maxInclusive" propMaxInclusive
, testProperty "mutationRelation" propMutationRelation
, testProperty "relationTransitive" propRelationTransitive
, testProperty "diffMax" propDiffMax
] opts
where
opts = mempty {
ropt_test_options =
Just (mempty
{ topt_maximum_generated_tests = Just 500
, topt_maximum_unsuitable_generated_tests = Just 5000
})}
--------------------------------
-- Unit tests
--------------------------------
testSize :: Assertion
testSize = do
null empty @?= True
null (singleton 'a' 1) @?= False
testSize2 :: Assertion
testSize2 = do
size empty @?= 0
size (singleton 'a' 1) @?= 1
size (fromList [('a', 1), ('b', 1)]) @?= 2
testMember :: Assertion
testMember = do
member 'a' (fromList [('a', 1), ('b', 2)]) @?= True
member 'c' (fromList [('a', 1), ('b', 2)]) @?= False
testLookup :: Assertion
testLookup = do
lookup 'a' (fromList [('a', 1), ('b', 2)]) @?= Just 1
lookup 'b' (fromList [('a', 1), ('b', 2)]) @?= Just 2
lookup 'c' (fromList [('a', 1), ('b', 2)]) @?= Nothing
testInsert :: Assertion
testInsert = do
insert 'b' 2 (insert 'a' 1 empty) @?= fromList [('a', 1), ('b', 2)]
insert 'a' 1 (insert 'b' 2 empty) @?= fromList [('a', 1), ('b', 2)]
insert 'b' 2 (insert 'a' 1 empty) @?= fromList [('b', 2), ('a', 1)]
insert 'a' 1 (insert 'b' 2 empty) @?= fromList [('b', 2), ('a', 1)]
insert 'a' 2 (insert 'a' 1 empty) @?= fromList [('a', 2)]
testInc :: Assertion
testInc = do
let vc = fromList [('a', 1), ('b', 2)]
inc 'a' vc @?= Just (fromList [('a', 2), ('b', 2)])
inc 'a' (fromJust (inc 'a' vc)) @?= Just (fromList [('a', 3), ('b', 2)])
inc 'b' vc @?= Just (fromList [('a', 1), ('b', 3)])
inc 'c' vc @?= Nothing
incWithDefault 'c' vc 0 @?= fromList [('c', 1), ('a', 1), ('b', 2)]
incWithDefault 'a' vc 0 @?= fromList [('a', 2), ('b', 2)]
testDelete :: Assertion
testDelete = do
let vc = fromList [('a', 1), ('b', 2)]
delete 'a' vc @?= fromList [('b', 2)]
delete 'b' vc @?= fromList [('a', 1)]
delete 'b' (delete 'a' vc) @?= empty
delete 'a' (delete 'b' vc) @?= empty
delete 'c' vc @?= vc
delete 'a' (empty :: VectorClock Char Int) @?= empty
testCombine :: Assertion
testCombine = do
let vc1 = fromList [('a', 1), ('b', 2)]
let vc2 = fromList [('c', 3), ('b', 4)]
let first_vc = \_ x _ -> x
let second_vc = \_ _ x -> x
let neither_vc = \_ _ _ -> Nothing
combine first_vc vc1 vc2 @?= vc1
combine second_vc vc1 vc2 @?= vc2
combine neither_vc vc1 vc2 @?= empty
max empty vc1 @?= vc1
max vc2 empty @?= vc2
max vc1 vc2 @?= fromList [('a', 1), ('b', 4), ('c', 3)]
testRelation :: Assertion
testRelation = do
let vc = fromList [('a', 1), ('d', 2)]
vc `causes` fromList [('a', 1), ('d', 3)] @?= True
vc `causes` fromList [('a', 2), ('d', 2)] @?= True
vc `causes` fromList [('a', 2), ('d', 3)] @?= True
vc `causes` fromList [('a', 2), ('d', 3), ('c', 4)] @?= True
vc `causes` (fromList [('a', 1), ('d', 2), ('c', 4)]) @?= True
vc `causes` (fromList [('a', 1), ('d', 2), ('e', 4)]) @?= True
empty `causes` vc @?= True
relation vc empty @?= CausedBy
relation vc vc @?= Causes
relation vc (fromList [('a', 1), ('d', 1), ('c', 2)]) @?= Concurrent
relation (fromList [('a', 1)]) (fromList [('d', 2)]) @?= Concurrent
testDiff :: Assertion
testDiff = do
fromList [('a', 2)] `diff` fromList [('a', 2)] @?= Just (fromList [])
fromList [('a', 2)] `diff` fromList [('a', 1)] @?= Just (fromList [('a', 2)])
fromList [('a', 1), ('b', 2)] `diff` fromList [('a', 1), ('b', 1)]
@?= Just (fromList [('b', 2)])
fromList [('a', 2), ('b', 1)] `diff` fromList [('a', 1), ('b', 1)]
@?= Just (fromList [('a', 2)])
fromList [('a', 2), ('b', 1), ('c', 2)] `diff` fromList [('a', 2), ('b', 1)]
@?= Just (fromList [('c', 2)])
fromList [('a', 1), ('b', 2)] `diff` fromList [('a', 2), ('b', 2)] @?= Nothing
fromList [('a', 1), ('b', 2)] `diff` fromList [('a', 2), ('b', 1)] @?= Nothing
--------------------------------
-- QuickCheck properties
--------------------------------
propFromList :: VC -> Bool
propFromList vc = valid vc
propBinaryId :: VC -> Bool
propBinaryId vc = vc == decode (encode vc)
-- @max vc1 vc2@ does not cause either @vc1@ or @vc2@, unless @max vc1
-- vc2@ is either @vc1@ or @vc2@
propMaxNotCauses :: VC -> VC -> Property
propMaxNotCauses vc1 vc2 =
let vcMax = max vc1 vc2 in
(vcMax /= vc1 && vcMax /= vc2) ==>
relation vcMax vc1 /= Causes &&
relation vcMax vc2 /= Causes &&
relation vc1 vcMax /= CausedBy &&
relation vc2 vcMax /= CausedBy
-- | Increment the entries that correspond to the mutations.
applyMutations :: VC -> [Mutation] -> VC
applyMutations vc ms =
let sources = map fst $ toList vc in
let sourcesA = A.listArray (0, length sources - 1) sources in
foldl (applyOne sourcesA) vc ms
where
applyOne sourcesA vc' (Increment i) =
let source = sourcesA A.! (i `mod` (1 + snd (A.bounds sourcesA))) in
incWithDefault source vc' (error (printf "unknown source: %s" source))
applyOne sourcesA vc' (Add source i) =
if source `elem` A.elems sourcesA
then vc'
else insert source i vc'
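-- A worked example of the helper above (sources taken in toList order): with
-- vc = fromList [('a', 1), ('b', 2)], applyMutations vc [Increment 0] bumps the
-- entry for 'a' (index 0 modulo the number of sources), giving
-- fromList [('a', 2), ('b', 2)]; Add 'c' 5 would insert a fresh ('c', 5) entry,
-- while Add 'a' 5 would leave the clock unchanged.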
-- @vc1@ causes @vc2@ iff @vc2@ is caused by @vc1@
propRelationInverse :: VC -> [Mutation] -> Property
propRelationInverse vc1 ms =
not (null vc1) ==>
let vc2 = applyMutations vc1 ms in
vc1 `causes` vc2 && vc1 /= vc2 ==>
relation vc2 vc1 == CausedBy
propMaxCommutative :: VC -> VC -> Bool
propMaxCommutative vc1 vc2 =
max vc1 vc2 == max vc2 vc1
-- @max vc1 vc2@ should include all the keys of @vc1@ and @vc2@.
propMaxInclusive :: VC -> VC -> Bool
propMaxInclusive vc1 vc2 =
let vcMax = max vc1 vc2 in
all (\(key, _) -> key `member` vcMax) (toList vc1 ++ toList vc2)
propMutationRelation :: VC -> [Mutation] -> Bool
propMutationRelation vc1 ms =
let vc2 = applyMutations vc1 ms in
vc1 `causes` vc2
propRelationTransitive :: VC -> [Mutation] -> [Mutation] -> Property
propRelationTransitive vc1 ms1 ms2 =
not (null vc1) ==>
let vc2 = applyMutations vc1 ms1 in
let vc3 = applyMutations vc2 ms2 in
vc1 `causes` vc2 && vc2 `causes` vc3 ==>
vc1 `causes` vc3
propDiffMax :: VC -> [Mutation] -> Property
propDiffMax vc1 ms =
not (null vc1) ==>
let vc2 = applyMutations vc1 ms in
let mvc3 = diff vc2 vc1 in
case mvc3 of
Nothing -> False
Just vc3 -> max vc3 vc1 == vc2
| tkonolige/vector-clock | test/Props.hs | gpl-3.0 | 8,380 | 0 | 19 | 2,088 | 3,393 | 1,831 | 1,562 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
module InnerEar.Types.User where
import Text.JSON
import Text.JSON.Generic
import InnerEar.Types.Handle
import InnerEar.Types.Password
import InnerEar.Types.Data
data Role =
NormalUser | -- can only log in, do exercises, inspect their own data/history
Manager | -- can also add NormalUsers, inspect any data/history
Administrator -- can also add Managers
deriving (Show,Eq,Data,Typeable)
data User = User {
handle :: Handle,
password :: Password,
role :: Role
} deriving (Show,Eq,Data,Typeable)
canSeeUserList :: Role -> Bool
canSeeUserList Administrator = True
canSeeUserList Manager = True
canSeeUserList _ = False
| d0kt0r0/InnerEar | src/InnerEar/Types/User.hs | gpl-3.0 | 693 | 0 | 8 | 125 | 151 | 90 | 61 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Network.Wai.Handler.Warp (Port, run)
import Network.Wai.Middleware.Cors (simpleCors)
import System.Environment (getArgs, getEnv)
import System.IO (BufferMode (..), hSetBuffering,
stdout)
import qualified Network.Tox.App as App
-- Run the server.
runTestServer :: Port -> IO ()
runTestServer port = do
putStrLn $ "Running webserver on port " ++ show port
run port $ simpleCors App.app
-- Put this all to work!
main :: IO ()
main = do
-- So real time logging works correctly.
hSetBuffering stdout LineBuffering
args <- getArgs
case args of
[port] -> runTestServer $ read port
_ -> runTestServer =<< read <$> getEnv "PORT"
| iphydf/hs-toxcore | web/webservice.hs | gpl-3.0 | 864 | 0 | 11 | 279 | 203 | 112 | 91 | 19 | 2 |
{-# LANGUAGE StrictData #-}
{-# LANGUAGE Trustworthy #-}
module Network.Tox.NodeInfo.PortNumberSpec where
import Test.Hspec
import Data.Proxy (Proxy (..))
import Network.Tox.EncodingSpec
import Network.Tox.NodeInfo.PortNumber (PortNumber)
spec :: Spec
spec = do
rpcSpec (Proxy :: Proxy PortNumber)
binarySpec (Proxy :: Proxy PortNumber)
readShowSpec (Proxy :: Proxy PortNumber)
| iphydf/hs-toxcore | test/Network/Tox/NodeInfo/PortNumberSpec.hs | gpl-3.0 | 450 | 0 | 9 | 115 | 102 | 59 | 43 | 12 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Ambiata.Cli where
import qualified Data.Text as T
import P
import System.IO
import Turtle
testShell :: [Text] -> Shell Text -> IO (ExitCode, Text)
testShell args = shellStrict args'
where
args' = T.intercalate " " args
testShell' :: [Text] -> IO (ExitCode, Text)
testShell' = flip testShell empty
| ambiata/tatooine-cli | test/Test/Ambiata/Cli.hs | apache-2.0 | 429 | 0 | 8 | 106 | 112 | 64 | 48 | 12 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="si-LK">
<title>Plug-n-Hack | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/plugnhack/src/main/javahelp/org/zaproxy/zap/extension/plugnhack/resources/help_si_LK/helpset_si_LK.hs | apache-2.0 | 973 | 80 | 68 | 159 | 421 | 213 | 208 | -1 | -1 |
-- |Make URI an instance of Read and Ord, and add functions to
-- manipulate the uriQuery.
module Extra.URI
( module Network.URI
, relURI
, setURIPort
, parseURIQuery
, modifyURIQuery
, setURIQuery
, setURIQueryAttr
, deleteURIQueryAttr
) where
import Network.URI -- (URIAuth(..), URI(..), parseURI, uriToString, escapeURIString, isUnreserved, unEscapeString)
import Data.List(intersperse, groupBy, inits)
import Data.Maybe(isJust, isNothing, catMaybes)
import Control.Arrow(second)
-- |Create a relative URI with the given query.
relURI :: FilePath -> [(String, String)] -> URI
relURI path pairs = URI {uriScheme = "",
uriAuthority = Nothing,
uriPath = path,
uriQuery = formatURIQuery pairs,
uriFragment = ""}
-- |Set the port number in the URI authority, creating it if necessary.
setURIPort port uri =
uri {uriAuthority = Just auth'}
where
auth' = auth {uriPort = port}
auth = maybe nullAuth id (uriAuthority uri)
nullAuth = URIAuth {uriUserInfo = "", uriRegName = "", uriPort = ""}
-- |Return the pairs in a URI's query
parseURIQuery :: URI -> [(String, String)]
parseURIQuery uri =
case uriQuery uri of
"" -> []
'?' : attrs ->
map (second (unEscapeString . tail) . break (== '='))
(filter (/= "&") (groupBy (\ a b -> a /= '&' && b /= '&') attrs))
x -> error $ "Invalid URI query: " ++ x
-- |Modify a URI's query by applying a function to the pairs
modifyURIQuery :: ([(String, String)] -> [(String, String)]) -> URI -> URI
modifyURIQuery f uri = uri {uriQuery = formatURIQuery (f (parseURIQuery uri))}
setURIQuery :: [(String, String)] -> URI -> URI
setURIQuery pairs = modifyURIQuery (const pairs)
setURIQueryAttr :: String -> String -> URI -> URI
setURIQueryAttr name value uri =
modifyURIQuery f uri
where f pairs = (name, value) : filter ((/= name) . fst) pairs
deleteURIQueryAttr :: String -> URI -> URI
deleteURIQueryAttr name uri =
modifyURIQuery f uri
where f pairs = filter ((/= name) . fst) pairs
-- |Turn a list of attribute value pairs into a uriQuery.
formatURIQuery :: [(String, String)] -> String
formatURIQuery [] = ""
formatURIQuery attrs = '?' : concat (intersperse "&" (map (\ (a, b) -> a ++ "=" ++ escapeURIForQueryValue b) attrs))
-- |Escape a value so it can safely appear on the RHS of an element of
-- the URI query. The isUnreserved predicate is the set of characters
-- that can appear in a URI which don't have any special meaning.
-- Everything else gets escaped.
escapeURIForQueryValue = escapeURIString isUnreserved
-- Make URI an instance of Read. This will throw an error if no
-- prefix up to ten characters long of the argument string looks like
-- a URI. If such a prefix is found, it will continue trying longer
-- and longer prefixes until the result no longer looks like a URI.
instance Read URI where
readsPrec _ s =
let allURIs = map parseURI (inits s) in
-- If nothing in the first ten characters looks like a URI, give up
case catMaybes (take 10 allURIs) of
[] -> fail "read URI: no parse"
-- Return the last element that still looks like a URI
_ ->
[(longestURI, drop (length badURIs + length goodURIs - 1) s)]
where
longestURI = case reverse (catMaybes goodURIs) of
[] -> error $ "Invalid URI: " ++ s
(a : _) -> a
goodURIs = takeWhile isJust moreURIs
(badURIs, moreURIs) = span isNothing allURIs
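-- A few usage sketches for this module (illustrative values only):
-- relURI "/search" [("q", "a b"), ("page", "2")] yields a relative URI whose
-- uriPath is "/search" and whose uriQuery is "?q=a%20b&page=2", since only the
-- right-hand sides are escaped by escapeURIForQueryValue. Applying
-- setURIQueryAttr "page" "3" to that URI replaces the "page" attribute, and
-- deleteURIQueryAttr "page" removes it.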
| eigengrau/haskell-extra | Extra/URI.hs | bsd-3-clause | 3,659 | 0 | 17 | 967 | 887 | 491 | 396 | 60 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Main where
import Headings
import PropertyDrawer
import Test.Tasty
import Timestamps
import Document
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup
"OrgMode Parser Tests"
[ parserHeadingTests
, parserPropertyDrawerTests
, parserTimestampTests
, parserSmallDocumentTests
, parserWeekdayTests
]
| imalsogreg/orgmode-parse | test/Test.hs | bsd-3-clause | 522 | 0 | 6 | 180 | 71 | 43 | 28 | 18 | 1 |
data BinaryTree a = Node a (BinaryTree a) (BinaryTree a)
| Empty
createTree :: Int -> [[Int]] -> BinaryTree Int
createTree n arr = head subtrees
where subtrees = [getSubtree x | x <- [0..n - 1]]
getSubtree x = (\[l, r] -> Node (x + 1) l r) . map (\x -> if x == -1 then Empty else subtrees !! (x - 1)) $ arr !! x
swapTree :: Int -> BinaryTree Int -> BinaryTree Int
swapTree k = swap 1
where swap _ Empty = Empty
swap h (Node x l r)
| h `rem` k == 0 = Node x (swap (h + 1) r) (swap (h + 1) l)
| otherwise = Node x (swap (h + 1) l) (swap (h + 1) r)
inorder :: BinaryTree Int -> [Int]
inorder Empty = []
inorder (Node x l r) = inorder l ++ [x] ++ inorder r
solve :: [Int] -> BinaryTree Int -> IO ()
solve [] tree = return ()
solve (k:ks) tree = putStrLn (unwords . map show . inorder $ ans) >> solve ks ans
where ans = swapTree k tree
validate :: [[Int]] -> IO ()
validate ([n]:rest) = solve k tree
where (arr, t:k) = (\(f, s) -> (f, concat s)) . splitAt n $ rest
tree = createTree n arr
main :: IO ()
main = getContents >>= validate . map (map read . words) . lines
| EdisonAlgorithms/HackerRank | practice/fp/ds/swap-nodes/swap-nodes.hs | mit | 1,189 | 0 | 16 | 365 | 645 | 332 | 313 | 25 | 2 |
{-# LANGUAGE CPP #-}
module Main where
import HList
rev :: [a] -> [a]
rev [] = []
rev (y:ys) = rev ys ++ [y]
main :: IO ()
main = print $ rev [1..10]
-- Should be in the "List" module
{-# RULES "++ []" forall xs . xs ++ [] = xs #-}
{-# RULES "++ strict" (++) undefined = undefined #-}
-- The "Algebra" for repH
{-# RULES "repH ++" forall xs ys . repH (xs ++ ys) = repH xs . repH ys #-}
{-# RULES "repH []" repH [] = id #-}
{-# RULES "repH (:)" forall x xs . repH (x:xs) = ((:) x) . repH xs #-}
| beni55/hermit | examples/concatVanishes/Rev.hs | bsd-2-clause | 571 | 0 | 7 | 191 | 94 | 55 | 39 | 13 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Bead.View.Content.Assignment.Page (
newGroupAssignment
, newCourseAssignment
, modifyAssignment
, viewAssignment
, newGroupAssignmentPreview
, newCourseAssignmentPreview
, modifyAssignmentPreview
) where
import Control.Monad.Error
import qualified Data.Map as Map
import Data.Time (getCurrentTime)
import qualified Bead.Controller.UserStories as S
import qualified Bead.Domain.Entity.Assignment as Assignment
import Bead.View.Content
import Bead.View.ContentHandler (getJSONParameters)
import Bead.View.RequestParams
import Bead.View.Content.Assignment.Data
import Bead.View.Content.Assignment.View
import Bead.View.Fay.Hooks
-- * Content Handlers
newCourseAssignment = ViewModifyHandler newCourseAssignmentPage postCourseAssignment
newGroupAssignment = ViewModifyHandler newGroupAssignmentPage postGroupAssignment
modifyAssignment = ViewModifyHandler modifyAssignmentPage postModifyAssignment
viewAssignment = ViewHandler viewAssignmentPage
newCourseAssignmentPreview = UserViewHandler newCourseAssignmentPreviewPage
newGroupAssignmentPreview = UserViewHandler newGroupAssignmentPreviewPage
modifyAssignmentPreview = UserViewHandler modifyAssignmentPreviewPage
-- * Course Assignment
newCourseAssignmentPage :: GETContentHandler
newCourseAssignmentPage = do
ck <- getParameter (customCourseKeyPrm courseKeyParamName)
(c, tss, ufs) <- userStory $ do
S.isAdministratedCourse ck
(course, _groupKeys) <- S.loadCourse ck
tss' <- S.testScriptInfosOfCourse ck
ufs <- map fst <$> S.listUsersFiles
return ((ck, course), nonEmptyList tss', ufs)
now <- liftIO $ getCurrentTime
tz <- userTimeZoneToLocalTimeConverter
return $ newAssignmentContent $ PD_Course tz now c tss ufs
postCourseAssignment :: POSTContentHandler
postCourseAssignment = do
CreateCourseAssignment
<$> getParameter (customCourseKeyPrm (fieldName selectedCourse))
<*> getAssignment
<*> readTCCreation
newCourseAssignmentPreviewPage :: ViewPOSTContentHandler
newCourseAssignmentPreviewPage = do
ck <- getParameter (customCourseKeyPrm courseKeyParamName)
assignment <- getAssignment
tc <- readTCCreationParameters
(c, tss, ufs) <- userStory $ do
S.isAdministratedCourse ck
(course, _groupKeys) <- S.loadCourse ck
tss' <- S.testScriptInfosOfCourse ck
ufs <- map fst <$> S.listUsersFiles
return ((ck, course), nonEmptyList tss', ufs)
now <- liftIO $ getCurrentTime
tz <- userTimeZoneToLocalTimeConverter
return $ newAssignmentContent $
PD_Course_Preview tz now c tss ufs assignment tc
-- Tries to create a TCCreation descriptive value from the test script, users file
-- and test case parameters; throws an error if the parameters are inconsistent.
readTCCreation :: ContentHandler TCCreation
readTCCreation = do
(mTestScript, mZippedTestCaseName, mPlainTestCase) <- readTCCreationParameters
case tcCreation mTestScript mZippedTestCaseName mPlainTestCase of
Left e -> throwError . strMsg $ "Some error in test case parameters " ++ e
Right tc -> return tc
readTCCreationParameters :: ContentHandler TCCreationParameters
readTCCreationParameters = do
mTestScript <- getOptionalParameter (jsonParameter (fieldName assignmentTestScriptField) "Test Script")
mZippedTestCaseName <- getOptionalOrNonEmptyParameter (jsonParameter (fieldName assignmentUsersFileField) "Test Script File")
mPlainTestCase <- getOptionalParameter (stringParameter (fieldName assignmentTestCaseField) "Test Script")
return (mTestScript, mZippedTestCaseName, mPlainTestCase)
tcCreation :: Maybe (Maybe TestScriptKey) -> Maybe UsersFile -> Maybe String -> Either String TCCreation
tcCreation Nothing _ _ = Right NoCreation
tcCreation (Just Nothing) _ _ = Right NoCreation
tcCreation (Just (Just tsk)) (Just uf) _ = Right $ FileCreation tsk uf
tcCreation (Just (Just tsk)) _ (Just t) = Right $ TextCreation tsk t
tcCreation (Just (Just _tsk)) Nothing Nothing = Left "#1"
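-- Illustrative cases for tcCreation (tsk and usersFile are hypothetical values):
--
-- tcCreation Nothing Nothing Nothing == Right NoCreation
-- tcCreation (Just (Just tsk)) (Just usersFile) Nothing == Right (FileCreation tsk usersFile)
-- tcCreation (Just (Just tsk)) Nothing (Just "input") == Right (TextCreation tsk "input")
-- tcCreation (Just (Just tsk)) Nothing Nothing == Left "#1"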
readTCModificationParameters :: ContentHandler TCModificationParameters
readTCModificationParameters = do
mTestScript <- getOptionalParameter (jsonParameter (fieldName assignmentTestScriptField) "Test Script")
mZippedTestCaseName <- getOptionalOrNonEmptyParameter (jsonParameter (fieldName assignmentUsersFileField) "Test Script File")
mPlainTestCase <- getOptionalParameter (stringParameter (fieldName assignmentTestCaseField) "Test Script")
return (mTestScript,mZippedTestCaseName,mPlainTestCase)
readTCModification :: ContentHandler TCModification
readTCModification = do
(mTestScript,mZippedTestCaseName,mPlainTestCase) <- readTCModificationParameters
case tcModification mTestScript mZippedTestCaseName mPlainTestCase of
Nothing -> throwError $ strMsg "Some error in test case parameters"
Just tm -> return tm
tcModification :: Maybe (Maybe TestScriptKey) -> Maybe (Either () UsersFile) -> Maybe String -> Maybe TCModification
tcModification Nothing _ _ = Just NoModification
tcModification (Just Nothing) _ _ = Just TCDelete
tcModification (Just (Just _tsk)) (Just (Left ())) _ = Just NoModification
tcModification (Just (Just tsk)) (Just (Right uf)) _ = Just $ FileOverwrite tsk uf
tcModification (Just (Just tsk)) _ (Just t) = Just $ TextOverwrite tsk t
tcModification _ _ _ = Nothing
-- * Group Assignment
newGroupAssignmentPage :: GETContentHandler
newGroupAssignmentPage = do
now <- liftIO $ getCurrentTime
gk <- getParameter (customGroupKeyPrm groupKeyParamName)
(g,tss,ufs) <- userStory $ do
S.isAdministratedGroup gk
group <- S.loadGroup gk
tss' <- S.testScriptInfosOfGroup gk
ufs <- map fst <$> S.listUsersFiles
return ((gk, group), nonEmptyList tss', ufs)
tz <- userTimeZoneToLocalTimeConverter
return $ newAssignmentContent $ PD_Group tz now g tss ufs
postGroupAssignment :: POSTContentHandler
postGroupAssignment = do
CreateGroupAssignment
<$> getParameter (customGroupKeyPrm (fieldName selectedGroup))
<*> getAssignment
<*> readTCCreation
newGroupAssignmentPreviewPage :: ViewPOSTContentHandler
newGroupAssignmentPreviewPage = do
gk <- getParameter (customGroupKeyPrm groupKeyParamName)
assignment <- getAssignment
tc <- readTCCreationParameters
(g,tss,ufs) <- userStory $ do
S.isAdministratedGroup gk
group <- S.loadGroup gk
tss' <- S.testScriptInfosOfGroup gk
ufs <- map fst <$> S.listUsersFiles
return ((gk, group), nonEmptyList tss', ufs)
tz <- userTimeZoneToLocalTimeConverter
now <- liftIO $ getCurrentTime
return $ newAssignmentContent $
PD_Group_Preview tz now g tss ufs assignment tc
-- * Modify Assignment
modifyAssignmentPage :: GETContentHandler
modifyAssignmentPage = do
ak <- getAssignmentKey
(as,tss,ufs,tc,ev) <- userStory $ do
S.isAdministratedAssignment ak
as <- S.loadAssignment ak
tss' <- S.testScriptInfosOfAssignment ak
ufs <- map fst <$> S.listUsersFiles
tc <- S.testCaseOfAssignment ak
ev <- not <$> S.isThereASubmission ak
return (as, nonEmptyList tss', ufs, tc, ev)
tz <- userTimeZoneToLocalTimeConverter
return $ newAssignmentContent $
PD_Assignment tz ak as tss ufs tc ev
postModifyAssignment :: POSTContentHandler
postModifyAssignment = do
ModifyAssignment
<$> getAssignmentKey
<*> getAssignment
<*> readTCModification
modifyAssignmentPreviewPage :: ViewPOSTContentHandler
modifyAssignmentPreviewPage = do
ak <- getAssignmentKey
as <- getAssignment
tm <- readTCModificationParameters
(tss,ufs,tc,ev) <- userStory $ do
S.isAdministratedAssignment ak
tss' <- S.testScriptInfosOfAssignment ak
ufs <- map fst <$> S.listUsersFiles
tc <- S.testCaseOfAssignment ak
ev <- not <$> S.isThereASubmission ak
return (nonEmptyList tss', ufs, tc, ev)
tz <- userTimeZoneToLocalTimeConverter
return $ newAssignmentContent $
PD_Assignment_Preview tz ak as tss ufs tc tm ev
viewAssignmentPage :: GETContentHandler
viewAssignmentPage = do
ak <- getAssignmentKey
(as,tss,tc) <- userStory $ do
S.isAdministratedAssignment ak
as <- S.loadAssignment ak
tss' <- S.testScriptInfosOfAssignment ak
ts <- S.testCaseOfAssignment ak
return (as, tss', ts)
tz <- userTimeZoneToLocalTimeConverter
let ti = do (_tck, _tc, tsk) <- tc
Map.lookup tsk $ Map.fromList tss
return $ newAssignmentContent $ PD_ViewAssignment tz ak as ti tc
-- * Helpers
-- | Returns Nothing if the given list was empty, otherwise Just list
nonEmptyList [] = Nothing
nonEmptyList xs = Just xs
-- Get Assignment Value
getAssignment = do
converter <- userTimeZoneToUTCTimeConverter
startDate <- converter <$> getParameter assignmentStartPrm
endDate <- converter <$> getParameter assignmentEndPrm
when (endDate < startDate) . throwError $ strMsg "The assignment starts later than it ends"
pwd <- getParameter (stringParameter (fieldName assignmentPwdField) "Password")
noOfTries <- getParameter (stringParameter (fieldName assignmentNoOfTriesField) "Number of tries")
asp <- Assignment.aspectsFromList <$> getJSONParameters (fieldName assignmentAspectField) "Aspect parameter"
stype <- getJSONParam (fieldName assignmentSubmissionTypeField) "Submission type"
let asp1 = if stype == Assignment.TextSubmission
then Assignment.clearZippedSubmissions asp
else Assignment.setZippedSubmissions asp
let asp2 = if Assignment.isPasswordProtected asp1
then Assignment.setPassword pwd asp1
else asp1
let asp3 = if Assignment.isNoOfTries asp2
then Assignment.setNoOfTries (read noOfTries) asp2
else asp2
Assignment.assignmentAna
(getParameter (stringParameter (fieldName assignmentNameField) "Name"))
(getParameter (stringParameter (fieldName assignmentDescField) "Description"))
(return asp3)
(return startDate)
(return endDate)
(getParameter (evalConfigPrm assignmentEvTypeHook))
getAssignmentKey = getParameter assignmentKeyPrm
| pgj/bead | src/Bead/View/Content/Assignment/Page.hs | bsd-3-clause | 10,185 | 0 | 14 | 1,788 | 2,603 | 1,278 | 1,325 | 206 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Form.I18n.Japanese where
import Yesod.Form.Types (FormMessage (..))
import Data.Monoid (mappend)
import Data.Text (Text)
japaneseFormMessage :: FormMessage -> Text
japaneseFormMessage (MsgInvalidInteger t) = "無効な整数です: " `Data.Monoid.mappend` t
japaneseFormMessage (MsgInvalidNumber t) = "無効な数値です: " `mappend` t
japaneseFormMessage (MsgInvalidEntry t) = "無効な入力です: " `mappend` t
japaneseFormMessage MsgInvalidTimeFormat = "無効な時刻です。HH:MM[:SS]フォーマットで入力してください"
japaneseFormMessage MsgInvalidDay = "無効な日付です。YYYY-MM-DDフォーマットで入力してください"
japaneseFormMessage (MsgInvalidUrl t) = "無効なURLです: " `mappend` t
japaneseFormMessage (MsgInvalidEmail t) = "無効なメールアドレスです: " `mappend` t
japaneseFormMessage (MsgInvalidHour t) = "無効な時間です: " `mappend` t
japaneseFormMessage (MsgInvalidMinute t) = "無効な分です: " `mappend` t
japaneseFormMessage (MsgInvalidSecond t) = "無効な秒です: " `mappend` t
japaneseFormMessage MsgCsrfWarning = "CSRF攻撃を防ぐため、フォームの入力を確認してください"
japaneseFormMessage MsgValueRequired = "値は必須です"
japaneseFormMessage (MsgInputNotFound t) = "入力が見つかりません: " `mappend` t
japaneseFormMessage MsgSelectNone = "<なし>"
japaneseFormMessage (MsgInvalidBool t) = "無効なbool値です: " `mappend` t
japaneseFormMessage MsgBoolYes = "はい"
japaneseFormMessage MsgBoolNo = "いいえ"
japaneseFormMessage MsgDelete = "削除しますか?"
| s9gf4ult/yesod | yesod-form/Yesod/Form/I18n/Japanese.hs | mit | 1,650 | 0 | 7 | 136 | 320 | 178 | 142 | 24 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Stackage.LoadDatabase where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Exception (IOException, handle)
import Control.Monad (guard, foldM)
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.List (stripPrefix)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, listToMaybe,
mapMaybe, fromMaybe)
import Data.Monoid (Monoid (..))
import Data.Set (member)
import qualified Data.Set as Set
import Distribution.Compiler (CompilerFlavor (GHC))
import Distribution.Package (Dependency (Dependency))
import Distribution.PackageDescription (Condition (..),
ConfVar (..),
FlagName (FlagName),
RepoType (Git),
SourceRepo (..),
benchmarkBuildInfo,
buildInfo, buildTools,
condBenchmarks,
condExecutables,
condLibrary,
condTestSuites,
condTreeComponents,
condTreeConstraints,
condTreeData,
flagDefault, flagName,
genPackageFlags,
homepage, libBuildInfo,
packageDescription,
sourceRepos,
testBuildInfo)
import Distribution.PackageDescription.Parse (ParseResult (ParseOk),
parsePackageDescription)
import Distribution.System (buildArch, buildOS)
import Distribution.Text (simpleParse)
import Distribution.Version (Version (Version),
unionVersionRanges,
withinRange)
import Stackage.Config (convertGithubUser)
import Stackage.Types
import Stackage.Util
import System.Directory (doesFileExist, getDirectoryContents)
import System.FilePath ((<.>), (</>))
-- | Load the raw package database.
--
-- We want to put in some restrictions:
--
-- * Drop all core packages. We never want to install a new version of
-- those, nor include them in the package list.
--
-- * For packages with a specific version bound, find the maximum matching
-- version.
--
-- * For other packages, select the maximum version number.
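--
-- A minimal usage sketch (the argument values here are illustrative, not
-- taken from a real build plan):
--
-- > pdb <- loadPackageDB settings hpCoreVersions allCorePackages extraDeps Set.empty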
loadPackageDB :: SelectSettings
-> Map PackageName Version -- ^ core packages from HP file
-> Set PackageName -- ^ all core packages, including extras
-> Map PackageName (VersionRange, Maintainer) -- ^ additional deps
-> Set PackageName -- ^ underlay packages to exclude
-> IO PackageDB
loadPackageDB settings coreMap core deps underlay = do
tarName <- getTarballName
lbs <- L.readFile tarName
pdb <- addEntries mempty $ Tar.read lbs
contents <- handle (\(_ :: IOException) -> return [])
$ getDirectoryContents $ selectTarballDir settings
pdb' <- foldM addTarball pdb $ mapMaybe stripTarGz contents
return $ excludeUnderlay pdb'
where
addEntries _ (Tar.Fail e) = error $ show e
addEntries db Tar.Done = return db
addEntries db (Tar.Next e es) = addEntry db e >>= flip addEntries es
stripTarGz = fmap reverse . stripPrefix (reverse ".tar.gz") . reverse
ghcVersion' =
let GhcMajorVersion x y = selectGhcVersion settings
in Version [x, y, 2] []
addEntry :: PackageDB -> Tar.Entry -> IO PackageDB
addEntry pdb e =
case getPackageVersion e of
Nothing -> return pdb
Just (p, v)
| p `member` core -> return pdb
| otherwise ->
case Map.lookup p deps of
Just (vrange, _maintainer)
| not $ withinRange v vrange -> return pdb
_ -> do
let pkgname = packageVersionString (p, v)
tarball = selectTarballDir settings </> pkgname <.> "tar.gz"
case Tar.entryContent e of
Tar.NormalFile bs _ -> addPackage p v bs pdb
_ -> return pdb
addTarball :: PackageDB -> FilePath -> IO PackageDB
addTarball pdb tarball' = do
lbs <- L.readFile tarball
let (v', p') = break (== '-') $ reverse tarball'
p = PackageName $ reverse $ drop 1 p'
v <- maybe (error $ "Invalid tarball name: " ++ tarball) return
$ simpleParse $ reverse v'
case Map.lookup p deps of
Just (vrange, _)
| not $ withinRange v vrange -> return pdb
_ -> findCabalAndAddPackage tarball p v pdb $ Tar.read $ GZip.decompress lbs
where
tarball = selectTarballDir settings </> tarball' <.> "tar.gz"
excludeUnderlay :: PackageDB -> PackageDB
excludeUnderlay (PackageDB pdb) =
PackageDB $ Map.filterWithKey (\k _ -> Set.notMember k underlay) pdb
skipTests p = p `Set.member` skippedTests settings
-- Find the relevant cabal file in the given entries and add its contents
-- to the package database
findCabalAndAddPackage tarball p v pdb =
loop
where
fixPath '\\' = '/'
fixPath c = c
expectedPath = let PackageName p' = p in concat
[ packageVersionString (p, v)
, "/"
, p'
, ".cabal"
]
loop Tar.Done = error $ concat
[ "Missing cabal file "
, show expectedPath
, " in tarball: "
, show tarball
]
loop (Tar.Fail e) = error $ concat
[ "Unable to read tarball "
, show tarball
, ": "
, show e
]
loop (Tar.Next entry rest)
| map fixPath (Tar.entryPath entry) == expectedPath =
case Tar.entryContent entry of
Tar.NormalFile bs _ -> addPackage p v bs pdb
_ -> error $ concat
[ "In tarball "
, show tarball
, " the cabal file "
, show expectedPath
, " was not a normal file"
]
| otherwise = loop rest
addPackage p v lbs pdb = do
let (deps', hasTests, buildToolsExe', buildToolsOther', mgpd, execs, mgithub) = parseDeps p lbs
return $ mappend pdb $ PackageDB $ Map.singleton p PackageInfo
{ piVersion = v
, piDeps = deps'
, piHasTests = hasTests
, piBuildToolsExe = buildToolsExe'
, piBuildToolsAll = buildToolsExe' `Set.union` buildToolsOther'
, piGPD = mgpd
, piExecs = execs
, piGithubUser = fromMaybe [] mgithub
}
parseDeps p lbs =
case parsePackageDescription $ L8.unpack lbs of
ParseOk _ gpd -> (mconcat
[ maybe mempty (go gpd) $ condLibrary gpd
, mconcat $ map (go gpd . snd) $ condExecutables gpd
, if skipTests p
then mempty
else mconcat $ map (go gpd . snd) $ condTestSuites gpd
-- FIXME , mconcat $ map (go gpd . snd) $ condBenchmarks gpd
], not $ null $ condTestSuites gpd
, Set.fromList $ map depName $ libExeBuildInfo gpd
, Set.fromList $ map depName $ testBenchBuildInfo gpd
, Just gpd
, Set.fromList $ map (Executable . fst) $ condExecutables gpd
, fmap convertGithubUser $ listToMaybe $ catMaybes
$ parseGithubUserHP (homepage $ packageDescription gpd)
: map parseGithubUserSR (sourceRepos $ packageDescription gpd)
)
_ -> (mempty, defaultHasTestSuites, Set.empty, Set.empty, Nothing, Set.empty, Nothing)
where
libExeBuildInfo gpd = concat
[ maybe mempty (goBI libBuildInfo) $ condLibrary gpd
, concat $ map (goBI buildInfo . snd) $ condExecutables gpd
]
testBenchBuildInfo gpd = concat
[ if skipTests p
then []
else concat $ map (goBI testBuildInfo . snd) $ condTestSuites gpd
, concat $ map (goBI benchmarkBuildInfo . snd) $ condBenchmarks gpd
]
goBI f x = buildTools $ f $ condTreeData x
depName (Dependency (PackageName pn) _) = Executable pn
go gpd tree
= Map.filterWithKey (\k _ -> not $ ignoredDep k)
$ Map.unionsWith unionVersionRanges
$ Map.fromList (map (\(Dependency pn vr) -> (pn, vr)) $ condTreeConstraints tree)
: map (go gpd) (mapMaybe (checkCond gpd) $ condTreeComponents tree)
-- Some specific overrides for cases where getting Stackage to be smart
-- enough to handle things would be too difficult.
ignoredDep :: PackageName -> Bool
ignoredDep dep
-- The flag logic used by text-stream-decode confuses Stackage.
| dep == PackageName "text" && p == PackageName "text-stream-decode" = True
| otherwise = False
checkCond gpd (cond, tree, melse)
| checkCond' cond = Just tree
| otherwise = melse
where
checkCond' (Var (OS os)) = os == buildOS
checkCond' (Var (Arch arch)) = arch == buildArch
-- Sigh... the small_base flag on mersenne-random-pure64 is backwards
checkCond' (Var (Flag (FlagName "small_base")))
| p == PackageName "mersenne-random-pure64" = False
checkCond' (Var (Flag flag@(FlagName flag'))) =
flag' `Set.notMember` disabledFlags settings &&
flag `elem` flags'
checkCond' (Var (Impl compiler range)) =
compiler == GHC && withinRange ghcVersion' range
checkCond' (Lit b) = b
checkCond' (CNot c) = not $ checkCond' c
checkCond' (COr c1 c2) = checkCond' c1 || checkCond' c2
checkCond' (CAnd c1 c2) = checkCond' c1 && checkCond' c2
flags' = map flagName (filter flagDefault $ genPackageFlags gpd) ++
(map FlagName $ Set.toList $ Stackage.Types.flags settings coreMap)
-- | Attempt to grab the Github username from a homepage.
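--
-- For example (illustrative): @parseGithubUserHP "https://github.com/alice/repo"@
-- yields @Just "alice"@, while a non-GitHub homepage yields @Nothing@.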
parseGithubUserHP :: String -> Maybe String
parseGithubUserHP url1 = do
url2 <- listToMaybe $ mapMaybe (flip stripPrefix url1)
[ "http://github.com/"
, "https://github.com/"
]
let x = takeWhile (/= '/') url2
guard $ not $ null x
Just x
-- | Attempt to grab the Github username from a source repo.
parseGithubUserSR :: SourceRepo -> Maybe String
parseGithubUserSR sr =
case (repoType sr, repoLocation sr) of
(Just Git, Just s) -> parseGithubUserHP s
_ -> Nothing
| Tarrasch/stackage | Stackage/LoadDatabase.hs | mit | 12,417 | 0 | 21 | 5,293 | 2,771 | 1,434 | 1,337 | 210 | 22 |
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.NamedScratchpad
-- Copyright : (c) Konstantin Sobolev <konstantin.sobolev@gmail.com>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Konstantin Sobolev <konstantin.sobolev@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Named scratchpads that support several arbitrary applications at the same time.
--
-----------------------------------------------------------------------------
module XMonad.Util.NamedScratchpad (
-- * Usage
-- $usage
NamedScratchpad(..),
nonFloating,
defaultFloating,
customFloating,
NamedScratchpads,
namedScratchpadAction,
allNamedScratchpadAction,
namedScratchpadManageHook,
namedScratchpadFilterOutWorkspace,
namedScratchpadFilterOutWorkspacePP
) where
import XMonad
import XMonad.Hooks.ManageHelpers (doRectFloat)
import XMonad.Actions.DynamicWorkspaces (addHiddenWorkspace)
import XMonad.Hooks.DynamicLog (PP, ppSort)
import Control.Monad (filterM)
import Data.Maybe (listToMaybe)
import qualified XMonad.StackSet as W
-- $usage
-- Allows to have several floating scratchpads running different applications.
-- Bind a key to 'namedScratchpadAction'.
-- Pressing it will spawn configured application, or bring it to the current
-- workspace if it already exists.
-- Pressing the key with the application on the current workspace will
-- send it to a hidden workspace called @NSP@.
--
-- If you already have a workspace called @NSP@, it will use that.
-- @NSP@ will also appear in xmobar and dzen status bars. You can tweak your
-- @dynamicLog@ settings to filter it out if you like.
--
-- Create named scratchpads configuration in your xmonad.hs like this:
--
-- > import XMonad.StackSet as W
-- > import XMonad.ManageHook
-- > import XMonad.Util.NamedScratchpad
-- >
-- > scratchpads = [
-- > -- run htop in xterm, find it by title, use default floating window placement
-- > NS "htop" "xterm -e htop" (title =? "htop") defaultFloating ,
-- >
-- > -- run stardict, find it by class name, place it in the floating window
-- > -- 1/6 of screen width from the left, 1/6 of screen height
-- > -- from the top, 2/3 of screen width by 2/3 of screen height
-- > NS "stardict" "stardict" (className =? "Stardict")
-- > (customFloating $ W.RationalRect (1/6) (1/6) (2/3) (2/3)) ,
-- >
-- > -- run gvim, find by role, don't float
-- > NS "notes" "gvim --role notes ~/notes.txt" (role =? "notes") nonFloating
-- > ] where role = stringProperty "WM_WINDOW_ROLE"
--
-- Add keybindings:
--
-- > , ((modm .|. controlMask .|. shiftMask, xK_t), namedScratchpadAction scratchpads "htop")
-- > , ((modm .|. controlMask .|. shiftMask, xK_s), namedScratchpadAction scratchpads "stardict")
-- > , ((modm .|. controlMask .|. shiftMask, xK_n), namedScratchpadAction scratchpads "notes")
--
-- ... and a manage hook:
--
-- > , manageHook = namedScratchpadManageHook scratchpads
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings"
--
-- | Single named scratchpad configuration
data NamedScratchpad = NS { name :: String -- ^ Scratchpad name
, cmd :: String -- ^ Command used to run application
, query :: Query Bool -- ^ Query to find already running application
, hook :: ManageHook -- ^ Manage hook called for application window, use it to define the placement. See @nonFloating@, @defaultFloating@ and @customFloating@
}
-- | Manage hook that makes the window non-floating
nonFloating :: ManageHook
nonFloating = idHook
-- | Manage hook that makes the window floating with the default placement
defaultFloating :: ManageHook
defaultFloating = doFloat
-- | Manage hook that makes the window floating with custom placement
customFloating :: W.RationalRect -> ManageHook
customFloating = doRectFloat
-- | Named scratchpads configuration
type NamedScratchpads = [NamedScratchpad]
-- | Finds named scratchpad configuration by name
findByName :: NamedScratchpads -> String -> Maybe NamedScratchpad
findByName c s = listToMaybe $ filter ((s==) . name) c
-- | Runs application which should appear in specified scratchpad
runApplication :: NamedScratchpad -> X ()
runApplication = spawn . cmd
-- | Action to pop up specified named scratchpad
namedScratchpadAction :: NamedScratchpads -- ^ Named scratchpads configuration
-> String -- ^ Scratchpad name
-> X ()
namedScratchpadAction = someNamedScratchpadAction (\f ws -> f $ head ws)
allNamedScratchpadAction :: NamedScratchpads
-> String
-> X ()
allNamedScratchpadAction = someNamedScratchpadAction mapM_
someNamedScratchpadAction :: ((Window -> X ()) -> [Window] -> X ())
-> NamedScratchpads
-> String
-> X ()
someNamedScratchpadAction f confs n
| Just conf <- findByName confs n = withWindowSet $ \s -> do
filterCurrent <- filterM (runQuery (query conf))
((maybe [] W.integrate . W.stack . W.workspace . W.current) s)
filterAll <- filterM (runQuery (query conf)) (W.allWindows s)
case filterCurrent of
[] -> do
case filterAll of
[] -> runApplication conf
_ -> f (windows . W.shiftWin (W.currentTag s)) filterAll
_ -> do
if null (filter ((== scratchpadWorkspaceTag) . W.tag) (W.workspaces s))
then addHiddenWorkspace scratchpadWorkspaceTag
else return ()
f (windows . W.shiftWin scratchpadWorkspaceTag) filterAll
| otherwise = return ()
-- tag of the scratchpad workspace
scratchpadWorkspaceTag :: String
scratchpadWorkspaceTag = "NSP"
-- | Manage hook to use with named scratchpads
namedScratchpadManageHook :: NamedScratchpads -- ^ Named scratchpads configuration
-> ManageHook
namedScratchpadManageHook = composeAll . fmap (\c -> query c --> hook c)
-- | Transforms a workspace list containing the NSP workspace into one that
-- doesn't contain it. Intended for use with logHooks.
namedScratchpadFilterOutWorkspace :: [WindowSpace] -> [WindowSpace]
namedScratchpadFilterOutWorkspace = filter (\(W.Workspace tag _ _) -> tag /= scratchpadWorkspaceTag)
-- | Transforms a pretty-printer into one not displaying the NSP workspace.
--
-- A simple use could be:
--
-- > logHook = dynamicLogWithPP . namedScratchpadFilterOutWorkspace $ defaultPP
--
-- Here is another example, when using "XMonad.Layout.IndependentScreens".
-- If you have handles @hLeft@ and @hRight@ for bars on the left and right screens, respectively, and @pp@ is a pretty-printer function that takes a handle, you could write
--
-- > logHook = let log screen handle = dynamicLogWithPP . namedScratchpadFilterOutWorkspacePP . marshallPP screen . pp $ handle
-- > in log 0 hLeft >> log 1 hRight
namedScratchpadFilterOutWorkspacePP :: PP -> PP
namedScratchpadFilterOutWorkspacePP pp = pp {
ppSort = fmap (. namedScratchpadFilterOutWorkspace) (ppSort pp)
}
-- vim:ts=4:shiftwidth=4:softtabstop=4:expandtab:foldlevel=20:
| markus1189/xmonad-contrib-710 | XMonad/Util/NamedScratchpad.hs | bsd-3-clause | 7,541 | 0 | 24 | 1,739 | 896 | 521 | 375 | 72 | 4 |
{-# LANGUAGE DataKinds, ExistentialQuantification, GADTs, PolyKinds, TypeOperators #-}
{-# LANGUAGE TypeInType, TypeFamilies #-}
{- # LANGUAGE UndecidableInstances #-}
module T15552 where
import Data.Kind
data Elem :: k -> [k] -> Type where
Here :: Elem x (x : xs)
There :: Elem x xs -> Elem x (y : xs)
data EntryOfVal (v :: Type) (kvs :: [Type]) where
EntryOfVal :: forall (v :: Type) (kvs :: [Type]) (k :: Type).
Elem (k, v) kvs -> EntryOfVal v kvs
type family EntryOfValKey (eov :: EntryOfVal v kvs) :: Type where
EntryOfValKey ('EntryOfVal (_ :: Elem (k, v) kvs)) = k
type family GetEntryOfVal (eov :: EntryOfVal v kvs)
:: Elem (EntryOfValKey eov, v) kvs
where
GetEntryOfVal ('EntryOfVal e) = e
type family FirstEntryOfVal (v :: Type) (kvs :: [Type]) :: EntryOfVal v kvs
where FirstEntryOfVal v (_ : kvs)
= 'EntryOfVal (There (GetEntryOfVal (FirstEntryOfVal v kvs)))
--type instance FirstEntryOfVal v (_ : kvs)
-- = 'EntryOfVal ('There (GetEntryOfVal (FirstEntryOfVal v kvs)))
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T15552a.hs | bsd-3-clause | 1,051 | 0 | 12 | 231 | 327 | 191 | 136 | 18 | 0 |
module AddOneParameter.D1 where
{-add parameter 'f' to function 'sq' . This refactoring
affects module 'D1', 'C1' and 'A1'-}
sumSquares (x:xs) = sq x + sumSquares xs
sumSquares [] = 0
sq x = x ^ pow
pow = 2
| RefactoringTools/HaRe | test/testdata/AddOneParameter/D1.hs | bsd-3-clause | 215 | 0 | 7 | 47 | 59 | 31 | 28 | 5 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Machine.Wye
-- Copyright : (C) 2012 Edward Kmett, Rúnar Bjarnason, Paul Chiusano
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : Rank-2 Types, GADTs
--
----------------------------------------------------------------------------
module Data.Machine.Wye
(
-- * Wyes
Wye, WyeT
, Y(..)
, wye
, addX, addY
, capX, capY
) where
import Control.Category
import Data.Machine.Process
import Data.Machine.Type
import Data.Machine.Is
import Data.Machine.Source
import Prelude hiding ((.),id)
-------------------------------------------------------------------------------
-- Wyes
-------------------------------------------------------------------------------
-- | The input descriptor for a 'Wye' or 'WyeT'
data Y a b c where
X :: Y a b a -- block waiting on the left input
Y :: Y a b b -- block waiting on the right input
Z :: Y a b (Either a b) -- block waiting on either input
-- | A 'Machine' that can read from two input stream in a non-deterministic manner.
type Wye a b c = Machine (Y a b) c
-- | A 'Machine' that can read from two input stream in a non-deterministic manner with monadic side-effects.
type WyeT m a b c = MachineT m (Y a b) c
-- | Compose a pair of pipes onto the front of a 'Wye': precompose a
-- 'Process' onto each input of the 'Wye' (or 'WyeT').
--
-- This is left biased in that it tries to draw values from the 'X' input whenever they are
-- available, and only draws from the 'Y' input when 'X' would block.
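--
-- A small sketch (purely illustrative): @'wye' 'echo' 'echo' m@ passes both
-- inputs through unchanged, so it behaves like @m@ itself.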
wye :: Monad m => ProcessT m a a' -> ProcessT m b b' -> WyeT m a' b' c -> WyeT m a b c
wye ma mb m = MachineT $ runMachineT m >>= \v -> case v of
Yield o k -> return $ Yield o (wye ma mb k)
Stop -> return Stop
Await f X ff -> runMachineT ma >>= \u -> case u of
Yield a k -> runMachineT . wye k mb $ f a
Stop -> runMachineT $ wye stopped mb ff
Await g Refl fg -> return . Await (\a -> wye (g a) mb $ encased v) X
. wye fg mb $ encased v
Await f Y ff -> runMachineT mb >>= \u -> case u of
Yield b k -> runMachineT . wye ma k $ f b
Stop -> runMachineT $ wye ma stopped ff
Await g Refl fg -> return . Await (\b -> wye ma (g b) $ encased v) Y
. wye ma fg $ encased v
Await f Z ff -> runMachineT ma >>= \u -> case u of
Yield a k -> runMachineT . wye k mb . f $ Left a
Stop -> runMachineT mb >>= \w -> case w of
Yield b k -> runMachineT . wye stopped k . f $ Right b
Stop -> runMachineT $ wye stopped stopped ff
Await g Refl fg -> return . Await (\b -> wye stopped (g b) $ encased v) Y
. wye stopped fg $ encased v
Await g Refl fg -> runMachineT mb >>= \w -> case w of
Yield b k -> runMachineT . wye (encased u) k . f $ Right b
Stop -> return . Await (\a -> wye (g a) stopped $ encased v) X
. wye fg stopped $ encased v
Await h Refl fh -> return . Await (\c -> case c of
Left a -> wye (g a) (encased w) $ encased v
Right b -> wye (encased u) (h b) $ encased v) Z
. wye fg fh $ encased v
-- | Precompose a pipe onto the left input of a wye.
addX :: Monad m => ProcessT m a b -> WyeT m b c d -> WyeT m a c d
addX p = wye p echo
{-# INLINE addX #-}
-- | Precompose a pipe onto the right input of a wye.
addY :: Monad m => ProcessT m b c -> WyeT m a c d -> WyeT m a b d
addY = wye echo
{-# INLINE addY #-}
-- | Tie off the left input of a wye by connecting it to a known source.
capX :: Monad m => SourceT m a -> WyeT m a b c -> ProcessT m b c
capX s t = process (capped Right) (addX s t)
{-# INLINE capX #-}
-- | Tie off the right input of a wye by connecting it to a known source.
capY :: Monad m => SourceT m b -> WyeT m a b c -> ProcessT m a c
capY s t = process (capped Left) (addY s t)
{-# INLINE capY #-}
-- | Natural transformation used by 'capX' and 'capY'
capped :: (a -> Either a a) -> Y a a b -> a -> b
capped _ X = id
capped _ Y = id
capped f Z = f
{-# INLINE capped #-}
| YoEight/machines | src/Data/Machine/Wye.hs | bsd-3-clause | 4,564 | 0 | 32 | 1,467 | 1,367 | 690 | 677 | 67 | 16 |
module SubPatternIn3 where
-- takes into account general type variables
-- within type implementation.
-- here T has its arguments instantiated within g
-- selecting 'b' should instantiate list patterns
-- selecting 'c' should give an error.
--
data T a b = C1 a b | C2 b
g :: Int -> T Int [Int] -> Int
g z (C1 b c) = b
g z (C2 x@[]) = hd x
g z (C2 x@(b_1 : b_2)) = hd x
g z (C2 x) = hd x
f :: [Int] -> Int
f x@[] = hd x + hd (tl x)
f x@(y:ys) = hd x + hd (tl x)
hd x = head x
tl x = tail x
| kmate/HaRe | old/testing/subIntroPattern/SubPatternIn3_TokOut.hs | bsd-3-clause | 500 | 0 | 10 | 130 | 236 | 124 | 112 | 12 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS -fvia-C -optc-O3 -fexcess-precision -optc-msse3 #-}
import Control.Monad.ST
import Data.Array.ST
import Data.Array.Base
main = print $ runST
(do arr <- newArray (1,2000000) 137.0 :: ST s (STUArray s Int Double)
go arr 2000000 0.0 )
go :: STUArray s Int Double -> Int -> Double -> ST s Double
go !a i !acc
| i < 1 = return acc
| otherwise = do
b <- unsafeRead a i
unsafeWrite a i (b+3.0)
c <- unsafeRead a i
go a (i-1) (c+acc)
| urbanslug/ghc | testsuite/tests/perf/should_run/T3586.hs | bsd-3-clause | 544 | 2 | 13 | 163 | 220 | 106 | 114 | 16 | 1 |
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TemplateHaskell #-}
module SuperUserSpark.CompilerSpec where
import TestImport hiding ((<.>))
import Data.Either (isLeft, isRight)
import Data.List (isPrefixOf)
import System.FilePath.Posix ((<.>))
import SuperUserSpark.Compiler
import SuperUserSpark.Compiler.Gen ()
import SuperUserSpark.Compiler.Internal
import SuperUserSpark.Compiler.TestUtils
import SuperUserSpark.Compiler.Types
import SuperUserSpark.Compiler.Utils
import SuperUserSpark.CoreTypes
import SuperUserSpark.Language.Gen ()
import SuperUserSpark.Language.Types
import SuperUserSpark.OptParse.Gen ()
import SuperUserSpark.PreCompiler
import SuperUserSpark.Utils
import TestUtils
spec :: Spec
spec = do
parallel $ do
instanceSpec
singleCompileDecSpec
precompileSpec
compileUnitSpec
utilsSpec
hopTests
exactTests
compilerBlackBoxTests
precompileSpec :: Spec
precompileSpec = describe "pre-compilation" $ do cleanContentSpec
cleanContentSpec :: Spec
cleanContentSpec = do
let validFp = genValid `suchThat` cleanBy cleanFilePath
describe "cleanCard" $ do
it "doesn't report any card with valid content and a valid name" $ do
forAll (genValid `suchThat` cleanBy cleanCardName) $ \cn ->
forAll (genValid `suchThat` cleanBy cleanDeclaration) $ \cc ->
Card cn cc `shouldSatisfy` cleanBy cleanCard
describe "cleanCardName" $ do
pend
it "doesn't report an emty card name" $ do
"" `shouldSatisfy` cleanBy cleanCardName
it "reports card names with newlines" $ do
forAll (sequenceA [genValid, pure '\n', genValid]) $ \s ->
s `shouldNotSatisfy` cleanBy cleanCardName
describe "cleanDeclaration" $ do
describe "Deploy" $ do
it "doesn't report Deploy declarations with valid filepaths" $ do
forAll validFp $ \src ->
forAll validFp $ \dst ->
forAll genValid $ \kind ->
Deploy src dst kind `shouldSatisfy`
cleanBy cleanDeclaration
it "reports Deploy declarations with an invalid source" $ do
forAll (genValid `suchThat` (not . cleanBy cleanFilePath)) $ \src ->
forAll validFp $ \dst ->
forAll genValid $ \kind ->
Deploy src dst kind `shouldNotSatisfy`
cleanBy cleanDeclaration
it "reports Deploy declarations with an invalid destination" $ do
forAll validFp $ \src ->
forAll (genValid `suchThat` (not . cleanBy cleanFilePath)) $ \dst ->
forAll genValid $ \kind ->
Deploy src dst kind `shouldNotSatisfy`
cleanBy cleanDeclaration
pend
describe "SparkOff" $ do
it "reports SparkOff declarations with an invalid card reference" $ do
forAll (genValid `suchThat` (not . cleanBy cleanCardReference)) $ \cr ->
SparkOff cr `shouldNotSatisfy` cleanBy cleanDeclaration
it
"doesn't report SparkOff declarations with a valid card reference" $ do
forAll (genValid `suchThat` cleanBy cleanCardReference) $ \cr ->
SparkOff cr `shouldSatisfy` cleanBy cleanDeclaration
pend
describe "IntoDir" $ do
it "reports IntoDir declarations with an invalid filepath" $ do
forAll (genValid `suchThat` (not . cleanBy cleanFilePath)) $ \fp ->
IntoDir fp `shouldNotSatisfy` cleanBy cleanDeclaration
it "doesn't report IntoDir declarations with a valid filepath" $ do
forAll (genValid `suchThat` cleanBy cleanFilePath) $ \fp ->
IntoDir fp `shouldSatisfy` cleanBy cleanDeclaration
pend
describe "OutofDir" $ do
it "reports OutofDir declarations with an invalid filepath" $ do
forAll (genValid `suchThat` (not . cleanBy cleanFilePath)) $ \fp ->
OutofDir fp `shouldNotSatisfy` cleanBy cleanDeclaration
it "doesn't report OutofDir declarations with a valid filepath" $ do
forAll (genValid `suchThat` cleanBy cleanFilePath) $ \fp ->
OutofDir fp `shouldSatisfy` cleanBy cleanDeclaration
pend
describe "DeployKindOverride" $ do
it "doesn't report any deployment kind override declarations" $ do
forAll genValid $ \kind ->
DeployKindOverride kind `shouldSatisfy`
cleanBy cleanDeclaration
pend
describe "Alternatives" $ do
it
"reports alternatives declarations with as much as a single invalid filepath" $ do
forAll (genValid `suchThat` (any $ not . cleanBy cleanFilePath)) $ \fs ->
Alternatives fs `shouldNotSatisfy` cleanBy cleanDeclaration
it "doesn't report alternatives declarations with valid filepaths" $ do
forAll (genValid `suchThat` (all $ cleanBy cleanFilePath)) $ \fs ->
Alternatives fs `shouldSatisfy` cleanBy cleanDeclaration
pend
describe "Block" $ do
it
"reports block declarations with as much as a single invalid declaration inside" $ do
forAll
(genValid `suchThat` (any $ not . cleanBy cleanDeclaration)) $ \ds ->
Block ds `shouldNotSatisfy` cleanBy cleanDeclaration
it
"doesn't report any block declarations with valid declarations inside" $ do
forAll (genValid `suchThat` (all $ cleanBy cleanDeclaration)) $ \ds ->
Block ds `shouldSatisfy` cleanBy cleanDeclaration
pend
describe "cleanCardReference" $ do
it "works the same as cleanCardName separately" $ do
forAll genValid $ \cnr ->
cleanBy cleanCardNameReference cnr ===
cleanBy cleanCardReference (CardName cnr)
it "works the same as cleanCardFile separately" $ do
forAll genValid $ \cfr ->
cleanBy cleanCardFileReference cfr ===
cleanBy cleanCardReference (CardFile cfr)
pend
describe "cleanCardNameReference" $ do
it "reports card name references with an invalid card name" $ do
forAll (genValid `suchThat` (not . cleanBy cleanCardName)) $ \cn ->
CardNameReference cn `shouldNotSatisfy`
cleanBy cleanCardNameReference
it "doesn't report card name references with a valid card name" $ do
forAll (genValid `suchThat` cleanBy cleanCardName) $ \cn ->
CardNameReference cn `shouldSatisfy`
cleanBy cleanCardNameReference
pend
describe "cleanCardFileReference" $ do
it "reports card file references with an invalid filepath" $ do
forAll (genValid `suchThat` (not . cleanBy cleanFilePath)) $ \fp ->
forAll genValid $ \cn ->
CardFileReference fp cn `shouldNotSatisfy`
cleanBy cleanCardFileReference
it "reports card file references with an invalid card name" $ do
forAll genValid $ \fp ->
forAll
(genValid `suchThat` (not . cleanBy cleanCardNameReference)) $ \cn ->
CardFileReference fp (Just cn) `shouldNotSatisfy`
cleanBy cleanCardFileReference
it
"doesn't report card file references with a valid card name reference and valid filepath" $ do
forAll (genValid `suchThat` cleanBy cleanFilePath) $ \fp ->
forAll (genValid `suchThat` cleanBy cleanCardNameReference) $ \cn ->
CardFileReference fp (Just cn) `shouldSatisfy`
cleanBy cleanCardFileReference
pend
describe "cleanFilePath" $ do
it "reports empty an filepath" $ do filePathDirty []
let nonNull = genValid `suchThat` (not . null)
it "reports filepaths with newlines" $ do
forAll (nonNull `suchThat` containsNewlineCharacter) filePathDirty
let withoutNewlines =
nonNull `suchThat` (not . containsNewlineCharacter)
it "reports filepaths with multiple consequtive slashes" $ do
once $
forAll
(withoutNewlines `suchThat`
containsMultipleConsequtiveSlashes)
filePathDirty
let c = filePathClean
it "doesn't report these valid filepaths" $ do
c "noextension"
c ".bashrc"
c "file.txt"
c "Some file with spaces.doc"
c "some/relative/filepath.file"
-- TODO(syd) Use the default config to generate this!
defaultCompilerState :: CompilerState
defaultCompilerState =
CompilerState
{ stateDeploymentKindLocalOverride = Nothing
, stateInto = ""
, stateOutofPrefix = []
}
instanceSpec :: Spec
instanceSpec =
parallel $ do
eqSpec @CompileAssignment
genValidSpec @CompileAssignment
eqSpec @CompileSettings
genValidSpec @CompileSettings
eqSpec @(Deployment FilePath)
genValidSpec @(Deployment FilePath)
jsonSpecOnValid @(Deployment FilePath)
functorSpec @Deployment
eqSpec @(DeploymentDirections FilePath)
genValidSpec @(DeploymentDirections FilePath)
jsonSpecOnValid @(DeploymentDirections FilePath)
functorSpec @DeploymentDirections
eqSpec @PrefixPart
genValidSpec @PrefixPart
eqSpec @CompilerState
genValidSpec @CompilerState
singleCompileDecSpec :: Spec
singleCompileDecSpec =
describe "compileDec" $ do
let s = defaultCompilerState
let c = defaultCompileSettings
let sc = singleShouldCompileTo c s
let nonNull = genValid `suchThat` (not . null)
let validFilePath = nonNull `suchThat` (not . containsNewlineCharacter)
let easyFilePath = validFilePath `suchThat` (not . isPrefixOf ".")
let validFp = genValid `suchThat` cleanBy cleanFilePath
describe "Deploy" $ do
it
"uses the exact right text in source and destination when given valid filepaths without a leading dot" $ do
forAll easyFilePath $ \from ->
forAll easyFilePath $ \to ->
sc
(Deploy from to Nothing)
(Deployment (Directions [from] to) LinkDeployment)
it "handles filepaths with a leading dot correctly" $ do pending
it
"figures out the correct paths in these cases with default config and initial state" $ do
let d = (Deploy "from" "to" $ Just LinkDeployment)
sc d (Deployment (Directions ["from"] "to") LinkDeployment)
it "uses the alternates correctly" $ do pending
it "uses the into's correctly" $ do pending
it "uses the outof's correctly" $ do pending
pend
describe "SparkOff" $ do
it
"adds a single card file reference to the list of cards to spark later" $ do
forAll validFilePath $ \f ->
let cr = CardFile $ CardFileReference f Nothing
d = SparkOff cr
in compileSingleDec d s c `shouldBe` Right (s, ([], [cr]))
it "adds any card reference to the list" $ do pending
pend
let shouldState = shouldResultInState c s
describe "IntoDir" $ do
it "adds the given directory to the into state" $ do
forAll validFp $ \fp ->
shouldState (IntoDir fp) $ s {stateInto = fp}
it "compounds with the input state" $ do
pendingWith "Change the input state to an explicit list first"
pend
describe "OutofDir" $ do
it "adds the given directory to the outof state" $ do
forAll validFp $ \fp ->
shouldState (OutofDir fp) $
s {stateOutofPrefix = [Literal fp]}
pend
describe "DeployKindOverride" $ do
it "modifies the internal deployment kind override" $ do
forAll genValid $ \kind ->
shouldState (DeployKindOverride kind) $
s {stateDeploymentKindLocalOverride = Just kind}
pend
describe "Block" $ do
it "uses a separate scope for its sub-compilation" $ do
forAll genValid $ \ds -> shouldState (Block ds) s
pend
describe "Alternatives" $ do
it
"adds an alternatives prefix to the outof prefix in the compiler state" $ do
forAll (listOf validFilePath) $ \fps ->
shouldState (Alternatives fps) $
s {stateOutofPrefix = [Alts fps]}
pend
runDefaultImpureCompiler :: ImpureCompiler a -> IO (Either CompileError a)
runDefaultImpureCompiler = flip runReaderT defaultCompileSettings . runExceptT
compileUnitSpec :: Spec
compileUnitSpec =
describe "compileUnit" $ do
it "Only ever produces valid results" $
forAll genValid $ \sets ->
validIfSucceeds
(runIdentity .
flip runReaderT sets . runExceptT . compileUnit)
utilsSpec :: Spec
utilsSpec =
parallel $ do
describe "initialState" $ it "is valid" $ isValid initialState
describe "sources" $
it "only produces valid prefix parts" $ producesValid sources
describe "resolvePrefix" $
it "only produces valid paths" $ producesValid resolvePrefix
hopTests :: Spec
hopTests = do
describe "hop test" $ do
dir <- runIO $ resolveDir' "test_resources/hop_test"
let root = dir </> $(mkRelFile "root.sus")
let hop1 = dir </> $(mkRelDir "hop1dir") </> $(mkRelFile "hop1.sus")
let hop2 =
dir </> $(mkRelDir "hop1dir") </> $(mkRelDir "hop2dir") </>
$(mkRelFile "hop2.sus")
let hop3 =
dir </> $(mkRelDir "hop1dir") </> $(mkRelDir "hop2dir") </>
$(mkRelDir "hop3dir") </>
$(mkRelFile "hop3.sus")
it "compiles hop3 correctly" $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference hop3 Nothing
r `shouldBe`
Right
[ Deployment
(Directions ["z/delta"] "d/three")
LinkDeployment
]
it "compiles hop2 correctly" $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference hop2 Nothing
r `shouldBe`
Right
[ Deployment (Directions ["y/gamma"] "c/two") LinkDeployment
, Deployment
(Directions ["hop3dir/z/delta"] "d/three")
LinkDeployment
]
it "compiles hop1 correctly" $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference hop1 Nothing
r `shouldBe`
Right
[ Deployment (Directions ["x/beta"] "b/one") LinkDeployment
, Deployment
(Directions ["hop2dir/y/gamma"] "c/two")
LinkDeployment
, Deployment
(Directions ["hop2dir/hop3dir/z/delta"] "d/three")
LinkDeployment
]
it "compiles root correctly" $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference root Nothing
r `shouldBe`
Right
[ Deployment
(Directions ["u/alpha"] "a/zero")
LinkDeployment
, Deployment
(Directions ["hop1dir/x/beta"] "b/one")
LinkDeployment
, Deployment
(Directions ["hop1dir/hop2dir/y/gamma"] "c/two")
LinkDeployment
, Deployment
(Directions
["hop1dir/hop2dir/hop3dir/z/delta"]
"d/three")
LinkDeployment
]
exactTests :: Spec
exactTests = do
describe "exact tests" $ do
dir <- runIO $ resolveDir' "test_resources/exact_compile_test_src"
forFileInDirss [dir] $ \fp -> do
if fileExtension fp == Just ".res"
then return ()
else do
it (toFilePath fp) $ do
let orig = fp
result <- parseAbsFile $ toFilePath fp <.> "res"
ads <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference orig Nothing
eds <- runDefaultImpureCompiler $ inputCompiled result
ads `shouldBe` eds
hopTestDir :: Path Rel Dir
hopTestDir = $(mkRelDir "hop_test")
compilerBlackBoxTests :: Spec
compilerBlackBoxTests = do
tr <- runIO $ resolveDir' "test_resources"
describe "Succesful compile examples" $ do
let dirs = map (tr </>) [shouldCompileDir, hopTestDir]
forFileInDirss dirs $ \f -> do
it (toFilePath f) $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference f Nothing
r `shouldSatisfy` isRight
describe "Unsuccesfull compile examples" $ do
let dirs = map (tr </>) [shouldNotParseDir, shouldNotCompileDir]
forFileInDirss dirs $ \f -> do
it (toFilePath f) $ do
r <-
runDefaultImpureCompiler $
compileJob $ StrongCardFileReference f Nothing
r `shouldSatisfy` isLeft
| NorfairKing/super-user-spark | test/SuperUserSpark/CompilerSpec.hs | mit | 18,597 | 0 | 24 | 6,794 | 4,055 | 1,941 | 2,114 | 391 | 2 |
module REPL.Parser where
import Letter.Core
import Letter.Parser
import Control.Monad (void)
data Command = Eval Line
| Describe Line
| Import String
| Quit
deriving Show
commandToken long short = choice' $ map (symbol . (':':)) [long, short]
importCmd = Import <$> (commandToken "import" "i" >> filename)
describe = commandToken "describe" "d" >> (Describe <$> line)
commandExp = ((\_ -> Quit) <$> commandToken "quit" "q")
<||> importCmd
<||> describe
<||> (Eval <$> line)
command = commandExp <* space
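-- Illustrative examples (assuming the usual Letter REPL syntax): ":q" or
-- ":quit" parses to 'Quit', ":i prelude.ltr" to an 'Import', ":d expr" to a
-- 'Describe', and any other input is parsed as a line to 'Eval'.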
| harlanhaskins/Letter | Haskell/app/REPL/Parser.hs | mit | 587 | 0 | 11 | 160 | 182 | 102 | 80 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Idle.Screens.Home where
import Data.IORef
import Data.List.NonEmpty
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Idle.Ore
import Graphics.Vty hiding (string)
import Graphics.Vty.Widgets.All
import System.Exit
import System.IO.Unsafe
import Text.Printf
data Zipper a = Zipper
{ left :: [a]
, zFocus :: a
, right :: [a]
}
enter :: NonEmpty a -> Zipper a
enter (m :| ms) = Zipper [] m ms
next :: Zipper a -> Maybe (Zipper a)
next (Zipper ls f (r:rs)) = Just $ Zipper (f:ls) r rs
next _ = Nothing
previous :: Zipper a -> Maybe (Zipper a)
previous (Zipper (l:ls) f rs) = Just $ Zipper ls l (f:rs)
previous _ = Nothing
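-- An illustrative sketch of the zipper: @enter (1 :| [2,3])@ focuses @1@;
-- 'next' shifts the focus right to @2@, 'previous' shifts it back, and both
-- return 'Nothing' when there is nothing further in that direction.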
num :: IORef (Zipper Ore)
num = unsafePerformIO $ ores >>= newIORef . enter
drawDisplay :: Widget FormattedText -> Widget FormattedText -> Widget FormattedText -> IO ()
drawDisplay disp l r = do
(Zipper ls f rs) <- readIORef num
setText l $ if null ls then " " else "◀ "
setText r $ if null rs then " " else " ▶"
setTextWithAttrs disp $ display f
where
showB m | m >= 10000000000 = T.pack $ printf "%.2fB" (fromIntegral (m `div` 10000000) / 100 :: Double)
showB m = T.pack $ show m
plain t = (T.cons '\n' $ T.center 16 ' ' t, def_attr)
line = ("\n", def_attr)
display (Ore n h d v i) = i
++ [ line, plain n
, plain $ "HP: " <> showB h
, plain $ "D: " <> showB d
, plain $ "$" <> showB v]
home :: IO (Widget (Box Table FormattedText), Widget FocusGroup)
home = do
legend <- plainText "[q]uit [u]pgrade [s]ave"
oreDisplay <- plainText "nothing"
leftArrow <- plainText "nothing"
rightArrow <- plainText "nothing"
oreTable <- newTable
[ ColumnSpec (ColFixed 2) (Just AlignLeft) Nothing
, ColumnSpec (ColFixed 20) (Just AlignCenter) Nothing
, ColumnSpec (ColFixed 2) (Just AlignRight) Nothing
]
BorderNone
addRow oreTable [leftArrow, oreDisplay, rightArrow]
homeScreen <- vBox oreTable legend
f <- newFocusGroup
_ <- addToFocusGroup f homeScreen
drawDisplay oreDisplay leftArrow rightArrow
homeScreen `onKeyPressed` \this k _ms -> case k of
KEsc -> exitSuccess
KASCII 'q' -> exitSuccess
KASCII ']' -> do
atomicModifyIORef' num (\z -> (fromMaybe z (next z), ()))
drawDisplay oreDisplay leftArrow rightArrow
return True
KASCII '[' -> do
atomicModifyIORef' num (\z -> (fromMaybe z (previous z), ()))
drawDisplay oreDisplay leftArrow rightArrow
return True
KASCII 't' -> do
vis <- getVisible this
setVisible this (not vis) >> return True
_ -> return False
return (homeScreen, f)
| pikajude/idle | src/Idle/Screens/Home.hs | mit | 2,879 | 0 | 20 | 838 | 1,059 | 529 | 530 | 74 | 6 |
-- BNF Converter: Error Monad
-- Copyright (C) 2004 Author: Aarne Ranta
-- This file comes with NO WARRANTY and may be used FOR ANY PURPOSE.
module ComponentModel.Parsers.ErrM where
-- the Error monad: like Maybe type with error msgs
import Control.Monad (MonadPlus(..), liftM)
data Err a = Ok a | Bad String
deriving (Read, Show, Eq, Ord)
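-- For example (illustrative): @Ok 1 >>= \_ -> Bad "boom"@ evaluates to
-- @Bad "boom"@, while @Bad "e" >>= f@ stays @Bad "e"@ for any @f@.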
instance Monad Err where
return = Ok
fail = Bad
Ok a >>= f = f a
Bad s >>= f = Bad s
instance Functor Err where
fmap = liftM
instance MonadPlus Err where
mzero = Bad "Err.mzero"
mplus (Bad _) y = y
mplus x _ = x
| hephaestus-pl/hephaestus | willian/hephaestus-integrated/asset-base/component-model/src/ComponentModel/Parsers/ErrM.hs | mit | 598 | 0 | 8 | 153 | 169 | 91 | 78 | 15 | 0 |
module Config.AppConfig
( AppConfig (..)
, getAppConfig
) where
import Auth0.Config as Auth0
import Config.Environment (Environment (..))
import Control.Monad (liftM)
import Data.Maybe (fromJust)
import Data.Text as T
import LoadEnv
import qualified Network.Wai.Middleware.RequestLogger.LogEntries as LE
import System.Directory (getAppUserDataDirectory)
import qualified System.Environment as Env
import System.FilePath.Posix ((</>), (<.>))
import qualified Users.Api as UsersApi
type AppName = Text
data AppConfig = AppConfig
{ getAppName :: AppName
, getPort :: Int
, getBasePath :: Text
, getEnv :: Environment
, getLogEntriesConfig :: LE.Config
, getAuthConfig :: Auth0.Config
, getUsersApiConfig :: UsersApi.Config
, getMarketingSiteUrl :: Text
} deriving (Show)
getAppConfig :: AppName -> Environment -> IO AppConfig
getAppConfig appName env = do
loadEnvVars appName env
port <- Env.lookupEnv "PORT"
basePath <- T.pack <$> Env.getEnv "BASEPATH"
leConfig <- logEntriesConfig
authConfig <- auth0Config
usersConfig <- usersApiConfig
marketingSiteUrl <- T.pack <$> Env.getEnv "APP_MARKETING_BASE_PATH"
let webServerPort = maybe 8080 id (liftM read port)
return $ AppConfig
{ getAppName = appName
, getPort = webServerPort
, getBasePath = basePath
, getEnv = env
, getLogEntriesConfig = leConfig
, getAuthConfig = authConfig
, getUsersApiConfig = usersConfig
, getMarketingSiteUrl = marketingSiteUrl
}
usersApiConfig :: IO UsersApi.Config
usersApiConfig = do
basePath <- T.pack <$> Env.getEnv "APP_USERS_API_BASE_PATH"
return $ UsersApi.Config basePath
auth0Config :: IO Auth0.Config
auth0Config = do
clientID <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_CLIENT_ID"
clientSecret <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_CLIENT_SECRET"
redirectURI <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_REDIRECT_URI"
grantType <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_GRANT_TYPE"
basePath <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_BASE_PATH"
apiToken <- T.pack <$> Env.getEnv "APP_AUTH_ZERO_API_TOKEN"
return $ Auth0.Config
clientID
clientSecret
redirectURI
grantType
basePath
apiToken
-- The unsafe call to 'fromJust' is acceptable here
-- since we are bootstrapping the application.
-- If required configuration is not present and parseable,
-- then we should fail to start the app
logEntriesConfig :: IO LE.Config
logEntriesConfig = do
hostname <- Env.getEnv "APP_LOGENTRIES_DATA_DOMAIN"
port <- read <$> Env.getEnv "APP_LOGENTRIES_DATA_PORT"
token <- (fromJust . LE.fromString) <$> Env.getEnv "APP_LOGENTRIES_LOG_KEY"
return $ LE.Config hostname port token
-- loadEnvVars will look for configuration files matching the lowercase
-- environment name in the user's data directory
-- Ex. if the app name is 'cool-app' and the environment is Production,
-- the env vars will be loaded from ~/.cool-app/production.env
-- loadEnvVars will NOT raise an exception if the environment file is not found
loadEnvVars :: AppName -> Environment -> IO ()
loadEnvVars appName env = dataDirectory appName >>= \dataDir -> do
let filePath = dataDir </> envName env <.> "env"
loadEnvFrom $ filePath
where
envName :: Environment -> FilePath
envName = T.unpack . toLower . T.pack . show
dataDirectory :: AppName -> IO FilePath
dataDirectory = getAppUserDataDirectory . T.unpack
| gust/feature-creature | auth-service/src/Config/AppConfig.hs | mit | 3,553 | 0 | 14 | 743 | 789 | 422 | 367 | 77 | 1 |
module Main where
import qualified Dropbox as DB
import System.Exit (exitFailure)
import System.Environment (getArgs)
import System.IO (hGetLine, hPutStrLn, stderr, stdout, stdin)
import qualified Data.ByteString.Char8 as C8
import Control.Monad.IO.Class (liftIO)
hostsDev = DB.Hosts "meta.dbdev.corp.dropbox.com" "api.dbdev.corp.dropbox.com" "api-content.dbdev.corp.dropbox.com"
main :: IO ()
main = do
args <- getArgs
case args of
[appKey, appSecret] -> mainProd appKey appSecret
_ -> do
hPutStrLn stderr "Usage: COMMAND app-key app-secret"
exitFailure
mainProd = main_ DB.hostsDefault
mainDev = main_ hostsDev
mkConfig hosts appKey appSecret = do
base <- DB.mkConfig DB.localeEn appKey appSecret DB.AccessTypeDropbox
return $ base { DB.configHosts = hosts }
auth mgr config = liftIO $ do
-- OAuth
(requestToken, authUrl) <- DB.authStart mgr config Nothing
`dieOnFailure` "Couldn't get request token"
hPutStrLn stdout $ "Request Token: " ++ show requestToken
hPutStrLn stdout $ "Auth URL: " ++ authUrl
hGetLine stdin
(accessToken, userId) <- DB.authFinish mgr config requestToken
`dieOnFailure` "Couldn't get access token"
hPutStrLn stdout $ "Access Token: " ++ show accessToken
return accessToken
accountInfo mgr session = liftIO $ do
hPutStrLn stdout $ "---- Account Info ----"
accountInfo <- DB.getAccountInfo mgr session
`dieOnFailure` "Couldn't get account info"
hPutStrLn stdout $ show accountInfo
rootMetadata mgr session = liftIO $ do
hPutStrLn stdout $ "---- Root Folder ----"
(DB.Meta meta extra, mContents) <- DB.getMetadataWithChildren mgr session "/" Nothing
`dieOnFailure` "Couldn't get root folder listing"
(hash, children) <- case mContents of
Just (DB.FolderContents hash children) -> return (hash, children)
_ -> die "Root is not a folder? What the poop?"
mapM_ ((hPutStrLn stdout).show) children
hPutStrLn stdout $ "---- Root Folder (Again) ----"
secondTime <- DB.getMetadataWithChildrenIfChanged mgr session "/" Nothing hash
`dieOnFailure` "Couldn't get root folder listing again"
hPutStrLn stdout (show secondTime) -- Will almost always print "Nothing" (i.e. "nothing has changed")
addFile mgr session = liftIO $ do
hPutStrLn stdout $ "---- Add File ----"
meta <- DB.putFile mgr session "/Facts.txt" DB.WriteModeAdd (DB.bsRequestBody $ C8.pack "Rian hates types.\n")
`dieOnFailure` "Couldn't add Facts.txt"
hPutStrLn stdout $ show meta
getFileContents mgr session = liftIO $ do
hPutStrLn stdout $ "---- Get File ----"
(meta, contents) <- DB.getFileBs mgr session "/Facts.txt" Nothing
`dieOnFailure` "Couldn't read Facts.txt"
hPutStrLn stdout $ show meta
C8.hPutStrLn stdout contents
main_ :: DB.Hosts -> String -> String -> IO ()
main_ hosts appKey appSecret = do
config <- mkConfig hosts appKey appSecret
DB.withManager $ \mgr -> do
accessToken <- auth mgr config
let session = DB.Session config accessToken
accountInfo mgr session
rootMetadata mgr session
addFile mgr session
getFileContents mgr session
return ()
dieOnFailure :: IO (Either String v) -> String -> IO v
dieOnFailure action errorPrefix = do
ev <- action
case ev of
Left err -> die (errorPrefix ++ ": " ++ err)
Right result -> return result
die message = do
hPutStrLn stderr message
exitFailure
| cakoose/dropbox-sdk-haskell | Examples/Simple.hs | mit | 3,524 | 0 | 15 | 788 | 985 | 477 | 508 | 79 | 2 |
{-# LANGUAGE PackageImports #-}
{-
Instead of generating C code directly, the STG compiler builds *abstract* C, a data structure representing the subset of C to which we want to compile.
Abstract C supports nested definitions of functions and static arrays. These are floated to the top level when transformed into C source code.
-}
module AbstractC where
import Control.Applicative
import Control.Monad
import "mtl" Control.Monad.State
import Data.List
import Data.Maybe
import Text.Printf
data Def = StaticArray { arrayName :: String, arrayType :: String, arrayElems :: [String] }
| Function { functionName :: String, functionCode :: [Statement] }
deriving (Show, Eq)
-- | Generate a C declaration for a Def.
genDecl :: Def -> String
genDecl (StaticArray name ty elems) = printf "static %s %s[%d];\n" ty name (length elems)
genDecl def@(Function name code) = unlines (nested ++ [top])
where
top = printf "StgFunPtr %s();" name
nested = map genDecl $ getNested def
-- | Generate C code for a Def.
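--
-- For instance (an illustrative sketch, not captured output):
-- @genDef (StaticArray "tbl" "StgWord" ["1","2"])@ yields
-- @static StgWord tbl[] = {(StgWord)1, (StgWord)2};@ followed by a newline.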
genDef :: Def -> String
genDef (StaticArray name ty elems) = printf "static %s %s[] = {%s};\n" ty name (intercalate ", " $ map genElem elems)
where
genElem e = printf "(%s)%s" ty e
genDef def@(Function name code) = unlines (nested ++ [top])
where
top = printf "StgFunPtr %s() {\n%s}" name (concat $ catMaybes $ map genStatement code)
nested = map genDef $ getNested def
-- | Represents a statement in a function body.
data Statement = NestedDef Def
| Return String
| Switch String [(String, [Statement])] (Maybe [Statement])
| Code String
deriving (Show, Eq)
-- | Generate C code for a function statement.
genStatement :: Statement -> Maybe String
genStatement (NestedDef _) = Nothing
genStatement (Return expr) = Just $ printf " return %s;\n" expr
genStatement (Switch base cases def) = Just $ printf " switch (%s) {\n%s }\n" base body
where
mkCase (value, code) = printf " case %s:\n%s break;\n" value $ concat $ catMaybes $ map genStatement code
defaultCase Nothing = ""
defaultCase (Just code) = printf " default:\n%s" $ concat $ catMaybes $ map genStatement code
body = concat (map mkCase cases ++ [defaultCase def])
genStatement (Code x) = Just (' ':x ++ ";\n")
data GetNestedState = GNS [Def]
-- | Add a Def to the accumulator state.
addNested :: Def -> State GetNestedState ()
addNested def = do
(GNS defs) <- get
put (GNS (def:defs))
-- | Get a list of inner Defs.
getNested :: Def -> [Def]
getNested fun = let (GNS defs) = execState (mapM_ exNested $ functionCode fun) (GNS []) in defs
-- | Possibly extract a nested Def from a statement.
exNested :: Statement -> State GetNestedState ()
exNested (NestedDef def) = addNested def
exNested _ = return ()
| tcsavage/lazy-compiler | src/AbstractC.hs | mit | 2,860 | 0 | 12 | 636 | 802 | 419 | 383 | 46 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, RecordWildCards #-}
module Text.Blaze.Html.Bootstrap where
import Text.Blaze
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A hiding (form, label)
import Text.Blaze.Html5
import Text.Blaze.Html5.Attributes hiding (form, label)
import Text.Blaze.Internal
import qualified Data.Text as T
import Data.Monoid
import Control.Monad
import Text.Blaze.Html.Utils
formControl = A.class_ "form-control"
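-- | Render a Bootstrap glyphicon span; e.g. (illustrative) @glyphicon "ok"@
-- produces roughly @<span class="glyphicon glyphicon-ok"></span>@.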
glyphicon :: T.Text -> H.Html
glyphicon s = H.span !. ("glyphicon glyphicon-"<>s) $ ""
dataToggle :: T.Text -> Attribute
dataToggle k = H.customAttribute "data-toggle" $ preEscapedToValue $ k
dataTarget :: T.Text -> Attribute
dataTarget k = H.customAttribute "data-target" $ preEscapedToValue $ k
dataDismiss :: T.Text -> Attribute
dataDismiss k = H.customAttribute "data-dismiss" $ preEscapedToValue $ k
type NavTree = [NavItem]
data NavItem = Header H.Html
| Divider
| Link H.Html
| ActiveLink H.Html
| SubTree H.Html NavTree
-- example brand : H.a !. "navbar-brand" ! A.href "#" $ "Project name"
data NavBar = NavBar { navBarBrand :: H.Html,
navBarLeft :: NavTree,
navBarRight :: NavTree }
instance Monoid NavBar where
mempty = NavBar mempty [] []
(NavBar b1 l1 r1) `mappend` (NavBar b2 l2 r2)
= NavBar (b1<>b2) (l1++l2) (r1++r2)
instance ToMarkup NavBar where
toMarkup NavBar{..} = H.div !. "navbar navbar-default"
! role "navigation" $ do
H.div !. "container" $ do
H.div !. "navbar-header" $ do
H.button ! A.type_ "button" !. "navbar-toggle"
! dataToggle "collapse" ! dataTarget "navbar-collapse" $ do
H.span !. "sr-only" $ "Toggle navigation"
H.span !. "icon-bar" $ ""
H.span !. "icon-bar" $ ""
H.span !. "icon-bar" $ ""
navBarBrand
H.div !. "navbar-collapse collapse" $ do
H.ul !. "nav navbar-nav" $ do
mapM_ navBarItem navBarLeft
H.ul !. "nav navbar-nav navbar-right" $ do
mapM_ navBarItem navBarRight
staticNavBar :: NavBar -> H.Html
staticNavBar nb = (H.toHtml nb) !. "navbar-static-top"
fixedNavBar :: NavBar -> H.Html
fixedNavBar nb = (H.toHtml nb) !. "navbar-fixed-top"
navBarItem (Header h) = H.li h
navBarItem (Link h) = H.li h
navBarItem (ActiveLink h) = H.li !. "active" $ h
navBarItem (Divider) = mempty
navBarItem (SubTree hdr items) = H.li !. "dropdown" $ do
H.a ! A.href "#" !. "dropdown-toggle" ! dataToggle "dropdown" $ do
hdr
H.b !. "caret" $ ""
H.ul !. "dropdown-menu" $ do
mapM_ dropdownItem items
dropdownItem (Header h) = H.li !. "dropdown-header" $ h
dropdownItem (Divider) = H.li !. "divider" $ ""
dropdownItem (Link h) = H.li h
dropdownItem (ActiveLink h) = H.li !. "active" $ h
-- TODO http://stackoverflow.com/questions/18023493/bootstrap-3-dropdown-sub-menu-missing
dropdownItem (SubTree hdr items) = error "dropdown submenus not yet implemented"
data LoginRegister = LoginRegister
{ loginFormTitle :: Maybe T.Text,
nameLabel :: T.Text,
loginAction :: T.Text,
registerAction:: T.Text,
registerQuestions :: [(T.Text, T.Text)]
}
--generated by blaze-from-html from http://bootsnipp.com/snippets/featured/loginregister-in-tabbed-interface
login_register_form :: LoginRegister -> Html
login_register_form LoginRegister{..} = do
H.div ! class_ "container" $
H.div ! class_ "row" $ H.div ! class_ "span12" $ H.div ! class_ "" ! A.id "loginModal" $ do
case loginFormTitle of
Nothing -> ""
Just title ->
H.div ! class_ "modal-header" $ do
button ! type_ "button" ! class_ "close" ! dataAttribute "dismiss" "modal" $ mempty
h3 $ toHtml title
H.div ! class_ "modal-body" $ H.div ! class_ "well" $ do
ul ! class_ "nav nav-tabs" $ do
li ! class_ "active" $ a ! href "#login" ! dataAttribute "toggle" "tab" $ "Login"
li $ a ! href "#create" ! dataAttribute "toggle" "tab" $ "Create Account"
H.div ! A.id "myTabContent" ! class_ "tab-content" $ do
H.div ! class_ "tab-pane active" ! A.id "login" $ do
H.form ! class_ "form-horizontal" ! action (toValue loginAction) ! method "POST" $ fieldset $ do
H.div ! A.id "legend" $ legend ! class_ "" $ "Login"
H.div ! class_ "control-group" $ do
-- Username
H.label ! class_ "control-label" ! for (toName nameLabel) $ (toHtml nameLabel)
H.div ! class_ "controls" $ input ! type_ "text" ! A.id (toValue nameLabel) ! name (toName nameLabel) ! placeholder "" ! class_ "input-xlarge"
H.div ! class_ "control-group" $ do
-- Password
H.label ! class_ "control-label" ! for "password" $ "Password"
H.div ! class_ "controls" $ input ! type_ "password" ! A.id "password" ! name "password" ! placeholder "" ! class_ "input-xlarge"
H.div ! class_ "control-group" $ do
-- Button
H.div ! class_ "controls" $ button ! class_ "btn btn-success" $ "Login"
H.div ! class_ "tab-pane" ! A.id "create" $ do
H.form ! class_ "form-horizontal" ! action (toValue registerAction) ! method "POST" $ fieldset $ do
H.div ! A.id "legend" $ legend ! class_ "" $ "Register"
H.div ! class_ "control-group" $ do
H.label ! class_ "control-label" $ (toHtml nameLabel)
H.div ! class_ "controls" $ input ! type_ "text" ! value "" ! name (toName nameLabel) ! class_ "input-xlarge"
H.div ! class_ "control-group" $ do
-- Password
H.label ! class_ "control-label" ! for "password" $ "Password"
H.div ! class_ "controls" $ input ! type_ "password" ! A.id "password" ! name "password" ! placeholder "" ! class_ "input-xlarge"
forM_ registerQuestions $ \(lbl, nm) -> do
H.div ! class_ "control-group" $ do
H.label ! class_ "control-label" $ (toHtml lbl)
H.div ! class_ "controls" $ input ! type_ "text" ! value "" ! name (toValue nm) ! class_ "input-xlarge"
H.div $ button ! class_ "btn btn-primary" $ "Create Account"
toName txt = toValue $ T.toLower $ T.filter (/=' ') txt
modal elemid mtitle mbody mfooter = H.div !. "modal fade" ! A.id elemid ! A.tabindex "-1" ! role "dialog" $ do
H.div !. "modal-dialog" $ do
H.div !. "modal-content" $ do
H.div !. "modal-header" $ do
H.button ! A.type_ "button" !. "close" ! dataDismiss "modal" $ do
H.span (preEscapedToHtml ("×"::T.Text))
H.span !. "sr-only" $ "Close"
H.h4 !. "modal-title" ! A.id "myModalLabel" $ mtitle
H.div !. "modal-body" $ mbody
H.div !. "modal-footer" $ mfooter
progressBar = H.div !. "progress" $ do
H.div !. "progress-bar" ! role "progressbar" ! A.style "width: 0%;" $ ""
postButton :: T.Text -> T.Text -> H.Html -> H.Html
postButton url btnClass label = H.form ! A.action (H.toValue $ url) ! A.method "post" $ do
H.button !. ("btn " <> btnClass) ! A.type_ "submit" $ label
row x = H.div ! class_ "row" $ x
| glutamate/blaze-bootstrap | Text/Blaze/Html/Bootstrap.hs | mit | 7,552 | 0 | 35 | 2,148 | 2,446 | 1,185 | 1,261 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Landing.Markdown (parseMarkdown) where
import Text.Pandoc
import Text.Pandoc.Options
import Data.Set (Set, fromList)
import Network.URI (isAbsoluteURI)
import Landing.Repo (Repo, joinPath)
import qualified Data.ByteString.Lazy.Char8 as C
parseMarkdown :: Repo -> C.ByteString -> C.ByteString
parseMarkdown repo = C.pack .
writeHtmlString writeOptions .
changeURIs repo .
readMarkdown readOptions .
C.unpack
changeURIs :: Repo -> Pandoc -> Pandoc
changeURIs repo = bottomUp (map $ convertURIs repo)
convertURIs :: Repo -> Inline -> Inline
convertURIs repo (Image a (b, c))
| isAbsoluteURI b = Image a (b, c)
| otherwise = Image a (generateGitHubURI repo b, c)
convertURIs _ x = x
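-- | Build a raw.githubusercontent.com URI for a repository-relative path.
-- Illustrative sketch: if @joinPath repo@ were @"user/repo/master"@, then
-- @generateGitHubURI repo "img/logo.png"@ would be
-- @"https://raw.githubusercontent.com/user/repo/master/img/logo.png"@.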
generateGitHubURI :: Repo -> String -> String
generateGitHubURI repo path = concat
[ "https://raw.githubusercontent.com/", joinPath repo, "/", path ]
writeOptions = def
{ writerExtensions = extensions
, writerReferenceLinks = True
, writerSectionDivs = True
, writerHighlight = True }
readOptions = def
{ readerExtensions = extensions }
extensions :: Set Extension
extensions = fromList
[ Ext_pipe_tables
, Ext_raw_html
, Ext_tex_math_single_backslash
, Ext_fenced_code_blocks
, Ext_fenced_code_attributes
, Ext_auto_identifiers
, Ext_ascii_identifiers
, Ext_backtick_code_blocks
, Ext_autolink_bare_uris
, Ext_intraword_underscores
, Ext_strikeout
, Ext_lists_without_preceding_blankline ]
| dennis84/landing-haskell | Landing/Markdown.hs | mit | 1,552 | 0 | 9 | 319 | 386 | 216 | 170 | 45 | 1 |
{-# LANGUAGE CPP #-}
module Cabal
( getPackageGhcOpts
, findCabalFile, findFile
) where
import Stack
import Control.Exception (IOException, catch)
import Control.Monad (when)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (execStateT, modify)
import Data.Char (isSpace)
import Data.List (foldl', nub, sort, find, isPrefixOf, isSuffixOf)
import Data.Maybe (isJust)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
import Data.Monoid (Monoid(..))
#endif
#if __GLASGOW_HASKELL__ < 802
import Distribution.Package (PackageIdentifier(..), PackageName)
#endif
import Distribution.PackageDescription (PackageDescription(..), Executable(..), TestSuite(..), Benchmark(..), emptyHookedBuildInfo, buildable, libBuildInfo)
import qualified Distribution.PackageDescription as Distribution
#if MIN_VERSION_Cabal(2, 2, 0)
import qualified Distribution.PackageDescription.Parsec as Distribution
#else
import qualified Distribution.PackageDescription.Parse as Distribution
#endif
import Distribution.Simple.Configure (configure)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..), Component(..), componentName, getComponentLocalBuildInfo, componentBuildInfo)
import Distribution.Simple.Compiler (PackageDB(..))
import Distribution.Simple.Command (CommandParse(..), commandParseArgs)
import Distribution.Simple.GHC (componentGhcOptions)
import Distribution.Simple.Program (defaultProgramConfiguration)
import Distribution.Simple.Program.Db (lookupProgram)
import Distribution.Simple.Program.Types (ConfiguredProgram(programVersion), simpleProgram)
import Distribution.Simple.Program.GHC (GhcOptions(..), renderGhcOptions)
import Distribution.Simple.Setup (ConfigFlags(..), defaultConfigFlags, configureCommand, toFlag, flagToMaybe)
#if MIN_VERSION_Cabal(1,21,1)
import Distribution.Utils.NubList
#endif
import qualified Distribution.Simple.GHC as GHC(configure)
import Distribution.Verbosity (silent)
import qualified Distribution.Verbosity as Distribution
import Distribution.Version
import System.IO.Error (ioeGetErrorString)
import System.Directory (doesFileExist, doesDirectoryExist, getDirectoryContents)
import System.FilePath (takeDirectory, splitFileName, (</>))
readGenericPackageDescription :: Distribution.Verbosity -> FilePath -> IO Distribution.GenericPackageDescription
#if MIN_VERSION_Cabal(2, 0, 0)
readGenericPackageDescription = Distribution.readGenericPackageDescription
#else
readGenericPackageDescription = Distribution.readPackageDescription
#endif
-- TODO: Fix callsites so we don't need `allComponentsBy`. It was taken from
-- http://hackage.haskell.org/package/Cabal-1.16.0.3/docs/src/Distribution-Simple-LocalBuildInfo.html#allComponentsBy
-- since it doesn't exist in Cabal 1.18.*
--
-- | Obtains all components (libs, exes, or test suites), transformed by the
-- given function. Useful for gathering dependencies with component context.
allComponentsBy :: PackageDescription
-> (Component -> a)
-> [a]
allComponentsBy pkg_descr f =
[ f (CLib lib) | Just lib <- [library pkg_descr]
, buildable (libBuildInfo lib) ]
++ [ f (CExe exe) | exe <- executables pkg_descr
, buildable (buildInfo exe) ]
++ [ f (CTest tst) | tst <- testSuites pkg_descr
, buildable (testBuildInfo tst)]
++ [ f (CBench bm) | bm <- benchmarks pkg_descr
, buildable (benchmarkBuildInfo bm)]
stackifyFlags :: ConfigFlags -> Maybe StackConfig -> ConfigFlags
stackifyFlags cfg Nothing = cfg
stackifyFlags cfg (Just si) = cfg { configHcPath = toFlag ghc
, configHcPkg = toFlag ghcPkg
, configDistPref = toFlag dist
, configPackageDBs = pdbs
}
where
pdbs = [Nothing, Just GlobalPackageDB] ++ pdbs'
pdbs' = Just . SpecificPackageDB <$> stackDbs si
dist = stackDist si
ghc = stackGhcBinDir si </> "ghc"
ghcPkg = stackGhcBinDir si </> "ghc-pkg"
-- via: https://groups.google.com/d/msg/haskell-stack/8HJ6DHAinU0/J68U6AXTsasJ
-- cabal configure --package-db=clear --package-db=global --package-db=$(stack path --snapshot-pkg-db) --package-db=$(stack path --local-pkg-db)
getPackageGhcOpts :: FilePath -> Maybe StackConfig -> [String] -> IO (Either String [String])
getPackageGhcOpts path mbStack opts =
getPackageGhcOpts' `catch` (\e ->
return $ Left $ "Cabal error: " ++ ioeGetErrorString (e :: IOException))
where
getPackageGhcOpts' :: IO (Either String [String])
getPackageGhcOpts' = do
genPkgDescr <- readGenericPackageDescription silent path
distDir <- getDistDir
-- TODO(SN): defaultProgramConfiguration is deprecated
let programCfg = defaultProgramConfiguration
let initCfgFlags = (defaultConfigFlags programCfg)
{ configDistPref = toFlag distDir
-- TODO: figure out how to find out this flag
, configUserInstall = toFlag True
-- configure with --enable-tests to include test dependencies/modules
, configTests = toFlag True
-- configure with --enable-benchmarks to include benchmark dependencies/modules
, configBenchmarks = toFlag True
}
let initCfgFlags' = stackifyFlags initCfgFlags mbStack
cfgFlags <- flip execStateT initCfgFlags' $ do
let sandboxConfig = takeDirectory path </> "cabal.sandbox.config"
exists <- lift $ doesFileExist sandboxConfig
when exists $ do
sandboxPackageDb <- lift $ getSandboxPackageDB sandboxConfig
modify $ \x -> x { configPackageDBs = [Just sandboxPackageDb] }
let cmdUI = configureCommand programCfg
case commandParseArgs cmdUI True opts of
CommandReadyToGo (modFlags, _) -> modify modFlags
CommandErrors (e:_) -> error e
_ -> return ()
localBuildInfo <- configure (genPkgDescr, emptyHookedBuildInfo) cfgFlags
let baseDir = fst . splitFileName $ path
case getGhcVersion localBuildInfo of
Nothing -> return $ Left "GHC is not configured"
Just ghcVersion -> do
#if __GLASGOW_HASKELL__ < 802
let pkgDescr = localPkgDescr localBuildInfo
let mbLibName = pkgLibName pkgDescr
#endif
let ghcOpts' = foldl' mappend mempty . map (getComponentGhcOptions localBuildInfo) .
flip allComponentsBy id . localPkgDescr $ localBuildInfo
-- FIX bug in GhcOptions' `mappend`
#if MIN_VERSION_Cabal(2,4,0)
-- API Change, just for the glory of Satan:
-- Distribution.Simple.Program.GHC.GhcOptions no longer uses NubListR's
ghcOpts = ghcOpts' { ghcOptExtra = filter (/= "-Werror") $ ghcOptExtra ghcOpts'
#elif MIN_VERSION_Cabal(1,21,1)
-- API Change:
-- Distribution.Simple.Program.GHC.GhcOptions now uses NubListR's
-- GhcOptions { .. ghcOptPackages :: NubListR (InstalledPackageId, PackageId, ModuleRemaining) .. }
ghcOpts = ghcOpts' { ghcOptExtra = overNubListR (filter (/= "-Werror")) $ ghcOptExtra ghcOpts'
#endif
#if MIN_VERSION_Cabal(1,21,1)
#if __GLASGOW_HASKELL__ >= 709
, ghcOptPackageDBs = sort $ nub (ghcOptPackageDBs ghcOpts')
#endif
#if __GLASGOW_HASKELL__ < 802
, ghcOptPackages = overNubListR (filter (\(_, pkgId, _) -> Just (pkgName pkgId) /= mbLibName)) $ (ghcOptPackages ghcOpts')
#endif
, ghcOptSourcePath = overNubListR (map (baseDir </>)) (ghcOptSourcePath ghcOpts')
}
#else
-- GhcOptions { .. ghcOptPackages :: [(InstalledPackageId, PackageId)] .. }
let ghcOpts = ghcOpts' { ghcOptExtra = filter (/= "-Werror") $ nub $ ghcOptExtra ghcOpts'
, ghcOptPackages = filter (\(_, pkgId) -> Just (pkgName pkgId) /= mbLibName) $ nub (ghcOptPackages ghcOpts')
, ghcOptSourcePath = map (baseDir </>) (ghcOptSourcePath ghcOpts')
}
#endif
let hcPath = flagToMaybe . configHcPath $ configFlags localBuildInfo
let pkgPath = flagToMaybe . configHcPkg $ configFlags localBuildInfo
-- TODO(SN): defaultProgramConfiguration is deprecated
(ghcInfo, mbPlatform, _) <- GHC.configure silent hcPath pkgPath defaultProgramConfiguration
putStrLn $ "Configured GHC " ++ show ghcVersion
++ " " ++ show mbPlatform
#if MIN_VERSION_Cabal(1,23,2)
-- API Change:
-- Distribution.Simple.Program.GHC.renderGhcOptions now takes Platform argument
-- renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String]
return $ case mbPlatform of
Just platform -> Right $ renderGhcOptions ghcInfo platform ghcOpts
Nothing -> Left "GHC.configure did not return platform"
#else
#if MIN_VERSION_Cabal(1,20,0)
-- renderGhcOptions :: Compiler -> GhcOptions -> [String]
return $ Right $ renderGhcOptions ghcInfo ghcOpts
#else
-- renderGhcOptions :: Version -> GhcOptions -> [String]
return $ Right $ renderGhcOptions ghcVersion ghcOpts
#endif
#endif
-- returns the right 'dist' directory in the case of a sandbox
getDistDir = do
let dir = takeDirectory path </> "dist"
exists <- doesDirectoryExist dir
if not exists then return dir else do
contents <- getDirectoryContents dir
return . maybe dir (dir </>) $ find ("dist-sandbox-" `isPrefixOf`) contents
#if __GLASGOW_HASKELL__ < 802
pkgLibName :: PackageDescription -> Maybe PackageName
pkgLibName pkgDescr = if hasLibrary pkgDescr
then Just $ pkgName . package $ pkgDescr
else Nothing
#endif
hasLibrary :: PackageDescription -> Bool
hasLibrary = isJust . library
getComponentGhcOptions :: LocalBuildInfo -> Component -> GhcOptions
getComponentGhcOptions lbi comp =
componentGhcOptions silent lbi bi clbi (buildDir lbi)
where bi = componentBuildInfo comp
-- TODO(SN): getComponentLocalBuildInfo is deprecated as of Cabal-2.0.0.2
clbi = getComponentLocalBuildInfo lbi (componentName comp)
getGhcVersion :: LocalBuildInfo -> Maybe Version
getGhcVersion lbi = let db = withPrograms lbi
in do ghc <- lookupProgram (simpleProgram "ghc") db
programVersion ghc
getSandboxPackageDB :: FilePath -> IO PackageDB
getSandboxPackageDB sandboxPath = do
contents <- readFile sandboxPath
return $ SpecificPackageDB $ extractValue . parse $ contents
where
pkgDbKey = "package-db:"
parse = head . filter (pkgDbKey `isPrefixOf`) . lines
extractValue = takeWhile (`notElem` "\n\r") . dropWhile isSpace . drop (length pkgDbKey)
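-- For example, given a cabal.sandbox.config containing a (hypothetical) line
--
-- package-db: /home/alice/proj/.cabal-sandbox/x86_64-linux-ghc-7.10.3-packages.conf.d
--
-- the parser above yields
--
-- SpecificPackageDB "/home/alice/proj/.cabal-sandbox/x86_64-linux-ghc-7.10.3-packages.conf.d"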
-- | looks for file matching a predicate starting from dir and going up until root
findFile :: (FilePath -> Bool) -> FilePath -> IO (Maybe FilePath)
findFile p dir = do
allFiles <- getDirectoryContents dir
case find p allFiles of
Just cabalFile -> return $ Just $ dir </> cabalFile
Nothing ->
let parentDir = takeDirectory dir
in if parentDir == dir
then return Nothing
else findFile p parentDir
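-- Example (hypothetical paths):
--
-- findFile ("stack.yaml" ==) "/home/alice/proj/src"
--
-- looks for an entry named "stack.yaml" in src/, then in /home/alice/proj,
-- then /home/alice, and so on, returning e.g. Just "/home/alice/proj/stack.yaml",
-- or Nothing once the filesystem root has been reached without a match.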
findCabalFile :: FilePath -> IO (Maybe FilePath)
findCabalFile = findFile isCabalFile
where
isCabalFile :: FilePath -> Bool
isCabalFile path = ".cabal" `isSuffixOf` path
&& length path > length ".cabal"
| hdevtools/hdevtools | src/Cabal.hs | mit | 11,960 | 0 | 28 | 3,017 | 2,242 | 1,213 | 1,029 | 147 | 5 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE UnicodeSyntax #-}
module Unison.FileParsers where
import Unison.Prelude
import Control.Lens (view, _3)
import qualified Unison.Parser as Parser
import Control.Monad.State (evalStateT)
import Control.Monad.Writer (tell)
import Data.Bifunctor ( first )
import qualified Data.Foldable as Foldable
import qualified Data.Map as Map
import Data.List (partition)
import qualified Data.Set as Set
import qualified Data.Sequence as Seq
import Data.Text (unpack)
import qualified Unison.ABT as ABT
import qualified Unison.Blank as Blank
import qualified Unison.Name as Name
import qualified Unison.Names3 as Names
import Unison.Parser (Ann)
import qualified Unison.Parsers as Parsers
import qualified Unison.Referent as Referent
import Unison.Reference (Reference)
import Unison.Result (Note (..), Result, pattern Result, ResultT, CompilerBug(..))
import qualified Unison.Result as Result
import qualified Unison.Term as Term
import qualified Unison.Type as Type
import qualified Unison.Typechecker as Typechecker
import qualified Unison.Typechecker.TypeLookup as TL
import qualified Unison.Typechecker.Context as Context
import qualified Unison.UnisonFile as UF
import qualified Unison.Util.List as List
import qualified Unison.Util.Relation as Rel
import Unison.Var (Var)
import qualified Unison.Var as Var
import Unison.Names3 (Names0)
type Term v = Term.Term v Ann
type Type v = Type.Type v Ann
type UnisonFile v = UF.UnisonFile v Ann
type Result' v = Result (Seq (Note v Ann))
debug :: Bool
debug = False
convertNotes :: Ord v => Typechecker.Notes v ann -> Seq (Note v ann)
convertNotes (Typechecker.Notes bugs es is) =
(CompilerBug . TypecheckerBug <$> bugs) <> (TypeError <$> es) <> (TypeInfo <$> Seq.fromList is') where
is' = snd <$> List.uniqueBy' f ([(1::Word)..] `zip` Foldable.toList is)
f (_, Context.TopLevelComponent cs) = Right [ v | (v,_,_) <- cs ]
f (i, _) = Left i
-- each round of TDNR emits its own TopLevelComponent notes, so we remove
-- duplicates (based on var name and location), preferring the later note as
-- that will have the latest typechecking info
parseAndSynthesizeFile
:: (Var v, Monad m)
=> [Type v]
-> (Set Reference -> m (TL.TypeLookup v Ann))
-> Parser.ParsingEnv
-> FilePath
-> Text
-> ResultT
(Seq (Note v Ann))
m
(Either Names0 (UF.TypecheckedUnisonFile v Ann))
parseAndSynthesizeFile ambient typeLookupf env filePath src = do
when debug $ traceM "parseAndSynthesizeFile"
uf <- Result.fromParsing $ Parsers.parseFile filePath (unpack src) env
let names0 = Names.currentNames (Parser.names env)
(tm, tdnrMap, typeLookup) <- resolveNames typeLookupf names0 uf
let (Result notes' r) = synthesizeFile ambient typeLookup tdnrMap uf tm
tell notes' $> maybe (Left (UF.toNames uf )) Right r
type TDNRMap v = Map Typechecker.Name [Typechecker.NamedReference v Ann]
resolveNames
:: (Var v, Monad m)
=> (Set Reference -> m (TL.TypeLookup v Ann))
-> Names.Names0
-> UnisonFile v
-> ResultT
(Seq (Note v Ann))
m
(Term v, TDNRMap v, TL.TypeLookup v Ann)
resolveNames typeLookupf preexistingNames uf = do
let tm = UF.typecheckingTerm uf
deps = Term.dependencies tm
possibleDeps = [ (Name.toText name, Var.name v, r) |
(name, r) <- Rel.toList (Names.terms0 preexistingNames),
v <- Set.toList (Term.freeVars tm),
name `Name.endsWithSegments` Name.fromVar v ]
possibleRefs = Referent.toReference . view _3 <$> possibleDeps
tl <- lift . lift . fmap (UF.declsToTypeLookup uf <>)
$ typeLookupf (deps <> Set.fromList possibleRefs)
-- For populating the TDNR environment, we pick definitions
-- from the namespace and from the local file whose full name
-- has a suffix that equals one of the free variables in the file.
-- Example, the namespace has [foo.bar.baz, qux.quaffle] and
  -- the file has definitions [utils.zonk, utils.blah] and
-- the file has free variables [bar.baz, zonk].
--
-- In this case, [foo.bar.baz, utils.zonk] are used to create
-- the TDNR environment.
let fqnsByShortName = List.multimap $
-- external TDNR possibilities
[ (shortname, nr) |
(name, shortname, r) <- possibleDeps,
typ <- toList $ TL.typeOfReferent tl r,
let nr = Typechecker.NamedReference name typ (Right r) ] <>
-- local file TDNR possibilities
[ (Var.name v, nr) |
(name, r) <- Rel.toList (Names.terms0 $ UF.toNames uf),
v <- Set.toList (Term.freeVars tm),
name `Name.endsWithSegments` Name.fromVar v,
typ <- toList $ TL.typeOfReferent tl r,
let nr = Typechecker.NamedReference (Name.toText name) typ (Right r) ]
pure (tm, fqnsByShortName, tl)
synthesizeFile'
:: forall v
. Var v
=> [Type v]
-> TL.TypeLookup v Ann
-> UnisonFile v
-> Result (Seq (Note v Ann)) (UF.TypecheckedUnisonFile v Ann)
synthesizeFile' ambient tl uf =
synthesizeFile ambient tl mempty uf $ UF.typecheckingTerm uf
synthesizeFile
:: forall v
. Var v
=> [Type v]
-> TL.TypeLookup v Ann
-> TDNRMap v
-> UnisonFile v
-> Term v
-> Result (Seq (Note v Ann)) (UF.TypecheckedUnisonFile v Ann)
synthesizeFile ambient tl fqnsByShortName uf term = do
let -- substitute Blanks for any remaining free vars in UF body
tdnrTerm = Term.prepareTDNR term
env0 = Typechecker.Env ambient tl fqnsByShortName
Result notes mayType =
evalStateT (Typechecker.synthesizeAndResolve env0) tdnrTerm
-- If typechecking succeeded, reapply the TDNR decisions to user's term:
Result (convertNotes notes) mayType >>= \_typ -> do
let infos = Foldable.toList $ Typechecker.infos notes
(topLevelComponents :: [[(v, Term v, Type v)]]) <-
let
topLevelBindings :: Map v (Term v)
topLevelBindings = Map.mapKeys Var.reset $ extractTopLevelBindings tdnrTerm
extractTopLevelBindings (Term.LetRecNamedAnnotatedTop' True _ bs body) =
Map.fromList (first snd <$> bs) <> extractTopLevelBindings body
extractTopLevelBindings _ = Map.empty
tlcsFromTypechecker =
List.uniqueBy' (fmap vars)
[ t | Context.TopLevelComponent t <- infos ]
where vars (v, _, _) = v
strippedTopLevelBinding (v, typ, redundant) = do
tm <- case Map.lookup v topLevelBindings of
Nothing ->
Result.compilerBug $ Result.TopLevelComponentNotFound v term
Just (Term.Ann' x _) | redundant -> pure x
Just x -> pure x
-- The Var.reset removes any freshening added during typechecking
pure (Var.reset v, tm, typ)
in
-- use tlcsFromTypechecker to inform annotation-stripping decisions
traverse (traverse strippedTopLevelBinding) tlcsFromTypechecker
let doTdnr = applyTdnrDecisions infos
doTdnrInComponent (v, t, tp) = (\t -> (v, t, tp)) <$> doTdnr t
_ <- doTdnr tdnrTerm
tdnredTlcs <- (traverse . traverse) doTdnrInComponent topLevelComponents
let (watches', terms') = partition isWatch tdnredTlcs
isWatch = all (\(v,_,_) -> Set.member v watchedVars)
watchedVars = Set.fromList [ v | (v, _) <- UF.allWatches uf ]
tlcKind [] = error "empty TLC, should never occur"
tlcKind tlc@((v,_,_):_) = let
hasE k =
elem v . fmap fst $ Map.findWithDefault [] k (UF.watches uf)
in case Foldable.find hasE (Map.keys $ UF.watches uf) of
Nothing -> error "wat"
Just kind -> (kind, tlc)
pure $ UF.typecheckedUnisonFile
(UF.dataDeclarationsId uf)
(UF.effectDeclarationsId uf)
terms'
(map tlcKind watches')
where
applyTdnrDecisions
:: [Context.InfoNote v Ann]
-> Term v
-> Result' v (Term v)
applyTdnrDecisions infos tdnrTerm = foldM go tdnrTerm decisions
where
-- UF data/effect ctors + builtins + TLC Term.vars
go term _decision@(shortv, loc, replacement) =
ABT.visit (resolve shortv loc replacement) term
decisions =
[ (v, loc, replacement) | Context.Decision v loc replacement <- infos ]
-- resolve (v,loc) in a matching Blank to whatever `fqn` maps to in `names`
resolve shortv loc replacement t = case t of
Term.Blank' (Blank.Recorded (Blank.Resolve loc' name))
| loc' == loc && Var.nameStr shortv == name ->
-- loc of replacement already chosen correctly by whatever made the
-- Decision
pure . pure $ replacement
_ -> Nothing
| unisonweb/platform | parser-typechecker/src/Unison/FileParsers.hs | mit | 9,073 | 0 | 25 | 2,429 | 2,616 | 1,381 | 1,235 | -1 | -1 |
module Language.TaPL.TypedArith.Parser (parseString, parseFile) where
import qualified Text.Parsec.Token as Token
import Text.Parsec.Language (emptyDef)
import Text.Parsec.Prim (parse)
import Text.Parsec.String (Parser)
import Control.Applicative ((<|>))
import Control.Monad (liftM)
import Language.TaPL.TypedArith.Syntax (Term(..), integerToTerm)
-- This module is adapted from: http://www.haskell.org/haskellwiki/Parsing_a_simple_imperative_language
booleanDef =
emptyDef { Token.reservedNames = [ "if"
, "then"
, "else"
, "true"
, "false"
, "zero"
, "succ"
, "pred"
, "iszero"
]
}
lexer = Token.makeTokenParser booleanDef
reserved = Token.reserved lexer -- parses a reserved name
parens = Token.parens lexer -- parses surrounding parenthesis:
-- parens p
-- takes care of the parenthesis and
-- uses p to parse what's inside them
whiteSpace = Token.whiteSpace lexer -- parses whitespace
integer = Token.integer lexer -- parses an integer
booleanParser :: Parser Term
booleanParser = whiteSpace >> expr
expr :: Parser Term
expr = parens expr
<|> ifExpr
<|> (reserved "true" >> return TmTrue)
<|> (reserved "false" >> return TmFalse)
<|> arithExpr
ifExpr :: Parser Term
ifExpr = do reserved "if"
t1 <- expr
reserved "then"
t2 <- expr
reserved "else"
t3 <- expr
return $ TmIf t1 t2 t3
arithExpr :: Parser Term
arithExpr = (reserved "zero" >> return TmZero)
<|> predExpr
<|> succExpr
<|> iszeroExpr
<|> liftM integerToTerm integer
predExpr :: Parser Term
predExpr = oneArgExprHelper TmPred "pred"
succExpr :: Parser Term
succExpr = oneArgExprHelper TmSucc "succ"
iszeroExpr :: Parser Term
iszeroExpr = oneArgExprHelper TmIsZero "iszero"
oneArgExprHelper :: (Term -> Term) -> String -> Parser Term
oneArgExprHelper constructor word = do reserved word
t <- expr
return $ constructor t
parseString :: String -> Term
parseString str =
case parse booleanParser "" str of
Left e -> error $ show e
Right t -> t
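-- Illustrative inputs and the terms they parse to (a sketch; 'integerToTerm'
-- comes from Language.TaPL.TypedArith.Syntax):
--
-- parseString "if iszero zero then true else false"
-- == TmIf (TmIsZero TmZero) TmTrue TmFalse
--
-- parseString "succ 2" == TmSucc (integerToTerm 2)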
parseFile :: String -> IO Term
parseFile file =
do program <- readFile file
case parse booleanParser "" program of
Left e -> print e >> fail "parse error"
Right t -> return t
| zeckalpha/TaPL | src/Language/TaPL/TypedArith/Parser.hs | mit | 2,844 | 0 | 11 | 1,073 | 625 | 325 | 300 | 66 | 2 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<!-- title -->
<title>Hilfe zu j-Algo</title>
<!-- maps -->
<maps>
<homeID>main.intro</homeID>
<mapref location="map.jhm"/>
</maps>
<!-- presentations -->
   <presentation default="true">
<name>main window</name>
<size width="750" height="460" />
<location x="120" y="180" />
<image>mainicon</image>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction>javax.help.ReloadAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction image="Startseite">javax.help.HomeAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.PrintAction</helpaction>
<helpaction>javax.help.PrintSetupAction</helpaction>
</toolbar>
</presentation>
<!-- views -->
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Inhaltsverzeichnis</label>
<type>javax.help.TOCView</type>
<data>mainTOC.xml</data>
</view>
<view mergetype="javax.help.UniteAppendMerge">
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>mainIndex.xml</data>
</view>
<view xml:lang="de">
<name>Search</name>
<label>Suche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
</helpset> | jurkov/j-algo-mod | res/main/help/jhelp/main_help.hs | gpl-2.0 | 1,632 | 139 | 34 | 220 | 658 | 334 | 324 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE CPP #-}
-- | The main database ORM interface. This module contains
-- functionality for moving a Haskell data structure in and out of a
-- database table.
--
-- The most important feature is the 'Model' class, which encodes a
-- typed database interface (i.e., the ORM layer). This class has a
-- default implementation for types that are members of the 'Generic'
-- class (using GHC's @DeriveGeneric@ extension), provided the
-- following conditions hold:
--
-- 1. The data type must have a single constructor that is defined
-- using record selector syntax.
--
-- 2. The very first field of the data type must be a 'DBKey' to
-- represent the primary key. Other orders will cause a
-- compilation error.
--
-- 3. Every field of the data structure must be an instance of
-- 'FromField' and 'ToField'.
--
-- If these three conditions hold and your database naming scheme
-- follows the conventions of 'defaultModelInfo'--namely that the
-- table name is the same as the type name with the first character
-- downcased, and the field names are the same as the column
-- names--then it is reasonable to have a completely empty (default)
-- instance declaration:
--
-- > data MyType = MyType { myKey :: !DBKey
-- > , myName :: !S.ByteString
-- > , myCamelCase :: !Int
-- > , ...
-- > } deriving (Show, Generic)
-- > instance Model MyType
--
-- The default 'modelInfo' method is called 'defaultModelInfo'. You
-- may wish to use almost all of the defaults, but tweak a few things.
-- This is easily accomplished by overriding a few fields of the
-- default structure. For example, suppose your database columns use
-- exactly the same name as your Haskell field names, but the name of
-- your database table is not the same as the name of the Haskell data
-- type. You can override the database table name (field 'modelTable')
-- as follows:
--
-- > instance Model MyType where
-- > modelInfo = defaultModelInfo { modelTable = "my_type" }
--
-- Finally, if you dislike the conventions followed by
-- 'defaultModelInfo', you can simply implement an alternate pattern.
-- An example of this is 'underscoreModelInfo', which strips a prefix
-- off every field name and converts everything from camel-case to
-- underscore notation:
--
-- > instance Model MyType where
-- > modelInfo = underscoreModelInfo "my"
--
-- The above code will associate @MyType@ with a database table
-- @my_type@ having column names @key@, @name@, @camel_case@, etc.
--
-- You can implement other patterns like 'underscoreModelInfo' by
-- calling 'defaultModelInfo' and modifying the results.
-- Alternatively, you can directly call the lower-level functions from
-- which 'defaultModelInfo' is built ('defaultModelTable',
-- 'defaultModelColumns', 'defaultModelGetPrimaryKey').
module Database.PostgreSQL.ORM.Model (
-- * The Model class
Model(..), ModelInfo(..), ModelIdentifiers(..), ModelQueries(..)
, underscoreModelInfo
-- * Data types for holding primary keys
, DBKeyType, DBKey(..), isNullKey
, DBRef, DBRefUnique, GDBRef(..), mkDBRef
-- * Database operations on Models
, findAll, findRow, save, save_, trySave, destroy, destroyByRef
-- * Functions for accessing and using Models
, modelName, primaryKey, modelSelectFragment
, LookupRow(..), UpdateRow(..), InsertRow(..)
-- * Table aliases
, As(..), fromAs, toAs, RowAlias(..)
-- * Low-level functions providing manual access to defaults
, defaultModelInfo
, defaultModelTable, defaultModelColumns, defaultModelGetPrimaryKey
, defaultModelIdentifiers
, defaultModelWrite
, defaultModelQueries
, defaultModelLookupQuery, defaultModelUpdateQuery
, defaultModelInsertQuery, defaultModelDeleteQuery
-- * Helper functions and miscellaneous internals
, quoteIdent, NormalRef(..), UniqueRef(..)
, ModelCreateInfo(..), emptyModelCreateInfo
, defaultFromRow, defaultToRow
, printq
-- ** Helper classes
-- $HelperClasses
, GPrimaryKey0, GColumns, GDatatypeName
, GFromRow, GToRow
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import qualified Data.Aeson as A
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Char
import Data.Data
import Data.Int
import qualified Data.HashMap.Strict as H
import Data.Maybe
import Data.Monoid
import Data.List hiding (find)
import Data.String
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.ORM.Validations
import GHC.Generics
import Database.PostgreSQL.Escape (quoteIdent)
-- | A type large enough to hold database primary keys. Do not use
-- this type directly in your data structures. Use 'DBKey' to hold a
-- `Model`'s primary key and 'DBRef' to reference the primary key of
-- another model.
type DBKeyType = Int64
-- | The type of the Haskell data structure field containing a model's
-- primary key.
--
-- Every 'Model' must have exactly one @DBKey@, and the @DBKey@ must
-- be the `Model`'s very first field in the Haskel data type
-- definition. (The ordering is enforced by
-- 'defaultModelGetPrimaryKey', which, through use of the
-- @DeriveGeneric@ extension, fails to compile when the first field is
-- not a @DBKey@.)
--
-- Each 'Model' stored in the database should have a unique non-null
-- primary key. However, the key is determined at the time the
-- 'Model' is inserted into the database. While you are constructing
-- a new 'Model' to insert, you will not have its key. Hence, you
-- should use the value @NullKey@ to let the database chose the key.
--
-- If you wish to store a `Model`'s primary key as a reference in
-- another 'Model', do not copy the 'DBKey' structure. Use 'mkDBRef'
-- to convert the `Model`'s primary key to a foreign key reference.
data DBKey = DBKey !DBKeyType | NullKey deriving (Data, Typeable, Generic)
instance A.ToJSON DBKey where
toJSON NullKey = A.Null
toJSON (DBKey k) = A.toJSON k
instance A.FromJSON DBKey where
parseJSON (A.Number a) = return $ DBKey (floor a)
parseJSON A.Null = return NullKey
parseJSON _ = fail "Expected Number or Null"
instance Eq DBKey where
(DBKey a) == (DBKey b) = a == b
_ == _ = error "compare NullKey"
instance Ord DBKey where
compare (DBKey a) (DBKey b) = compare a b
compare _ _ = error "compare NullKey"
instance Show DBKey where
showsPrec n (DBKey k) = showsPrec n k
showsPrec _ NullKey = ("NullKey" ++)
instance FromField DBKey where
fromField _ Nothing = pure NullKey
fromField f bs = DBKey <$> fromField f bs
instance ToField DBKey where
toField (DBKey k) = toField k
toField NullKey = toField Null
-- | Returns 'True' when a 'DBKey' is 'NullKey'.
isNullKey :: DBKey -> Bool
isNullKey NullKey = True
isNullKey _ = False
-- | Many operations can take either a 'DBRef' or a 'DBRefUnique'
-- (both of which consist internally of a 'DBKeyType'). Hence, these
-- two types are just type aliases to a generalized reference type
-- @GDBRef@, where @GDBRef@'s first type argument, @reftype@, is a
-- phantom type denoting the flavor of reference ('NormalRef' or
-- 'UniqueRef').
newtype GDBRef reftype table = DBRef DBKeyType
deriving (Eq, Data, Typeable, Num, Integral, Real, Ord, Enum, Bounded, Generic)
instance A.ToJSON (GDBRef t a) where
toJSON (DBRef k) = A.toJSON k
instance A.FromJSON (GDBRef t a) where
parseJSON (A.Number n) = return $ DBRef (floor n)
parseJSON _ = fail "Expected Number"
instance (Model t) => Show (GDBRef rt t) where
showsPrec n (DBRef k) = showsPrec n k
instance (Model t) => Read (GDBRef rt t) where
readsPrec n str = map wrap $ readsPrec n str
where wrap (k, s) = (DBRef k, s)
instance FromField (GDBRef rt t) where
{-# INLINE fromField #-}
fromField f bs = DBRef <$> fromField f bs
instance ToField (GDBRef rt t) where
{-# INLINE toField #-}
toField (DBRef k) = toField k
-- | Phantom type for instantiating 'GDBRef' that represents a one-to-many
-- relationship between tables.
data NormalRef = NormalRef deriving (Show, Data, Typeable)
-- | A @DBRef T@ represents a many-to-one relationship between tables. For
-- example, if type @A@ contains a @DBRef B@, then each @B@ is associated
-- with many @A@'s. By contrast, a @'DBRefUnique'@ represents a one-to-one
-- relationship.
--
-- @DBRef@ is a type alias of kind @* -> *@. The type @DBRef T@
-- references an instance of type @T@ by the primary key of its
-- database row. The type argument @T@ should be an instance of
-- 'Model'.
type DBRef = GDBRef NormalRef
-- | Phantom type for instantiating 'GDBRef' that represents a one-to-one
-- relationship between tables.
data UniqueRef = UniqueRef deriving (Show, Data, Typeable)
-- | A @DBRefUnique T@ represents a one-to-one relationship between types. For
-- example, if type @A@ contains a @DBRefUnique B@, then each @A@ is associated
-- with one (or at most one) @B@, and each @B@ has one (or at most one) @A@
-- associated with it.
--
-- By contrast, a @'DBRef'@ represents a many-to-one relationship.
type DBRefUnique = GDBRef UniqueRef
-- Functionally, @DBRefUnique@ and @DBRef@ are treated the same by
-- this module. However, other modules make a distinction. In
-- particular, the 'modelCreateStatement' corresponding to a
-- 'DBRefUnique' will include a @UNIQUE@ constraint.
-- | Create a reference to the primary key of a 'Model', suitable for
-- storing in a 'DBRef' or 'DBRefUnique' field of a different 'Model'.
mkDBRef :: (Model a) => a -> GDBRef rt a
mkDBRef a
| (DBKey k) <- primaryKey a = DBRef k
| otherwise = error $ "mkDBRef " ++ S8.unpack (modelName a) ++ ": NullKey"
-- | A @ModelInfo T@ contains the information necessary for mapping
-- @T@ to a database table. Each @'Model'@ type has a single
-- @ModelInfo@ associated with it, accessible through the 'modelInfo'
-- method of the 'Model' class. Note the table and column names must
-- all be unquoted in this data structure, as they will later be
-- quoted using 'quoteIdent' by the 'modelIdentifiers' method.
data ModelInfo a = ModelInfo {
modelTable :: !S.ByteString
-- ^ The name of the database table corresponding to this model.
-- The default 'modelInfo' instance uses 'defaultModelTable',
-- which is the name of your data type with the first letter
-- downcased.
, modelColumns :: ![S.ByteString]
-- ^ The names of the database columns corresponding to fields of
-- this model. The column names should appear in the order in
-- which the fields are defined in the Haskell data type @a@
-- (which should also be the order in which 'modelRead' parses
-- them to an @a@ and 'modelWrite' marshalls them).
--
-- Note that all queries generated by the library specify explicit
-- column names. Hence the order of columns does not need to
-- match their order in the database table. They should instead
-- match the order of fields in the Haskell data structure.
--
-- The default, given by 'defaultModelColumns', is to use the
-- Haskell field names for @a@. This default will fail to compile
-- if @a@ is not defined using record syntax.
, modelPrimaryColumn :: !Int
-- ^ The 0-based index of the primary key column in
-- 'modelColumns'. This should be 0 when your data structure's
-- first field is its 'DBKey' (highly recommended, and required by
-- 'defaultModelGetPrimaryKey'). If you customize this field, you
-- must also customize 'modelGetPrimaryKey'--no check is made that
-- the two are consistent.
, modelGetPrimaryKey :: !(a -> DBKey)
-- ^ Return the primary key of a particular model instance. If
-- you customize this field, you must also customize
-- 'modelPrimaryColumn'--no check is made that the two are
-- consistent.
}
instance Show (ModelInfo a) where
show a = intercalate " " [
"Model", show $ modelTable a, show $ modelColumns a
, show $ modelPrimaryColumn a , "?"]
-- $HelperClasses
--
-- These classes are used internally to manipulate the 'Rep'
-- representations of 'Generic' data structures. You should not be
-- defining instances of or using these classes directly. The names
-- are exported so that you can include them in the context of the
-- type signatures of your functions, should you wish to make use of
-- the various @default@... functions in this file.
-- | This class returns the name of a datatype.
class GDatatypeName f where
gDatatypeName :: f p -> String
instance (Datatype c) => GDatatypeName (D1 c f) where
gDatatypeName a = datatypeName a
-- | The default name of the database table corresponding to a Haskell
-- type. The default is the same as the type name with the first
-- letter converted to lower-case. (The rationale is that Haskell
-- requires types to start with a capital letter, but all-lower-case
-- table names are easier to use in queries because PostgreSQL
-- generally does not require them to be quoted.)
defaultModelTable :: (Generic a, GDatatypeName (Rep a)) => a -> S.ByteString
defaultModelTable = fromString . caseFold . gDatatypeName . from
where caseFold (h:t) = toLower h:t
caseFold s = s
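-- For example, for the @MyType@ of the module documentation:
--
-- > defaultModelTable (undefined :: MyType) == "myType"
--
-- (The argument is never forced; only its type is consulted, which is why
-- 'defaultModelInfo' below can safely apply this to 'undefined'.)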
-- | This class extracts the field names of a Haskell data structure. Only
-- defined for types with a single constructor that uses record syntax.
class GColumns f where
gColumns :: f p -> [S.ByteString]
instance GColumns U1 where
gColumns _ = []
instance (Selector c) => GColumns (M1 S c f) where
gColumns s = [fromString $ selName s]
instance (GColumns a, GColumns b) => GColumns (a :*: b) where
gColumns ~(a :*: b) = gColumns a ++ gColumns b
instance (GColumns f) => GColumns (M1 C c f) where
gColumns ~(M1 fp) = gColumns fp
instance (GColumns f) => GColumns (M1 D c f) where
gColumns ~(M1 fp) = gColumns fp
-- | Returns the Haskell field names in a data structure.
defaultModelColumns :: (Generic a, GColumns (Rep a)) => a -> [S.ByteString]
defaultModelColumns = gColumns . from
-- | This class extracts the first field in a data structure when the
-- field is of type 'DBKey'. If you get a compilation error because
-- of this class, make the 'DBKey' the first field of your data structure.
class GPrimaryKey0 f where
gPrimaryKey0 :: f p -> DBKey
instance GPrimaryKey0 (S1 c (K1 i DBKey)) where
{-# INLINE gPrimaryKey0 #-}
gPrimaryKey0 (M1 (K1 k)) = k
instance (GPrimaryKey0 a) => GPrimaryKey0 (a :*: b) where
{-# INLINE gPrimaryKey0 #-}
gPrimaryKey0 (a :*: _) = gPrimaryKey0 a
instance (GPrimaryKey0 f) => GPrimaryKey0 (C1 c f) where
{-# INLINE gPrimaryKey0 #-}
gPrimaryKey0 (M1 fp) = gPrimaryKey0 fp
instance (GPrimaryKey0 f) => GPrimaryKey0 (D1 c f) where
{-# INLINE gPrimaryKey0 #-}
gPrimaryKey0 (M1 fp) = gPrimaryKey0 fp
-- | Extract the primary key of type 'DBKey' from a model when the
-- 'DBKey' is the first element of the data structure. Fails to
-- compile if the first field is not of type 'DBKey'.
defaultModelGetPrimaryKey :: (Generic a, GPrimaryKey0 (Rep a)) => a -> DBKey
{-# INLINE defaultModelGetPrimaryKey #-}
defaultModelGetPrimaryKey = gPrimaryKey0 . from
class GFromRow f where
gFromRow :: RowParser (f p)
instance GFromRow U1 where
{-# INLINE gFromRow #-}
gFromRow = return U1
instance (FromField c) => GFromRow (K1 i c) where
{-# INLINE gFromRow #-}
gFromRow = K1 <$> field
instance (GFromRow a, GFromRow b) => GFromRow (a :*: b) where
{-# INLINE gFromRow #-}
gFromRow = (:*:) <$> gFromRow <*> gFromRow
instance (GFromRow f) => GFromRow (M1 i c f) where
{-# INLINE gFromRow #-}
gFromRow = M1 <$> gFromRow
-- | This function provides a 'fromRow' function for 'Generic' types,
-- suitable as a default of the 'FromRow' class. This module uses it
-- as the default implementation of 'modelRead'.
defaultFromRow :: (Generic a, GFromRow (Rep a)) => RowParser a
{-# INLINE defaultFromRow #-}
defaultFromRow = to <$> gFromRow
class GToRow f where
gToRow :: f p -> [Action]
instance GToRow U1 where
gToRow _ = []
instance (ToField c) => GToRow (K1 i c) where
gToRow (K1 c) = [toField c]
instance (GToRow a, GToRow b) => GToRow (a :*: b) where
gToRow (a :*: b) = gToRow a ++ gToRow b
instance (GToRow f) => GToRow (M1 i c f) where
gToRow (M1 fp) = gToRow fp
-- | This function provides a 'toRow' function for 'Generic' types
-- that marshalls each field of the data type in the order in which it
-- appears in the type definition. This function is /not/ a suitable
-- implementation of 'modelWrite' (since it marshals the primary key,
-- which is not supposed to be written). However, it is required
-- internally by 'defaultModelWrite', and exposed in the unlikely
-- event it is of use to alternate generic 'modelWrite' functions.
-- You probably don't want to call this function.
defaultToRow :: (Generic a, GToRow (Rep a)) => a -> [Action]
defaultToRow = gToRow . from
-- | Removes a single element from the list at the position specified.
-- (Internal)
deleteAt :: Int -> [a] -> [a]
deleteAt 0 (_:t) = t
deleteAt n (h:t) = h:deleteAt (n-1) t
deleteAt _ _ = []
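-- For instance:
--
-- > deleteAt 1 "abc" == "ac"
-- > deleteAt 9 "abc" == "abc"   -- out-of-range index: nothing is dropped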
-- | Returns a series of 'Action's serializing each field of a data
-- structure (in the order of the Haskell datatype definition),
-- /except/ the primary key, since the primary key should never be
-- written to a database. Every field must be an instance of
-- 'ToField'.
defaultModelWrite :: forall a. (Model a, Generic a, GToRow (Rep a))
=> a -> [Action]
{-# INLINE defaultModelWrite #-}
defaultModelWrite a = deleteAt pki $ defaultToRow a
where pki = modelPrimaryColumn (modelInfo :: ModelInfo a)
-- | The default definition of 'modelInfo'. See the documentation at
-- 'Model' for more information. Sets 'modelTable' to the name of the
-- type with the first character converted to lower-case. Sets
-- 'modelColumns' to the names of the Haskell field selectors. Sets
-- 'modelPrimaryColumn' to @0@ and extracts the first field of the
-- structure for 'modelGetPrimaryKey'. Will fail to compile unless
-- the data structure is defined with record syntax and that its first
-- field is of type 'DBKey'.
--
-- Note that defaults for the individual fields are available in
-- separate functions (e.g., 'defaultModelTable') with fewer class
-- requirements in the context, in case you want to make piecemeal use
-- of defaults. The default for 'modelPrimaryColumn' is 0. If you
-- overwrite that, you will need to overwrite 'modelGetPrimaryKey' as
-- well (and likely vice versa).
defaultModelInfo :: forall a.
(Generic a, GDatatypeName (Rep a), GColumns (Rep a)
, GPrimaryKey0 (Rep a)) => ModelInfo a
defaultModelInfo = m
where m = ModelInfo { modelTable = defaultModelTable a
, modelColumns = defaultModelColumns a
, modelPrimaryColumn = 0
, modelGetPrimaryKey = defaultModelGetPrimaryKey
}
a = undefined :: a
-- | An alternate 'Model' pattern in which Haskell type and field
-- names are converted from camel-case to underscore notation. The
-- first argument is a prefix to be removed from field names (since
-- Haskell requires field names to be unique across data types, while
-- SQL allows the same column names to be used in different tables).
--
-- For example:
--
-- > data Bar = Bar {
-- > barId :: !DBKey
-- > , barNameOfBar :: !String
-- > , barParent :: !(Maybe (DBRef Bar))
-- > } deriving (Show, Generic)
-- >
-- > instance Model Bar where modelInfo = underscoreModelInfo "bar"
--
-- would associate type @Bar@ with a database table called @bar@ with
-- fields @id@, @name_of_bar@, and @parent@.
underscoreModelInfo :: (Generic a, GToRow (Rep a), GFromRow (Rep a)
, GPrimaryKey0 (Rep a), GColumns (Rep a)
, GDatatypeName (Rep a)) =>
S.ByteString -> ModelInfo a
underscoreModelInfo prefix = def {
modelTable = toUnderscore True $ modelTable def
, modelColumns = map fixCol $ modelColumns def
}
where def = defaultModelInfo
plen = S.length prefix
fixCol c = toUnderscore False $ stripped
where stripped | prefix `S.isPrefixOf` c = S.drop plen c
| otherwise = c
-- | Convert a name from camel-case to underscore notation.
-- I.e., names of the form "MSizeForm" are changed to "msize_form".
-- @skipFirst@ determines if the first character should be ignored
-- in the conversion.
toUnderscore :: Bool -> S.ByteString -> S.ByteString
toUnderscore skipFirst | skipFirst = S8.pack . skip . S8.unpack
| otherwise = S8.pack . go True . S8.unpack
where skip "" = ""
skip (h:t) = toLower h : go True t
go _ "" = ""
go _ (h:t) | not (isUpper h) = h : go False t
go True (h:t) = toLower h : go True t
go False (h:t) = '_' : toLower h : go True t
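-- For example:
--
-- > toUnderscore False "NameOfBar" == "name_of_bar"
-- > toUnderscore True "MyType" == "my_type"
--
-- (As used in 'underscoreModelInfo' above: @True@ for the table name,
-- @False@ for the prefix-stripped column names.)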
-- | SQL table and column identifiers that should be copied verbatim
-- into queries. For normal models, these will simply be quoted
-- versions of the fields in the corresponding 'ModelInfo'. However,
-- for special cases, the fields of this structure can contain
-- unquoted SQL including @JOIN@ keywords. In the case of joins,
-- different elements of 'modelQColumns' may be qualified by different
-- table names.
--
-- Note that 'modelQColumns' and 'modelQPrimaryColumn' both contain
-- table-qualified names (e.g., @\"\\\"my_type\\\".\\\"key\\\"\"@),
-- while 'modelQWriteColumns' contains only the quoted column names.
data ModelIdentifiers a = ModelIdentifiers {
modelQTable :: !S.ByteString
-- ^ Literal SQL for the name of the table.
, modelQColumns :: ![S.ByteString]
-- ^ Literal SQL for each, table-qualified column.
, modelQPrimaryColumn :: S.ByteString
-- ^ Literal SQL for the model's table-qualified primary key
-- column.
, modelQWriteColumns :: [S.ByteString]
-- ^ Literal SQL for all the columns except the primary key.
-- These are the columns that should be included in an @INSERT@ or
-- @UPDATE@. Note that unlike the other fields, these column
-- names should /not/ be table-qualified.
, modelQualifier :: !(Maybe S.ByteString)
-- ^ When all columns in 'modelQColumns' are qualified by the same
-- table name, this field contains 'Just' the table name.
-- For the ':.' type (in which different columns have different
-- table qualifications), this field is 'Nothing'.
--
-- For normal models, this field will be identical to
-- 'modelQTable'. However, for 'As' models, 'modelQTable' will
-- contain unquoted SQL such as @\"\\\"MyType\\\" AS
-- \\\"my_alias\\\"\"@, in which case @modelQualifier@ will
-- contain @'Just' \"\\\"my_alias\\\"\"@.
, modelOrigTable :: !(Maybe S.ByteString)
-- ^ The original, unquoted name of the table representing the
-- model in the database. Ordinarily, this should be the same as
-- 'modelTable' in 'ModelInfo', but in the case of 'As' aliases,
-- the 'modelTable' is an alias, and 'modelOrigTable' is the
-- original table. 'Nothing' for joins.
} deriving (Show)
-- | The default simply quotes the 'modelInfo' and 'modelColumns'
-- fields of 'ModelInfo' using 'quoteIdent'.
defaultModelIdentifiers :: ModelInfo a -> ModelIdentifiers a
defaultModelIdentifiers mi = ModelIdentifiers {
modelQTable = qtable
, modelQColumns = qcols
, modelQPrimaryColumn = qcols !! pki
, modelQWriteColumns = deleteAt pki $ map quoteIdent $ modelColumns mi
, modelQualifier = Just qtable
, modelOrigTable = Just $ modelTable mi
}
where qtable = quoteIdent (modelTable mi)
qcol c = S.concat [qtable, ".", quoteIdent c]
qcols = map qcol $ modelColumns mi
pki = modelPrimaryColumn mi
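-- For instance, pretending the @MyType@ of the module documentation has only
-- the three fields shown there (@myKey@, @myName@, @myCamelCase@), the
-- resulting identifiers hold the following literal SQL fragments:
--
-- > modelQTable: "myType"
-- > modelQColumns: "myType"."myKey", "myType"."myName", "myType"."myCamelCase"
-- > modelQPrimaryColumn: "myType"."myKey"
-- > modelQWriteColumns: "myName", "myCamelCase"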
-- | Standard CRUD (create\/read\/update\/delete) queries on a model.
data ModelQueries a = ModelQueries {
modelLookupQuery :: !Query
-- ^ A query template for looking up a model by its primary key.
-- Expects a single query parameter, namely the 'DBKey' or 'DBRef'
-- being looked up.
, modelUpdateQuery :: !Query
-- ^ A query template for updating an existing 'Model' in the
-- database. Expects as query parameters a value for every column
-- of the model /except/ the primary key, followed by the primary
-- key. (The primary key is not written to the database, just
-- used to select the row to change.)
, modelInsertQuery :: !Query
-- ^ A query template for inserting a new 'Model' in the database.
-- The query parameters are values for all columns /except/ the
-- primary key. The query returns the full row as stored in the
-- database (including the values of fields, such as the primary
-- key, that have been chosen by the database server).
, modelDeleteQuery :: !Query
-- ^ A query template for deleting a 'Model' from the database.
-- Should have a single query parameter, namely the 'DBKey' of the
-- row to delete.
} deriving (Show)
-- | Default SQL lookup query for a model.
defaultModelLookupQuery :: ModelIdentifiers a -> Query
defaultModelLookupQuery mi = Query $ S.concat [
modelSelectFragment mi, " WHERE ", modelQPrimaryColumn mi, " = ?"
]
-- | Default SQL update query for a model.
defaultModelUpdateQuery :: ModelIdentifiers a -> Query
defaultModelUpdateQuery mi = Query $ S.concat [
"UPDATE ", modelQTable mi, " SET "
, S.intercalate ", " $ map (<> " = ?") $ modelQWriteColumns mi
, " WHERE ", modelQPrimaryColumn mi, " = ?"
, " RETURNING ", S.intercalate ", " (modelQColumns mi)
]
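-- For the three-column @MyType@ example above, this template expands to the
-- following SQL (line breaks added here for readability):
--
-- > UPDATE "myType" SET "myName" = ?, "myCamelCase" = ?
-- > WHERE "myType"."myKey" = ?
-- > RETURNING "myType"."myKey", "myType"."myName", "myType"."myCamelCase"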
-- | Default SQL insert query for a model.
defaultModelInsertQuery :: ModelIdentifiers a -> Query
defaultModelInsertQuery mi
| null (modelQWriteColumns mi) = Query $ S.concat [
"INSERT INTO ", modelQTable mi, " DEFAULT VALUES RETURNING "
, S.intercalate ", " $ modelQColumns mi ]
| otherwise = Query $ S.concat $ [
"INSERT INTO ", modelQTable mi
, " (", S.intercalate ", " $ modelQWriteColumns mi
, ") VALUES (", S.intercalate ", " $ map (const "?") $ modelQWriteColumns mi
, ") RETURNING ", S.intercalate ", " $ modelQColumns mi
]
-- | Default SQL delete query for a model.
defaultModelDeleteQuery :: ModelIdentifiers a -> Query
defaultModelDeleteQuery mi = Query $ S.concat [
"DELETE FROM ", modelQTable mi
, " WHERE ", modelQPrimaryColumn mi, " = ?"
]
-- | The default value of 'modelQueries'.
defaultModelQueries :: ModelIdentifiers a -> ModelQueries a
defaultModelQueries mi = ModelQueries {
modelLookupQuery = defaultModelLookupQuery mi
, modelUpdateQuery = defaultModelUpdateQuery mi
, modelInsertQuery = defaultModelInsertQuery mi
, modelDeleteQuery = defaultModelDeleteQuery mi
}
-- | Extra information for "Database.PostgreSQL.ORM.CreateTable". You
-- probably don't need to use this.
data ModelCreateInfo a = ModelCreateInfo {
modelCreateColumnTypeExceptions :: ![(S.ByteString, S.ByteString)]
-- ^ A list of (column-name, type) pairs for which you want to
-- override the default.
, modelCreateExtraConstraints :: !S.ByteString
-- ^ Extra constraints to stick at the end of the @CREATE TABLE@
-- statement.
} deriving (Show)
-- | A 'ModelCreateInfo' that doesn't imply any extra constraints or
-- exceptions.
emptyModelCreateInfo :: ModelCreateInfo a
emptyModelCreateInfo = ModelCreateInfo {
modelCreateColumnTypeExceptions = []
, modelCreateExtraConstraints = S.empty
}
-- | The class of data types that represent a database table. This
-- class conveys information necessary to move a Haskell data
-- structure in and out of a database table. The most important field
-- is 'modelInfo', which describes the database table and column
-- names. 'modelInfo' has a reasonable default implementation for
-- types that are members of the 'Generic' class (using GHC's
-- @DeriveGeneric@ extension), provided the following conditions hold:
--
-- 1. The data type must have a single constructor that is defined
-- using record selector syntax.
--
-- 2. The very first field of the data type must be a 'DBKey' to
-- represent the primary key. Other orders will cause a
-- compilation error.
--
-- 3. Every field of the data structure must be an instance of
-- 'FromField' and 'ToField'.
--
-- If these three conditions hold and your database naming scheme
-- follows the conventions of 'defaultModelInfo'--namely that the
-- table name is the same as the type name with the first character
-- downcased, and the field names are the same as the column
-- names--then it is reasonable to have a completely empty (default)
-- instance declaration:
--
-- > data MyType = MyType { myKey :: !DBKey
-- > , myName :: !S.ByteString
-- > , myCamelCase :: !Int
-- > , ...
-- > } deriving (Show, Generic)
-- > instance Model MyType
--
-- The default 'modelInfo' method is called 'defaultModelInfo'. You
-- may wish to use almost all of the defaults, but tweak a few things.
-- This is easily accomplished by overriding a few fields of the
-- default structure. For example, suppose your database columns use
-- exactly the same name as your Haskell field names, but the name of
-- your database table is not the same as the name of the Haskell data
-- type. You can override the database table name (field
-- 'modelTable') as follows:
--
-- > instance Model MyType where
-- > modelInfo = defaultModelInfo { modelTable = "my_type" }
--
-- Finally, if you dislike the conventions followed by
-- 'defaultModelInfo', you can simply implement an alternate pattern.
-- An example of this is 'underscoreModelInfo', which strips a prefix
-- off every field name and converts everything from camel-case to
-- underscore notation:
--
-- > instance Model MyType where
-- > modelInfo = underscoreModelInfo "my"
--
-- The above code will associate @MyType@ with a database table
-- @my_type@ having column names @key@, @name@, @camel_case@, etc.
--
-- You can implement other patterns like 'underscoreModelInfo' by
-- calling 'defaultModelInfo' and modifying the results.
-- Alternatively, you can directly call the lower-level functions from
-- which 'defaultModelInfo' is built ('defaultModelTable',
-- 'defaultModelColumns', 'defaultModelGetPrimaryKey').
class Model a where
-- | @modelInfo@ provides information about how the Haskell data
-- type is stored in the database, in the form of a 'ModelInfo' data
-- structure. Among other things, this structure specifies the name
-- of the database table, the names of the database columns
-- corresponding to the Haskell data structure fields, and the
-- position of the primary key in both the database columns and the
-- Haskell data structure.
modelInfo :: ModelInfo a
default modelInfo :: (Generic a, GDatatypeName (Rep a), GColumns (Rep a)
, GPrimaryKey0 (Rep a)) => ModelInfo a
{-# INLINE modelInfo #-}
modelInfo = defaultModelInfo
-- | 'modelIdentifiers' contains the table and column names verbatim
-- as they should be inserted into SQL queries. For normal models,
-- these are simply double-quoted (with 'quoteIdent') versions of
-- the names in 'modelInfo', with the column names qualified by the
-- double-quoted table name. However, for special cases such as
-- join relations (with ':.') or row aliases (with 'As'),
-- 'modelIdentifiers' can modify the table name with unquoted SQL
-- identifiers (such as @JOIN@ and @AS@) and change the qualified
-- column names appropriately.
modelIdentifiers :: ModelIdentifiers a
{-# INLINE modelIdentifiers #-}
modelIdentifiers = defaultModelIdentifiers modelInfo
-- | @modelRead@ converts from a database 'query' result to the
-- Haskell data type of the @Model@, namely @a@. Note that if type
-- @a@ is an instance of 'FromRow', a fine definition of @modelRead@
-- is @modelRead = fromRow@. The default is to construct a row
-- parser using the 'Generic' class. However, it is crucial that
-- the columns be parsed in the same order they are listed in the
-- 'modelColumns' field of @a@'s 'ModelInfo' structure, and this
-- should generally be the same order they are defined in the
-- Haskell data structure. Hence @modelRead@ should generally look
-- like:
--
-- @
-- -- Call 'field' as many times as there are fields in your type
-- modelRead = Constructor \<$> 'field' \<*> 'field' \<*> 'field'
-- @
modelRead :: RowParser a
default modelRead :: (Generic a, GFromRow (Rep a)) => RowParser a
{-# INLINE modelRead #-}
modelRead = defaultFromRow
-- | Marshal all fields of @a@ /except/ the primary key. As with
-- 'modelRead', the fields must be marshalled in the same order the
-- corresponding columns are listed in 'modelColumns', only with the
-- primary key (generally column 0) deleted.
--
-- Do /not/ define this as 'toRow', even if @a@ is an instance of
-- 'ToRow', because 'toRow' would include the primary key.
-- Similarly, do /not/ define this as 'defaultToRow'. On the other
-- hand, it is reasonable for @modelWrite@ to return an error for
-- degenerate models (such as joins) that should never be 'save'd.
modelWrite :: a -> [Action]
default modelWrite :: (Generic a, GToRow (Rep a)) => a -> [Action]
{-# INLINE modelWrite #-}
modelWrite = defaultModelWrite
-- | @modelQueries@ provides pre-formatted 'Query' templates for
-- 'findRow', 'save', and 'destroy'. The default 'modelQueries'
-- value is generated from 'modelIdentifiers' and should not be
-- modified. However, for degenerate tables (such as joins created
-- with ':.'), it is reasonable to make 'modelQueries' always throw
-- an exception, thereby disallowing ordinary queries and requiring
-- use of more general query functions.
--
-- This method should either throw an exception or use the default
-- implementation.
modelQueries :: ModelQueries a
{-# INLINE modelQueries #-}
modelQueries = defaultModelQueries modelIdentifiers
-- | Extra constraints, if any, to place in a @CREATE TABLE@
-- statement. Only used by "Database.PostgreSQL.ORM.CreateTable".
modelCreateInfo :: ModelCreateInfo a
modelCreateInfo = emptyModelCreateInfo
-- | Perform a validation of the model, returning any errors if
-- it is invalid.
modelValid :: a -> ValidationError
modelValid = const mempty
-- | Degenerate instances of 'Model' for types in the 'ToRow' class
-- are to enable extra 'ToRow' types to be included with ':.' in the
-- result of 'dbSelect' queries.
degen_err :: a
degen_err = error "Attempt to use degenerate ToRow instance as Model"
#define DEGENERATE(ctx,t) \
instance ctx => Model t where { \
modelInfo = degen_err; \
modelIdentifiers = degen_err; \
modelRead = fromRow; \
modelWrite _ = degen_err; \
modelCreateInfo = degen_err; }
DEGENERATE(FromField t, (Only t))
DEGENERATE(FromField t, [t])
DEGENERATE((FromField a, FromField b), (a, b))
DEGENERATE((FromField a, FromField b, FromField c), (a, b, c))
DEGENERATE((FromField a, FromField b, FromField c, FromField d), (a, b, c, d))
DEGENERATE((FromField a, FromField b, FromField c, FromField d, FromField e), \
(a, b, c, d, e))
#undef DEGEN_ERR
#undef DEGENERATE
-- | A degenerate model that lifts any model to a Maybe version. Returns
-- 'Nothing' on a parse failure. Useful, for example, for performing outer
-- joins:
-- @
-- dbJoin modelDBSelect "LEFT OUTER JOIN"
-- (addWhere 'foo = 123' $ modelDBSelect)
-- "USING a.id = b.a_id" :: (A :. Maybe B)
-- @
--
instance forall a. Model a => Model (Maybe a) where
modelInfo = mi_a { modelGetPrimaryKey = getPrimaryKey }
where mi_a = modelInfo :: ModelInfo a
getPrimaryKey Nothing = NullKey
getPrimaryKey (Just a) = modelGetPrimaryKey mi_a a
modelIdentifiers = mi_a { modelQTable = modelQTable mi_a }
where mi_a = modelIdentifiers :: ModelIdentifiers a
modelQueries = mi_a { modelLookupQuery = modelLookupQuery mi_a }
where mi_a = modelQueries :: ModelQueries a
modelCreateInfo = error
"Attempt to use degenerate Maybe (Model a) instance for ModelCreateInfo"
modelValid = maybe mempty modelValid
modelWrite = maybe [] modelWrite
modelRead =
Just `fmap` (modelRead :: RowParser a)
<|> do
let n = length $ modelColumns (modelInfo :: ModelInfo a)
replicateM_ n (field :: RowParser AnyField)
return Nothing
-- | AnyField parses (simply by consuming) any SQL column.
data AnyField = AnyField
instance FromField AnyField where
fromField _ _ = pure AnyField
joinModelIdentifiers :: forall a b. (Model a, Model b)
=> ModelIdentifiers (a :. b)
joinModelIdentifiers = r
where r = ModelIdentifiers {
modelQTable = qtable
, modelQColumns = modelQColumns mia ++ modelQColumns mib
, modelQWriteColumns = error "attempt to write join relation"
, modelQPrimaryColumn =
error "attempt to use primary key of join relation"
, modelQualifier = Nothing
, modelOrigTable = Nothing
}
qtable | S.null $ modelQTable mib = modelQTable mia
| S.null $ modelQTable mia = modelQTable mib
| otherwise = S.concat [modelQTable mia, " CROSS JOIN "
, modelQTable mib]
mia = modelIdentifiers :: ModelIdentifiers a
mib = modelIdentifiers :: ModelIdentifiers b
-- | A degenerate instance of model representing a database join. The
-- ':.' instance does not allow normal model operations such as
-- 'findRow', 'save', and 'destroy'. Attempts to use such functions
-- will result in an exception.
instance (Model a, Model b) => Model (a :. b) where
modelInfo = error "attempt to access ModelInfo of join type :."
modelIdentifiers = joinModelIdentifiers
modelRead = (:.) <$> modelRead <*> modelRead
modelWrite _ = error "attempt to write join type :. as a normal Model"
modelQueries = error "attempt to perform standard query on join type :."
class GUnitType f where
gUnitTypeName :: f p -> String
instance GUnitType (C1 c U1) where
gUnitTypeName _ = error "gUnitTypeName"
instance GUnitType V1 where
gUnitTypeName _ = error "gUnitTypeName"
instance (Datatype c, GUnitType f) => GUnitType (D1 c f) where
gUnitTypeName = datatypeName
-- | The class of types that can be used as tags in an 'As' alias.
-- Such types should be unit types--in other words, have exactly one
-- constructor, where the constructor is nullary (takes no arguments).
-- The reason for this class is that the 'Model' instance for 'As'
-- requires a way to extract the name of the row alias without having
-- a concrete instance of the type. This is provided by the
-- 'rowAliasName' method (which must be non-strict).
class RowAlias a where
rowAliasName :: g a row -> S.ByteString
-- ^ Return the SQL identifier for the row alias. This method must
-- be non-strict in its argument. Hence, it should discard the
-- argument and return the name of the alias. For example:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > data My_alias = My_alias
-- > instance RowAlias My_alias where rowAliasName _ = "my_alias"
--
-- Keep in mind that PostgreSQL folds unquoted identifiers to
-- lower-case. However, this library quotes row aliases in @SELECT@
-- statements, thereby preserving case. Hence, if you want to
-- construct a @WHERE@ clause without double-quoting row aliases in
-- your 'Query', you should avoid capital letters in alias names.
--
-- A default implementation of @rowAliasName@ exists for unit types
-- (as well as empty data declarations) in the 'Generic' class. The
-- default converts the first character of the type name to
-- lower-case, following the logic of 'defaultModelTable'.
default rowAliasName :: (Generic a, GUnitType (Rep a)) =>
g a row -> S.ByteString
rowAliasName _ = fromString $ caseFold $ gUnitTypeName . from $ a
where caseFold (h:t) = toLower h:t -- fold first character only
caseFold [] = []
a = undefined :: a
-- | The newtype @As@ can be wrapped around an existing type to give
-- it a table name alias in a query. This is necessary when a model
-- is being joined with itself, to distinguish the two joined
-- instances of the same table.
--
-- For example:
--
-- @{-\# LANGUAGE OverloadedStrings #-}
--
--data X = X
--instance 'RowAlias' X where rowAliasName = const \"x\"
--
-- \ ...
-- r <- 'dbSelect' c $ addWhere_ \"bar.bar_key = x.bar_parent\" modelDBSelect
-- :: IO [Bar :. As X Bar]
-- @
newtype As alias row = As { unAs :: row }
instance (RowAlias alias, Show row) => Show (As alias row) where
showsPrec d as@(As row) = showParen (d > 10) $ \rest ->
"As " ++ S8.unpack (rowAliasName as) ++
" (" ++ showsPrec 11 row (")" ++ rest)
-- | @fromAs@ extracts the @row@ from an @'As' alias row@, but
-- constrains the type of @alias@ to be the same as its first argument
-- (which is non-strict). This can save you from explicitly
-- specifying types. For example:
--
-- > data X = X deriving (Generic)
-- > instance RowAlias X where rowAliasName = const "x"
-- >
-- > ...
-- > r <- map (\(b1 :. b2) -> (b1, fromAs X b2)) <$>
-- > dbSelect c $ addWhere \"bar.bar_key = x.bar_parent\" modelDBSelect
fromAs :: alias -> As alias row -> row
{-# INLINE fromAs #-}
fromAs _ (As row) = row
-- | A type-restricted wrapper around the 'As' constructor, under the
-- same rationale as 'fromAs'. Not strict in its first argument.
toAs :: alias -> row -> As alias row
{-# INLINE toAs #-}
toAs _ = As
aliasModelInfo :: forall a alias.
(Model a, RowAlias alias) =>
ModelInfo a -> ModelInfo (As alias a)
aliasModelInfo mi = r
where alias = rowAliasName (undefined :: As alias a)
r = mi { modelTable = alias
, modelGetPrimaryKey = modelGetPrimaryKey mi . unAs
}
aliasModelIdentifiers :: forall a alias. (Model a, RowAlias alias)
=> ModelInfo a -> ModelIdentifiers (As alias a)
aliasModelIdentifiers mi
| not ok = error $ "aliasModelIdentifiers: degenerate model " ++
              show (modelQTable ida)
| otherwise = r
where r = ModelIdentifiers {
modelQTable = S.concat [quoteIdent orig, " AS ", alias]
, modelQColumns = qcols
, modelQPrimaryColumn = qcols !! pki
, modelQWriteColumns = deleteAt pki qcols
, modelQualifier = Just alias
, modelOrigTable = Just orig
}
ida = modelIdentifiers :: ModelIdentifiers a
ok = Just (modelQTable ida) == modelQualifier ida
&& isJust (modelOrigTable ida)
Just orig = modelOrigTable ida
alias = quoteIdent $ rowAliasName (undefined :: As alias a)
qcol c = S.concat [alias, ".", quoteIdent c]
qcols = map qcol $ modelColumns mi
pki = modelPrimaryColumn mi
-- | A degenerate instance of 'Model' that re-names the row with a SQL
-- @AS@ keyword. This is primarily useful when joining a model with
-- itself. Hence, standard operations ('findRow', 'save', 'destroy')
-- are not allowed on 'As' models.
instance (Model a, RowAlias as) => Model (As as a) where
{-# INLINE modelInfo #-}
modelInfo = aliasModelInfo modelInfo
{-# INLINE modelRead #-}
modelRead = As <$> modelRead
modelWrite = error "attempt to write \"As\" alias as normal Model"
{-# INLINE modelIdentifiers #-}
modelIdentifiers = aliasModelIdentifiers modelInfo
modelQueries = error "attempt to perform standard query on AS table alias"
-- | Lookup the 'modelTable' of a 'Model' (@modelName _ = 'modelTable'
-- ('modelInfo' :: 'ModelInfo' a)@).
modelName :: forall a. (Model a) => a -> S.ByteString
{-# INLINE modelName #-}
modelName _ = modelTable (modelInfo :: ModelInfo a)
-- | Lookup the primary key of a 'Model'.
primaryKey :: (Model a) => a -> DBKey
{-# INLINE primaryKey #-}
primaryKey a = modelGetPrimaryKey modelInfo a
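-- A minimal usage sketch (illustration only, not part of the API): given a
-- hypothetical model type @Post@ with a 'Model' instance, the two accessors
-- above can be combined as follows (assuming 'DBKey' has a 'Show' instance):
--
-- > describePost :: Post -> String
-- > describePost p = S8.unpack (modelName p) ++ " #" ++ show (primaryKey p)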
-- | Generate a SQL @SELECT@ statement with no @WHERE@ predicate. For
-- example, 'defaultModelLookupQuery' consists of
-- @modelSelectFragment@ followed by \"@WHERE@ /primary-key/ = ?\".
modelSelectFragment :: ModelIdentifiers a -> S.ByteString
modelSelectFragment mi = S.concat [
"SELECT ", S.intercalate ", " $ modelQColumns mi, " FROM ", modelQTable mi ]
-- | A newtype wrapper in the 'FromRow' class, permitting every model
-- to be used as the result of a database query.
newtype LookupRow a = LookupRow { lookupRow :: a } deriving (Show)
instance (Model a) => FromRow (LookupRow a) where
fromRow = LookupRow <$> modelRead
-- | A newtype wrapper in the 'ToRow' class, which marshalls every
-- field except the primary key. For use with 'modelInsertQuery'.
newtype InsertRow a = InsertRow a deriving (Show)
instance (Model a) => ToRow (InsertRow a) where
toRow (InsertRow a) = modelWrite a
-- | A newtype wrapper in the 'ToRow' class, which marshalls every
-- field except the primary key, followed by the primary key. For use
-- with 'modelUpdateQuery'.
newtype UpdateRow a = UpdateRow a deriving (Show)
instance (Model a) => ToRow (UpdateRow a) where
toRow (UpdateRow a) = toRow $ InsertRow a :. Only (primaryKey a)
-- | Dump an entire model. Useful for development and debugging only,
-- as every row will be read into memory before the function returns.
--
-- Note that unlike the other primary model operations, it is OK to
-- call 'findAll' even on degenerate models such as 'As' and ':.'.
findAll :: forall r. (Model r) => Connection -> IO [r]
findAll c = action
where mi = modelIdentifiers :: ModelIdentifiers r
q = Query $ modelSelectFragment mi
action = map lookupRow <$> query_ c q
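-- Usage sketch (illustrative only): with a hypothetical model type @Post@
-- and an open postgresql-simple 'Connection':
--
-- > allPosts :: Connection -> IO [Post]
-- > allPosts c = findAll c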
-- | Follow a 'DBRef' or 'DBRefUnique' and fetch the target row from
-- the database into a 'Model' type @r@.
findRow :: forall r rt. (Model r) => Connection -> GDBRef rt r -> IO (Maybe r)
findRow c k = action
where qs = modelQueries :: ModelQueries r
action = do rs <- query c (modelLookupQuery qs) (Only k)
case rs of [r] -> return $ Just $ lookupRow $ r
_ -> return Nothing
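-- Usage sketch (illustrative only): following a hypothetical @'DBRef' Post@
-- (for example, one stored as a foreign key in another model):
--
-- > postForRef :: Connection -> DBRef Post -> IO (Maybe Post)
-- > postForRef = findRow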
-- | Like 'trySave' but instead of returning an 'Either', throws a
-- 'ValidationError' if the 'Model' is invalid.
save :: (Model r)
=> Connection -> r -> IO r
save c r = do
eResp <- trySave c r
case eResp of
Right resp -> return resp
Left errs -> throwIO errs
-- | 'save' but returning '()' instead of the saved model.
save_ :: (Model r)
=> Connection -> r -> IO ()
save_ c r = void $ save c r
-- | Write a 'Model' to the database. If the primary key is
-- 'NullKey', the item is written with an @INSERT@ query, read back
-- from the database, and returned with its primary key filled in. If
-- the primary key is not 'NullKey', then the 'Model' is written with
-- an @UPDATE@ query and returned as-is.
--
-- If the 'Model' is invalid (i.e., the return value of 'modelValid' is
-- non-empty), the 'ValidationError' is returned (as a 'Left') instead.
trySave :: forall r. Model r
=> Connection -> r -> IO (Either ValidationError r)
trySave c r | not . H.null $ validationErrors errors = return $ Left errors
| NullKey <- primaryKey r = do
rs <- query c (modelInsertQuery qs) (InsertRow r)
case rs of [r'] -> return $ Right $ lookupRow r'
_ -> fail "save: database did not return row"
| otherwise = do
rows <- query c (modelUpdateQuery qs) (UpdateRow r)
case rows of [r'] -> return $ Right $ lookupRow r'
_ -> fail $ "save: database updated "
++ show (length rows)
++ " records"
where qs = modelQueries :: ModelQueries r
errors = modelValid r
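-- Usage sketches (illustrative only), for a hypothetical model type @Post@:
--
-- > -- Insert a new row (primary key 'NullKey') and return the assigned key.
-- > createPost :: Connection -> Post -> IO DBKey
-- > createPost c p = primaryKey <$> save c p
--
-- > -- Handle validation errors instead of having them thrown.
-- > createPost' :: Connection -> Post -> IO (Either ValidationError Post)
-- > createPost' = trySave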
-- | Remove the row corresponding to a particular data structure from
-- the database. This function only looks at the primary key in the
-- data structure. It is an error to call this function if the
-- primary key is not set.
destroy :: forall a. (Model a)
=> Connection -> a -> IO (Either ValidationError Bool)
destroy c a =
case primaryKey a of
NullKey -> fail "destroy: NullKey"
DBKey k -> destroyByRef_ "destroy" c (DBRef k :: DBRef a)
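-- Usage sketch (illustrative only): delete a previously fetched @Post@ and
-- report whether a row was actually removed:
--
-- > removePost :: Connection -> Post -> IO Bool
-- > removePost c p = either (const False) id <$> destroy c p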
-- | Remove a row from the database without fetching it first.
destroyByRef :: forall a rt. (Model a)
=> Connection -> GDBRef rt a -> IO (Either ValidationError Bool)
destroyByRef = destroyByRef_ "destroyByRef"
destroyByRef_ :: forall a rt. (Model a)
=> T.Text -> Connection -> GDBRef rt a -> IO (Either ValidationError Bool)
destroyByRef_ msg c a = action
where mq = modelQueries :: ModelQueries a
mi = modelIdentifiers :: ModelIdentifiers a
pkCol = modelQPrimaryColumn mi
action = do
n <- execute c (modelDeleteQuery mq) (Only a)
return $ case n of
0 -> Right False
1 -> Right True
_ -> Left $ validationError (T.decodeUtf8 pkCol) $
msg <> ": DELETE modified " <> T.pack (show n) <>
" rows. This may indicate that your primary key" <>
" accessor field is not actually a primary key."
-- | Print the query statement to stdout.
printq :: Query -> IO ()
printq (Query bs) = S8.putStrLn bs
| alevy/postgresql-orm | src/Database/PostgreSQL/ORM/Model.hs | gpl-3.0 | 50,385 | 1 | 22 | 11,080 | 7,654 | 4,268 | 3,386 | -1 | -1 |
-- |
-- Module : Commands.RpmBuild
-- Copyright : (C) 2007-2008 Bryan O'Sullivan
-- (C) 2012-2015 Jens Petersen
--
-- Maintainer : Jens Petersen <petersen@fedoraproject.org>
-- Stability : alpha
-- Portability : portable
--
-- Explanation: Support for building RPM packages. Can also generate
-- an RPM spec file if you need a basic one to hand-customize.
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
module Commands.RpmBuild (
rpmBuild, rpmBuild_
) where
import Commands.Spec (createSpecFile)
import Dependencies (missingPackages)
import PackageUtils (copyTarball, isScmDir, PackageData (..), packageName,
packageVersion, rpmbuild, RpmStage (..))
import Setup (RpmFlags (..))
import SysCmd (cmd, pkgInstall, (+-+))
--import Control.Exception (bracket)
import Control.Monad (unless, void, when)
import Distribution.PackageDescription (PackageDescription (..))
--import Distribution.Version (VersionRange, foldVersionRange')
import System.Directory (copyFile, doesFileExist)
import System.FilePath (takeDirectory, (</>))
-- autoreconf :: Verbosity -> PackageDescription -> IO ()
-- autoreconf verbose pkgDesc = do
-- ac <- doesFileExist "configure.ac"
-- when ac $ do
-- c <- doesFileExist "configure"
-- when (not c) $ do
-- setupMessage verbose "Running autoreconf" pkgDesc
-- cmd_ "autoreconf" []
rpmBuild :: PackageData -> RpmFlags -> RpmStage ->
IO FilePath
rpmBuild pkgdata flags stage = do
-- let verbose = rpmVerbosity flags
-- bracket (setFileCreationMask 0o022) setFileCreationMask $ \ _ -> do
-- autoreconf verbose pkgDesc
let pkgDesc = packageDesc pkgdata
mspec = specFilename pkgdata
cabalPath = cabalFilename pkgdata
specFile <- maybe (createSpecFile pkgdata flags Nothing)
(\ s -> putStrLn ("Using existing" +-+ s) >> return s)
mspec
let pkg = package pkgDesc
name = packageName pkg
when (stage `elem` [Binary,BuildDep]) $ do
missing <- missingPackages pkgDesc
pkgInstall missing (stage == Binary)
unless (stage == BuildDep) $ do
srcdir <- cmd "rpm" ["--eval", "%{_sourcedir}"]
let version = packageVersion pkg
tarFile = srcdir </> name ++ "-" ++ version ++ ".tar.gz"
tarFileExists <- doesFileExist tarFile
unless tarFileExists $ do
scmRepo <- isScmDir $ takeDirectory cabalPath
when scmRepo $
error "No tarball for source repo"
copyTarball name version False srcdir
let revision = maybe (0::Int) read (lookup "x-revision" (customFieldsPD pkgDesc))
cabalFile = srcdir </> show revision ++ ".cabal"
cabalFileExists <- doesFileExist cabalFile
unless cabalFileExists $
copyFile cabalPath cabalFile
rpmbuild stage False Nothing specFile
return specFile
rpmBuild_ :: PackageData -> RpmFlags -> RpmStage -> IO ()
rpmBuild_ pkgdata flags stage =
void (rpmBuild pkgdata flags stage)
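-- Example usage (sketch only; the 'PackageData' and 'RpmFlags' values are
-- assumed to come from the caller, e.g. cabal-rpm's command-line front end):
--
-- buildBinaryRpm :: PackageData -> RpmFlags -> IO ()
-- buildBinaryRpm pkgdata flags = rpmBuild_ pkgdata flags Binary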
| opensuse-haskell/cabal-rpm | src/Commands/RpmBuild.hs | gpl-3.0 | 3,188 | 0 | 17 | 700 | 628 | 337 | 291 | 46 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Ml.Projects.Jobs.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ <https://cloud.google.com/ml/ AI Platform Training & Prediction API Reference> for @ml.projects.jobs.testIamPermissions@.
module Network.Google.Resource.Ml.Projects.Jobs.TestIAMPermissions
(
-- * REST Resource
ProjectsJobsTestIAMPermissionsResource
-- * Creating a Request
, projectsJobsTestIAMPermissions
, ProjectsJobsTestIAMPermissions
-- * Request Lenses
, pjtipXgafv
, pjtipUploadProtocol
, pjtipAccessToken
, pjtipUploadType
, pjtipPayload
, pjtipResource
, pjtipCallback
) where
import Network.Google.MachineLearning.Types
import Network.Google.Prelude
-- | A resource alias for @ml.projects.jobs.testIamPermissions@ method which the
-- 'ProjectsJobsTestIAMPermissions' request conforms to.
type ProjectsJobsTestIAMPermissionsResource =
"v1" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleIAMV1__TestIAMPermissionsRequest
:>
Post '[JSON] GoogleIAMV1__TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ 'projectsJobsTestIAMPermissions' smart constructor.
data ProjectsJobsTestIAMPermissions =
ProjectsJobsTestIAMPermissions'
{ _pjtipXgafv :: !(Maybe Xgafv)
, _pjtipUploadProtocol :: !(Maybe Text)
, _pjtipAccessToken :: !(Maybe Text)
, _pjtipUploadType :: !(Maybe Text)
, _pjtipPayload :: !GoogleIAMV1__TestIAMPermissionsRequest
, _pjtipResource :: !Text
, _pjtipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsJobsTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pjtipXgafv'
--
-- * 'pjtipUploadProtocol'
--
-- * 'pjtipAccessToken'
--
-- * 'pjtipUploadType'
--
-- * 'pjtipPayload'
--
-- * 'pjtipResource'
--
-- * 'pjtipCallback'
projectsJobsTestIAMPermissions
:: GoogleIAMV1__TestIAMPermissionsRequest -- ^ 'pjtipPayload'
-> Text -- ^ 'pjtipResource'
-> ProjectsJobsTestIAMPermissions
projectsJobsTestIAMPermissions pPjtipPayload_ pPjtipResource_ =
ProjectsJobsTestIAMPermissions'
{ _pjtipXgafv = Nothing
, _pjtipUploadProtocol = Nothing
, _pjtipAccessToken = Nothing
, _pjtipUploadType = Nothing
, _pjtipPayload = pPjtipPayload_
, _pjtipResource = pPjtipResource_
, _pjtipCallback = Nothing
}
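-- Example (sketch only): building a request with the smart constructor and
-- overriding an optional field with the lenses below. The resource path and
-- payload are placeholders, and the lens operators ('&'), ('.~') are assumed
-- to be in scope (e.g. from Control.Lens):
--
-- > req = projectsJobsTestIAMPermissions payload
-- >         "projects/my-project/jobs/my-job"
-- >         & pjtipCallback .~ Just "myCallback"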
-- | V1 error format.
pjtipXgafv :: Lens' ProjectsJobsTestIAMPermissions (Maybe Xgafv)
pjtipXgafv
= lens _pjtipXgafv (\ s a -> s{_pjtipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pjtipUploadProtocol :: Lens' ProjectsJobsTestIAMPermissions (Maybe Text)
pjtipUploadProtocol
= lens _pjtipUploadProtocol
(\ s a -> s{_pjtipUploadProtocol = a})
-- | OAuth access token.
pjtipAccessToken :: Lens' ProjectsJobsTestIAMPermissions (Maybe Text)
pjtipAccessToken
= lens _pjtipAccessToken
(\ s a -> s{_pjtipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pjtipUploadType :: Lens' ProjectsJobsTestIAMPermissions (Maybe Text)
pjtipUploadType
= lens _pjtipUploadType
(\ s a -> s{_pjtipUploadType = a})
-- | Multipart request metadata.
pjtipPayload :: Lens' ProjectsJobsTestIAMPermissions GoogleIAMV1__TestIAMPermissionsRequest
pjtipPayload
= lens _pjtipPayload (\ s a -> s{_pjtipPayload = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- See the operation documentation for the appropriate value for this
-- field.
pjtipResource :: Lens' ProjectsJobsTestIAMPermissions Text
pjtipResource
= lens _pjtipResource
(\ s a -> s{_pjtipResource = a})
-- | JSONP
pjtipCallback :: Lens' ProjectsJobsTestIAMPermissions (Maybe Text)
pjtipCallback
= lens _pjtipCallback
(\ s a -> s{_pjtipCallback = a})
instance GoogleRequest ProjectsJobsTestIAMPermissions
where
type Rs ProjectsJobsTestIAMPermissions =
GoogleIAMV1__TestIAMPermissionsResponse
type Scopes ProjectsJobsTestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsJobsTestIAMPermissions'{..}
= go _pjtipResource _pjtipXgafv _pjtipUploadProtocol
_pjtipAccessToken
_pjtipUploadType
_pjtipCallback
(Just AltJSON)
_pjtipPayload
machineLearningService
where go
= buildClient
(Proxy ::
Proxy ProjectsJobsTestIAMPermissionsResource)
mempty
| brendanhay/gogol | gogol-ml/gen/Network/Google/Resource/Ml/Projects/Jobs/TestIAMPermissions.hs | mpl-2.0 | 6,462 | 0 | 16 | 1,384 | 789 | 465 | 324 | 120 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.DoubleClickSearch.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.DoubleClickSearch.Types.Product where
import Network.Google.DoubleClickSearch.Types.Sum
import Network.Google.Prelude
-- | A row in a DoubleClick Search report.
--
-- /See:/ 'reportRow' smart constructor.
newtype ReportRow = ReportRow'
{ _rrAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRow' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrAddtional'
reportRow
:: HashMap Text JSONValue -- ^ 'rrAddtional'
-> ReportRow
reportRow pRrAddtional_ =
ReportRow'
{ _rrAddtional = _Coerce # pRrAddtional_
}
-- | Indicates the columns that are represented in this row. That is, each
-- key corresponds to a column with a non-empty cell in this row.
rrAddtional :: Lens' ReportRow (HashMap Text JSONValue)
rrAddtional
= lens _rrAddtional (\ s a -> s{_rrAddtional = a}) .
_Coerce
instance FromJSON ReportRow where
parseJSON
= withObject "ReportRow"
(\ o -> ReportRow' <$> (parseJSONObject o))
instance ToJSON ReportRow where
toJSON = toJSON . _rrAddtional
-- | A request object used to create a DoubleClick Search report.
--
-- /See:/ 'reportRequest' smart constructor.
data ReportRequest = ReportRequest'
{ _rrMaxRowsPerFile :: !(Maybe (Textual Int32))
, _rrReportScope :: !(Maybe ReportRequestReportScope)
, _rrStatisticsCurrency :: !(Maybe Text)
, _rrTimeRange :: !(Maybe ReportRequestTimeRange)
, _rrOrderBy :: !(Maybe [ReportRequestOrderByItem])
, _rrFilters :: !(Maybe [ReportRequestFiltersItem])
, _rrIncludeRemovedEntities :: !Bool
, _rrIncludeDeletedEntities :: !Bool
, _rrDownloadFormat :: !(Maybe Text)
, _rrStartRow :: !(Textual Int32)
, _rrColumns :: !(Maybe [ReportAPIColumnSpec])
, _rrReportType :: !(Maybe Text)
, _rrVerifySingleTimeZone :: !Bool
, _rrRowCount :: !(Textual Int32)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrMaxRowsPerFile'
--
-- * 'rrReportScope'
--
-- * 'rrStatisticsCurrency'
--
-- * 'rrTimeRange'
--
-- * 'rrOrderBy'
--
-- * 'rrFilters'
--
-- * 'rrIncludeRemovedEntities'
--
-- * 'rrIncludeDeletedEntities'
--
-- * 'rrDownloadFormat'
--
-- * 'rrStartRow'
--
-- * 'rrColumns'
--
-- * 'rrReportType'
--
-- * 'rrVerifySingleTimeZone'
--
-- * 'rrRowCount'
reportRequest
:: ReportRequest
reportRequest =
ReportRequest'
{ _rrMaxRowsPerFile = Nothing
, _rrReportScope = Nothing
, _rrStatisticsCurrency = Nothing
, _rrTimeRange = Nothing
, _rrOrderBy = Nothing
, _rrFilters = Nothing
, _rrIncludeRemovedEntities = False
, _rrIncludeDeletedEntities = False
, _rrDownloadFormat = Nothing
, _rrStartRow = 0
, _rrColumns = Nothing
, _rrReportType = Nothing
, _rrVerifySingleTimeZone = False
, _rrRowCount = 10000
}
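-- Example (sketch only): a minimal keyword report request built with the
-- smart constructor and the lenses below. The lens operators ('&'), ('.~')
-- are assumed to be in scope (e.g. from Control.Lens), and OverloadedStrings
-- is assumed for the literal report type and column name:
--
-- > keywordRequest :: ReportRequest
-- > keywordRequest = reportRequest
-- >     & rrReportType .~ Just "keyword"
-- >     & rrRowCount .~ 100
-- >     & rrColumns .~ [reportAPIColumnSpec & racsColumnName .~ Just "clicks"]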
-- | Asynchronous report only. The maximum number of rows per report file. A
-- large report is split into many files based on this field. Acceptable
-- values are 1000000 to 100000000, inclusive.
rrMaxRowsPerFile :: Lens' ReportRequest (Maybe Int32)
rrMaxRowsPerFile
= lens _rrMaxRowsPerFile
(\ s a -> s{_rrMaxRowsPerFile = a})
. mapping _Coerce
-- | The reportScope is a set of IDs that are used to determine which subset
-- of entities will be returned in the report. The full lineage of IDs from
-- the lowest scoped level desired up through agency is required.
rrReportScope :: Lens' ReportRequest (Maybe ReportRequestReportScope)
rrReportScope
= lens _rrReportScope
(\ s a -> s{_rrReportScope = a})
-- | Specifies the currency in which monetary values will be returned.
-- Possible values are: usd, agency (valid if the report is scoped to agency
-- or lower), advertiser (valid if the report is scoped to advertiser or
-- lower), or account (valid if the report is scoped to engine account or
-- lower).
rrStatisticsCurrency :: Lens' ReportRequest (Maybe Text)
rrStatisticsCurrency
= lens _rrStatisticsCurrency
(\ s a -> s{_rrStatisticsCurrency = a})
-- | If metrics are requested in a report, this argument will be used to
-- restrict the metrics to a specific time range.
rrTimeRange :: Lens' ReportRequest (Maybe ReportRequestTimeRange)
rrTimeRange
= lens _rrTimeRange (\ s a -> s{_rrTimeRange = a})
-- | Synchronous report only. A list of columns and directions defining
-- sorting to be performed on the report rows.
rrOrderBy :: Lens' ReportRequest [ReportRequestOrderByItem]
rrOrderBy
= lens _rrOrderBy (\ s a -> s{_rrOrderBy = a}) .
_Default
. _Coerce
-- | A list of filters to be applied to the report.
rrFilters :: Lens' ReportRequest [ReportRequestFiltersItem]
rrFilters
= lens _rrFilters (\ s a -> s{_rrFilters = a}) .
_Default
. _Coerce
-- | Determines if removed entities should be included in the report.
-- Defaults to false.
rrIncludeRemovedEntities :: Lens' ReportRequest Bool
rrIncludeRemovedEntities
= lens _rrIncludeRemovedEntities
(\ s a -> s{_rrIncludeRemovedEntities = a})
-- | Determines if removed entities should be included in the report.
-- Defaults to false. Deprecated, please use includeRemovedEntities
-- instead.
rrIncludeDeletedEntities :: Lens' ReportRequest Bool
rrIncludeDeletedEntities
= lens _rrIncludeDeletedEntities
(\ s a -> s{_rrIncludeDeletedEntities = a})
-- | Format that the report should be returned in. Currently csv or tsv is
-- supported.
rrDownloadFormat :: Lens' ReportRequest (Maybe Text)
rrDownloadFormat
= lens _rrDownloadFormat
(\ s a -> s{_rrDownloadFormat = a})
-- | Synchronous report only. Zero-based index of the first row to return.
-- Acceptable values are 0 to 50000, inclusive. Defaults to 0.
rrStartRow :: Lens' ReportRequest Int32
rrStartRow
= lens _rrStartRow (\ s a -> s{_rrStartRow = a}) .
_Coerce
-- | The columns to include in the report. This includes both DoubleClick
-- Search columns and saved columns. For DoubleClick Search columns, only
-- the columnName parameter is required. For saved columns, only the
-- savedColumnName parameter is required. columnName and savedColumnName
-- cannot both be set in the same stanza.
rrColumns :: Lens' ReportRequest [ReportAPIColumnSpec]
rrColumns
= lens _rrColumns (\ s a -> s{_rrColumns = a}) .
_Default
. _Coerce
-- | Determines the type of rows that are returned in the report. For
-- example, if you specify reportType: keyword, each row in the report will
-- contain data about a keyword. See the Types of Reports reference for the
-- columns that are available for each type.
rrReportType :: Lens' ReportRequest (Maybe Text)
rrReportType
= lens _rrReportType (\ s a -> s{_rrReportType = a})
-- | If true, the report would only be created if all the requested stat data
-- are sourced from a single timezone. Defaults to false.
rrVerifySingleTimeZone :: Lens' ReportRequest Bool
rrVerifySingleTimeZone
= lens _rrVerifySingleTimeZone
(\ s a -> s{_rrVerifySingleTimeZone = a})
-- | Synchronous report only. The maximum number of rows to return;
-- additional rows are dropped. Acceptable values are 0 to 10000,
-- inclusive. Defaults to 10000.
rrRowCount :: Lens' ReportRequest Int32
rrRowCount
= lens _rrRowCount (\ s a -> s{_rrRowCount = a}) .
_Coerce
instance FromJSON ReportRequest where
parseJSON
= withObject "ReportRequest"
(\ o ->
ReportRequest' <$>
(o .:? "maxRowsPerFile") <*> (o .:? "reportScope")
<*> (o .:? "statisticsCurrency")
<*> (o .:? "timeRange")
<*> (o .:? "orderBy" .!= mempty)
<*> (o .:? "filters" .!= mempty)
<*> (o .:? "includeRemovedEntities" .!= False)
<*> (o .:? "includeDeletedEntities" .!= False)
<*> (o .:? "downloadFormat")
<*> (o .:? "startRow" .!= 0)
<*> (o .:? "columns" .!= mempty)
<*> (o .:? "reportType")
<*> (o .:? "verifySingleTimeZone" .!= False)
<*> (o .:? "rowCount" .!= 10000))
instance ToJSON ReportRequest where
toJSON ReportRequest'{..}
= object
(catMaybes
[("maxRowsPerFile" .=) <$> _rrMaxRowsPerFile,
("reportScope" .=) <$> _rrReportScope,
("statisticsCurrency" .=) <$> _rrStatisticsCurrency,
("timeRange" .=) <$> _rrTimeRange,
("orderBy" .=) <$> _rrOrderBy,
("filters" .=) <$> _rrFilters,
Just
("includeRemovedEntities" .=
_rrIncludeRemovedEntities),
Just
("includeDeletedEntities" .=
_rrIncludeDeletedEntities),
("downloadFormat" .=) <$> _rrDownloadFormat,
Just ("startRow" .= _rrStartRow),
("columns" .=) <$> _rrColumns,
("reportType" .=) <$> _rrReportType,
Just
("verifySingleTimeZone" .= _rrVerifySingleTimeZone),
Just ("rowCount" .= _rrRowCount)])
--
-- /See:/ 'reportRequestOrderByItem' smart constructor.
data ReportRequestOrderByItem = ReportRequestOrderByItem'
{ _rrobiSortOrder :: !(Maybe Text)
, _rrobiColumn :: !(Maybe ReportAPIColumnSpec)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRequestOrderByItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrobiSortOrder'
--
-- * 'rrobiColumn'
reportRequestOrderByItem
:: ReportRequestOrderByItem
reportRequestOrderByItem =
ReportRequestOrderByItem'
{ _rrobiSortOrder = Nothing
, _rrobiColumn = Nothing
}
-- | The sort direction, which is either ascending or descending.
rrobiSortOrder :: Lens' ReportRequestOrderByItem (Maybe Text)
rrobiSortOrder
= lens _rrobiSortOrder
(\ s a -> s{_rrobiSortOrder = a})
-- | Column to perform the sort on. This can be a DoubleClick Search-defined
-- column or a saved column.
rrobiColumn :: Lens' ReportRequestOrderByItem (Maybe ReportAPIColumnSpec)
rrobiColumn
= lens _rrobiColumn (\ s a -> s{_rrobiColumn = a})
instance FromJSON ReportRequestOrderByItem where
parseJSON
= withObject "ReportRequestOrderByItem"
(\ o ->
ReportRequestOrderByItem' <$>
(o .:? "sortOrder") <*> (o .:? "column"))
instance ToJSON ReportRequestOrderByItem where
toJSON ReportRequestOrderByItem'{..}
= object
(catMaybes
[("sortOrder" .=) <$> _rrobiSortOrder,
("column" .=) <$> _rrobiColumn])
-- | A DoubleClick Search report. This object contains the report request,
-- some report metadata such as currency code, and the generated report
-- rows or report files.
--
-- /See:/ 'report' smart constructor.
data Report = Report'
{ _rKind :: !Text
, _rRows :: !(Maybe [ReportRow])
, _rStatisticsCurrencyCode :: !(Maybe Text)
, _rIsReportReady :: !(Maybe Bool)
, _rFiles :: !(Maybe [ReportFilesItem])
, _rId :: !(Maybe Text)
, _rStatisticsTimeZone :: !(Maybe Text)
, _rRowCount :: !(Maybe (Textual Int32))
, _rRequest :: !(Maybe ReportRequest)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Report' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rKind'
--
-- * 'rRows'
--
-- * 'rStatisticsCurrencyCode'
--
-- * 'rIsReportReady'
--
-- * 'rFiles'
--
-- * 'rId'
--
-- * 'rStatisticsTimeZone'
--
-- * 'rRowCount'
--
-- * 'rRequest'
report
:: Report
report =
Report'
{ _rKind = "doubleclicksearch#report"
, _rRows = Nothing
, _rStatisticsCurrencyCode = Nothing
, _rIsReportReady = Nothing
, _rFiles = Nothing
, _rId = Nothing
, _rStatisticsTimeZone = Nothing
, _rRowCount = Nothing
, _rRequest = Nothing
}
-- | Identifies this as a Report resource. Value: the fixed string
-- doubleclicksearch#report.
rKind :: Lens' Report Text
rKind = lens _rKind (\ s a -> s{_rKind = a})
-- | Synchronous report only. Generated report rows.
rRows :: Lens' Report [ReportRow]
rRows
= lens _rRows (\ s a -> s{_rRows = a}) . _Default .
_Coerce
-- | The currency code of all monetary values produced in the report,
-- including values that are set by users (e.g., keyword bid settings) and
-- metrics (e.g., cost and revenue). The currency code of a report is
-- determined by the statisticsCurrency field of the report request.
rStatisticsCurrencyCode :: Lens' Report (Maybe Text)
rStatisticsCurrencyCode
= lens _rStatisticsCurrencyCode
(\ s a -> s{_rStatisticsCurrencyCode = a})
-- | Asynchronous report only. True if and only if the report has completed
-- successfully and the report files are ready to be downloaded.
rIsReportReady :: Lens' Report (Maybe Bool)
rIsReportReady
= lens _rIsReportReady
(\ s a -> s{_rIsReportReady = a})
-- | Asynchronous report only. Contains a list of generated report files once
-- the report has successfully completed.
rFiles :: Lens' Report [ReportFilesItem]
rFiles
= lens _rFiles (\ s a -> s{_rFiles = a}) . _Default .
_Coerce
-- | Asynchronous report only. Id of the report.
rId :: Lens' Report (Maybe Text)
rId = lens _rId (\ s a -> s{_rId = a})
-- | If all statistics of the report are sourced from the same time zone,
-- this would be it. Otherwise the field is unset.
rStatisticsTimeZone :: Lens' Report (Maybe Text)
rStatisticsTimeZone
= lens _rStatisticsTimeZone
(\ s a -> s{_rStatisticsTimeZone = a})
-- | The number of report rows generated by the report, not including
-- headers.
rRowCount :: Lens' Report (Maybe Int32)
rRowCount
= lens _rRowCount (\ s a -> s{_rRowCount = a}) .
mapping _Coerce
-- | The request that created the report. Optional fields not specified in
-- the original request are filled with default values.
rRequest :: Lens' Report (Maybe ReportRequest)
rRequest = lens _rRequest (\ s a -> s{_rRequest = a})
instance FromJSON Report where
parseJSON
= withObject "Report"
(\ o ->
Report' <$>
(o .:? "kind" .!= "doubleclicksearch#report") <*>
(o .:? "rows" .!= mempty)
<*> (o .:? "statisticsCurrencyCode")
<*> (o .:? "isReportReady")
<*> (o .:? "files" .!= mempty)
<*> (o .:? "id")
<*> (o .:? "statisticsTimeZone")
<*> (o .:? "rowCount")
<*> (o .:? "request"))
instance ToJSON Report where
toJSON Report'{..}
= object
(catMaybes
[Just ("kind" .= _rKind), ("rows" .=) <$> _rRows,
("statisticsCurrencyCode" .=) <$>
_rStatisticsCurrencyCode,
("isReportReady" .=) <$> _rIsReportReady,
("files" .=) <$> _rFiles, ("id" .=) <$> _rId,
("statisticsTimeZone" .=) <$> _rStatisticsTimeZone,
("rowCount" .=) <$> _rRowCount,
("request" .=) <$> _rRequest])
--
-- /See:/ 'reportFilesItem' smart constructor.
data ReportFilesItem = ReportFilesItem'
{ _rfiURL :: !(Maybe Text)
, _rfiByteCount :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportFilesItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rfiURL'
--
-- * 'rfiByteCount'
reportFilesItem
:: ReportFilesItem
reportFilesItem =
ReportFilesItem'
{ _rfiURL = Nothing
, _rfiByteCount = Nothing
}
-- | Use this url to download the report file.
rfiURL :: Lens' ReportFilesItem (Maybe Text)
rfiURL = lens _rfiURL (\ s a -> s{_rfiURL = a})
-- | The size of this report file in bytes.
rfiByteCount :: Lens' ReportFilesItem (Maybe Int64)
rfiByteCount
= lens _rfiByteCount (\ s a -> s{_rfiByteCount = a})
. mapping _Coerce
instance FromJSON ReportFilesItem where
parseJSON
= withObject "ReportFilesItem"
(\ o ->
ReportFilesItem' <$>
(o .:? "url") <*> (o .:? "byteCount"))
instance ToJSON ReportFilesItem where
toJSON ReportFilesItem'{..}
= object
(catMaybes
[("url" .=) <$> _rfiURL,
("byteCount" .=) <$> _rfiByteCount])
--
-- /See:/ 'reportRequestFiltersItem' smart constructor.
data ReportRequestFiltersItem = ReportRequestFiltersItem'
{ _rrfiOperator :: !(Maybe Text)
, _rrfiValues :: !(Maybe [JSONValue])
, _rrfiColumn :: !(Maybe ReportAPIColumnSpec)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRequestFiltersItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrfiOperator'
--
-- * 'rrfiValues'
--
-- * 'rrfiColumn'
reportRequestFiltersItem
:: ReportRequestFiltersItem
reportRequestFiltersItem =
ReportRequestFiltersItem'
{ _rrfiOperator = Nothing
, _rrfiValues = Nothing
, _rrfiColumn = Nothing
}
-- | Operator to use in the filter. See the filter reference for a list of
-- available operators.
rrfiOperator :: Lens' ReportRequestFiltersItem (Maybe Text)
rrfiOperator
= lens _rrfiOperator (\ s a -> s{_rrfiOperator = a})
-- | A list of values to filter the column value against.
rrfiValues :: Lens' ReportRequestFiltersItem [JSONValue]
rrfiValues
= lens _rrfiValues (\ s a -> s{_rrfiValues = a}) .
_Default
. _Coerce
-- | Column to perform the filter on. This can be a DoubleClick Search column
-- or a saved column.
rrfiColumn :: Lens' ReportRequestFiltersItem (Maybe ReportAPIColumnSpec)
rrfiColumn
= lens _rrfiColumn (\ s a -> s{_rrfiColumn = a})
instance FromJSON ReportRequestFiltersItem where
parseJSON
= withObject "ReportRequestFiltersItem"
(\ o ->
ReportRequestFiltersItem' <$>
(o .:? "operator") <*> (o .:? "values" .!= mempty)
<*> (o .:? "column"))
instance ToJSON ReportRequestFiltersItem where
toJSON ReportRequestFiltersItem'{..}
= object
(catMaybes
[("operator" .=) <$> _rrfiOperator,
("values" .=) <$> _rrfiValues,
("column" .=) <$> _rrfiColumn])
-- | A message containing availability data relevant to DoubleClick Search.
--
-- /See:/ 'availability' smart constructor.
data Availability = Availability'
{ _aAgencyId :: !(Maybe (Textual Int64))
, _aAdvertiserId :: !(Maybe (Textual Int64))
, _aSegmentationId :: !(Maybe (Textual Int64))
, _aSegmentationName :: !(Maybe Text)
, _aAvailabilityTimestamp :: !(Maybe (Textual Word64))
, _aSegmentationType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Availability' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aAgencyId'
--
-- * 'aAdvertiserId'
--
-- * 'aSegmentationId'
--
-- * 'aSegmentationName'
--
-- * 'aAvailabilityTimestamp'
--
-- * 'aSegmentationType'
availability
:: Availability
availability =
Availability'
{ _aAgencyId = Nothing
, _aAdvertiserId = Nothing
, _aSegmentationId = Nothing
, _aSegmentationName = Nothing
, _aAvailabilityTimestamp = Nothing
, _aSegmentationType = Nothing
}
-- | DS agency ID.
aAgencyId :: Lens' Availability (Maybe Int64)
aAgencyId
= lens _aAgencyId (\ s a -> s{_aAgencyId = a}) .
mapping _Coerce
-- | DS advertiser ID.
aAdvertiserId :: Lens' Availability (Maybe Int64)
aAdvertiserId
= lens _aAdvertiserId
(\ s a -> s{_aAdvertiserId = a})
. mapping _Coerce
-- | The numeric segmentation identifier (for example, DoubleClick Search
-- Floodlight activity ID).
aSegmentationId :: Lens' Availability (Maybe Int64)
aSegmentationId
= lens _aSegmentationId
(\ s a -> s{_aSegmentationId = a})
. mapping _Coerce
-- | The friendly segmentation identifier (for example, DoubleClick Search
-- Floodlight activity name).
aSegmentationName :: Lens' Availability (Maybe Text)
aSegmentationName
= lens _aSegmentationName
(\ s a -> s{_aSegmentationName = a})
-- | The time by which all conversions have been uploaded, in epoch millis
-- UTC.
aAvailabilityTimestamp :: Lens' Availability (Maybe Word64)
aAvailabilityTimestamp
= lens _aAvailabilityTimestamp
(\ s a -> s{_aAvailabilityTimestamp = a})
. mapping _Coerce
-- | The segmentation type that this availability is for (its default value
-- is FLOODLIGHT).
aSegmentationType :: Lens' Availability (Maybe Text)
aSegmentationType
= lens _aSegmentationType
(\ s a -> s{_aSegmentationType = a})
instance FromJSON Availability where
parseJSON
= withObject "Availability"
(\ o ->
Availability' <$>
(o .:? "agencyId") <*> (o .:? "advertiserId") <*>
(o .:? "segmentationId")
<*> (o .:? "segmentationName")
<*> (o .:? "availabilityTimestamp")
<*> (o .:? "segmentationType"))
instance ToJSON Availability where
toJSON Availability'{..}
= object
(catMaybes
[("agencyId" .=) <$> _aAgencyId,
("advertiserId" .=) <$> _aAdvertiserId,
("segmentationId" .=) <$> _aSegmentationId,
("segmentationName" .=) <$> _aSegmentationName,
("availabilityTimestamp" .=) <$>
_aAvailabilityTimestamp,
("segmentationType" .=) <$> _aSegmentationType])
-- | The request to update availability.
--
-- /See:/ 'updateAvailabilityRequest' smart constructor.
newtype UpdateAvailabilityRequest = UpdateAvailabilityRequest'
{ _uarAvailabilities :: Maybe [Availability]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateAvailabilityRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uarAvailabilities'
updateAvailabilityRequest
:: UpdateAvailabilityRequest
updateAvailabilityRequest =
UpdateAvailabilityRequest'
{ _uarAvailabilities = Nothing
}
-- | The availabilities being requested.
uarAvailabilities :: Lens' UpdateAvailabilityRequest [Availability]
uarAvailabilities
= lens _uarAvailabilities
(\ s a -> s{_uarAvailabilities = a})
. _Default
. _Coerce
instance FromJSON UpdateAvailabilityRequest where
parseJSON
= withObject "UpdateAvailabilityRequest"
(\ o ->
UpdateAvailabilityRequest' <$>
(o .:? "availabilities" .!= mempty))
instance ToJSON UpdateAvailabilityRequest where
toJSON UpdateAvailabilityRequest'{..}
= object
(catMaybes
[("availabilities" .=) <$> _uarAvailabilities])
-- | A message containing the custom metric.
--
-- /See:/ 'customMetric' smart constructor.
data CustomMetric = CustomMetric'
{ _cmValue :: !(Maybe (Textual Double))
, _cmName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CustomMetric' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cmValue'
--
-- * 'cmName'
customMetric
:: CustomMetric
customMetric =
CustomMetric'
{ _cmValue = Nothing
, _cmName = Nothing
}
-- | Custom metric numeric value.
cmValue :: Lens' CustomMetric (Maybe Double)
cmValue
= lens _cmValue (\ s a -> s{_cmValue = a}) .
mapping _Coerce
-- | Custom metric name.
cmName :: Lens' CustomMetric (Maybe Text)
cmName = lens _cmName (\ s a -> s{_cmName = a})
instance FromJSON CustomMetric where
parseJSON
= withObject "CustomMetric"
(\ o ->
CustomMetric' <$> (o .:? "value") <*> (o .:? "name"))
instance ToJSON CustomMetric where
toJSON CustomMetric'{..}
= object
(catMaybes
[("value" .=) <$> _cmValue, ("name" .=) <$> _cmName])
-- | A list of conversions.
--
-- /See:/ 'conversionList' smart constructor.
data ConversionList = ConversionList'
{ _clKind :: !Text
, _clConversion :: !(Maybe [Conversion])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ConversionList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'clKind'
--
-- * 'clConversion'
conversionList
:: ConversionList
conversionList =
ConversionList'
{ _clKind = "doubleclicksearch#conversionList"
, _clConversion = Nothing
}
-- | Identifies this as a ConversionList resource. Value: the fixed string
-- doubleclicksearch#conversionList.
clKind :: Lens' ConversionList Text
clKind = lens _clKind (\ s a -> s{_clKind = a})
-- | The conversions being requested.
clConversion :: Lens' ConversionList [Conversion]
clConversion
= lens _clConversion (\ s a -> s{_clConversion = a})
. _Default
. _Coerce
instance FromJSON ConversionList where
parseJSON
= withObject "ConversionList"
(\ o ->
ConversionList' <$>
(o .:? "kind" .!= "doubleclicksearch#conversionList")
<*> (o .:? "conversion" .!= mempty))
instance ToJSON ConversionList where
toJSON ConversionList'{..}
= object
(catMaybes
[Just ("kind" .= _clKind),
("conversion" .=) <$> _clConversion])
-- | A specification of a column to include in a DoubleClick Search report.
--
-- /See:/ 'reportAPIColumnSpec' smart constructor.
data ReportAPIColumnSpec = ReportAPIColumnSpec'
{ _racsCustomDimensionName :: !(Maybe Text)
, _racsSavedColumnName :: !(Maybe Text)
, _racsGroupByColumn :: !Bool
, _racsCustomMetricName :: !(Maybe Text)
, _racsEndDate :: !(Maybe Text)
, _racsProductReportPerspective :: !(Maybe Text)
, _racsStartDate :: !(Maybe Text)
, _racsHeaderText :: !(Maybe Text)
, _racsPlatformSource :: !(Maybe Text)
, _racsColumnName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportAPIColumnSpec' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'racsCustomDimensionName'
--
-- * 'racsSavedColumnName'
--
-- * 'racsGroupByColumn'
--
-- * 'racsCustomMetricName'
--
-- * 'racsEndDate'
--
-- * 'racsProductReportPerspective'
--
-- * 'racsStartDate'
--
-- * 'racsHeaderText'
--
-- * 'racsPlatformSource'
--
-- * 'racsColumnName'
reportAPIColumnSpec
:: ReportAPIColumnSpec
reportAPIColumnSpec =
ReportAPIColumnSpec'
{ _racsCustomDimensionName = Nothing
, _racsSavedColumnName = Nothing
, _racsGroupByColumn = False
, _racsCustomMetricName = Nothing
, _racsEndDate = Nothing
, _racsProductReportPerspective = Nothing
, _racsStartDate = Nothing
, _racsHeaderText = Nothing
, _racsPlatformSource = Nothing
, _racsColumnName = Nothing
}
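-- Example (sketch only): a DoubleClick Search column and a saved column,
-- built with the lenses below (lens operators and OverloadedStrings assumed
-- in scope; the saved column name is a placeholder):
--
-- > clicksColumn = reportAPIColumnSpec & racsColumnName .~ Just "clicks"
-- > savedColumn  = reportAPIColumnSpec & racsSavedColumnName .~ Just "myColumn"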
-- | Segments a report by a custom dimension. The report must be scoped to an
-- advertiser or lower, and the custom dimension must already be set up in
-- DoubleClick Search. The custom dimension name, which appears in
-- DoubleClick Search, is case sensitive. If used in a conversion report,
-- returns the value of the specified custom dimension for the given
-- conversion, if set. This column does not segment the conversion report.
racsCustomDimensionName :: Lens' ReportAPIColumnSpec (Maybe Text)
racsCustomDimensionName
= lens _racsCustomDimensionName
(\ s a -> s{_racsCustomDimensionName = a})
-- | Name of a saved column to include in the report. The report must be
-- scoped at advertiser or lower, and this saved column must already be
-- created in the DoubleClick Search UI.
racsSavedColumnName :: Lens' ReportAPIColumnSpec (Maybe Text)
racsSavedColumnName
= lens _racsSavedColumnName
(\ s a -> s{_racsSavedColumnName = a})
-- | Synchronous report only. Set to true to group by this column. Defaults
-- to false.
racsGroupByColumn :: Lens' ReportAPIColumnSpec Bool
racsGroupByColumn
= lens _racsGroupByColumn
(\ s a -> s{_racsGroupByColumn = a})
-- | Name of a custom metric to include in the report. The report must be
-- scoped to an advertiser or lower, and the custom metric must already be
-- set up in DoubleClick Search. The custom metric name, which appears in
-- DoubleClick Search, is case sensitive.
racsCustomMetricName :: Lens' ReportAPIColumnSpec (Maybe Text)
racsCustomMetricName
= lens _racsCustomMetricName
(\ s a -> s{_racsCustomMetricName = a})
-- | Inclusive day in YYYY-MM-DD format. When provided, this overrides the
-- overall time range of the report for this column only. Must be provided
-- together with startDate.
racsEndDate :: Lens' ReportAPIColumnSpec (Maybe Text)
racsEndDate
= lens _racsEndDate (\ s a -> s{_racsEndDate = a})
-- | Returns metrics only for a specific type of product activity. Accepted
-- values are: - \"sold\": returns metrics only for products that were sold
-- - \"advertised\": returns metrics only for products that were advertised
-- in a Shopping campaign, and that might or might not have been sold
racsProductReportPerspective :: Lens' ReportAPIColumnSpec (Maybe Text)
racsProductReportPerspective
= lens _racsProductReportPerspective
(\ s a -> s{_racsProductReportPerspective = a})
-- | Inclusive date in YYYY-MM-DD format. When provided, this overrides the
-- overall time range of the report for this column only. Must be provided
-- together with endDate.
racsStartDate :: Lens' ReportAPIColumnSpec (Maybe Text)
racsStartDate
= lens _racsStartDate
(\ s a -> s{_racsStartDate = a})
-- | Text used to identify this column in the report output; defaults to
-- columnName or savedColumnName when not specified. This can be used to
-- prevent collisions between DoubleClick Search columns and saved columns
-- with the same name.
racsHeaderText :: Lens' ReportAPIColumnSpec (Maybe Text)
racsHeaderText
= lens _racsHeaderText
(\ s a -> s{_racsHeaderText = a})
-- | The platform that is used to provide data for the custom dimension.
-- Acceptable values are \"floodlight\".
racsPlatformSource :: Lens' ReportAPIColumnSpec (Maybe Text)
racsPlatformSource
= lens _racsPlatformSource
(\ s a -> s{_racsPlatformSource = a})
-- | Name of a DoubleClick Search column to include in the report.
racsColumnName :: Lens' ReportAPIColumnSpec (Maybe Text)
racsColumnName
= lens _racsColumnName
(\ s a -> s{_racsColumnName = a})
instance FromJSON ReportAPIColumnSpec where
parseJSON
= withObject "ReportAPIColumnSpec"
(\ o ->
ReportAPIColumnSpec' <$>
(o .:? "customDimensionName") <*>
(o .:? "savedColumnName")
<*> (o .:? "groupByColumn" .!= False)
<*> (o .:? "customMetricName")
<*> (o .:? "endDate")
<*> (o .:? "productReportPerspective")
<*> (o .:? "startDate")
<*> (o .:? "headerText")
<*> (o .:? "platformSource")
<*> (o .:? "columnName"))
instance ToJSON ReportAPIColumnSpec where
toJSON ReportAPIColumnSpec'{..}
= object
(catMaybes
[("customDimensionName" .=) <$>
_racsCustomDimensionName,
("savedColumnName" .=) <$> _racsSavedColumnName,
Just ("groupByColumn" .= _racsGroupByColumn),
("customMetricName" .=) <$> _racsCustomMetricName,
("endDate" .=) <$> _racsEndDate,
("productReportPerspective" .=) <$>
_racsProductReportPerspective,
("startDate" .=) <$> _racsStartDate,
("headerText" .=) <$> _racsHeaderText,
("platformSource" .=) <$> _racsPlatformSource,
("columnName" .=) <$> _racsColumnName])
-- | If metrics are requested in a report, this argument will be used to
-- restrict the metrics to a specific time range.
--
-- /See:/ 'reportRequestTimeRange' smart constructor.
data ReportRequestTimeRange = ReportRequestTimeRange'
{ _rrtrEndDate :: !(Maybe Text)
, _rrtrChangedAttributesSinceTimestamp :: !(Maybe DateTime')
, _rrtrStartDate :: !(Maybe Text)
, _rrtrChangedMetricsSinceTimestamp :: !(Maybe DateTime')
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRequestTimeRange' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrtrEndDate'
--
-- * 'rrtrChangedAttributesSinceTimestamp'
--
-- * 'rrtrStartDate'
--
-- * 'rrtrChangedMetricsSinceTimestamp'
reportRequestTimeRange
:: ReportRequestTimeRange
reportRequestTimeRange =
ReportRequestTimeRange'
{ _rrtrEndDate = Nothing
, _rrtrChangedAttributesSinceTimestamp = Nothing
, _rrtrStartDate = Nothing
, _rrtrChangedMetricsSinceTimestamp = Nothing
}
-- | Inclusive date in YYYY-MM-DD format.
rrtrEndDate :: Lens' ReportRequestTimeRange (Maybe Text)
rrtrEndDate
= lens _rrtrEndDate (\ s a -> s{_rrtrEndDate = a})
-- | Inclusive UTC timestamp in RFC format, e.g., 2013-07-16T10:16:23.555Z.
-- See additional references on how changed attribute reports work.
rrtrChangedAttributesSinceTimestamp :: Lens' ReportRequestTimeRange (Maybe UTCTime)
rrtrChangedAttributesSinceTimestamp
= lens _rrtrChangedAttributesSinceTimestamp
(\ s a ->
s{_rrtrChangedAttributesSinceTimestamp = a})
. mapping _DateTime
-- | Inclusive date in YYYY-MM-DD format.
rrtrStartDate :: Lens' ReportRequestTimeRange (Maybe Text)
rrtrStartDate
= lens _rrtrStartDate
(\ s a -> s{_rrtrStartDate = a})
-- | Inclusive UTC timestamp in RFC format, e.g., 2013-07-16T10:16:23.555Z.
-- See additional references on how changed metrics reports work.
rrtrChangedMetricsSinceTimestamp :: Lens' ReportRequestTimeRange (Maybe UTCTime)
rrtrChangedMetricsSinceTimestamp
= lens _rrtrChangedMetricsSinceTimestamp
(\ s a -> s{_rrtrChangedMetricsSinceTimestamp = a})
. mapping _DateTime
instance FromJSON ReportRequestTimeRange where
parseJSON
= withObject "ReportRequestTimeRange"
(\ o ->
ReportRequestTimeRange' <$>
(o .:? "endDate") <*>
(o .:? "changedAttributesSinceTimestamp")
<*> (o .:? "startDate")
<*> (o .:? "changedMetricsSinceTimestamp"))
instance ToJSON ReportRequestTimeRange where
toJSON ReportRequestTimeRange'{..}
= object
(catMaybes
[("endDate" .=) <$> _rrtrEndDate,
("changedAttributesSinceTimestamp" .=) <$>
_rrtrChangedAttributesSinceTimestamp,
("startDate" .=) <$> _rrtrStartDate,
("changedMetricsSinceTimestamp" .=) <$>
_rrtrChangedMetricsSinceTimestamp])
-- | A conversion containing data relevant to DoubleClick Search.
--
-- /See:/ 'conversion' smart constructor.
data Conversion = Conversion'
{ _cAdGroupId :: !(Maybe (Textual Int64))
, _cConversionModifiedTimestamp :: !(Maybe (Textual Word64))
, _cState :: !(Maybe Text)
, _cEngineAccountId :: !(Maybe (Textual Int64))
, _cAgencyId :: !(Maybe (Textual Int64))
, _cCurrencyCode :: !(Maybe Text)
, _cStoreId :: !(Maybe Text)
, _cDsConversionId :: !(Maybe (Textual Int64))
, _cConversionId :: !(Maybe Text)
, _cAdvertiserId :: !(Maybe (Textual Int64))
, _cSegmentationId :: !(Maybe (Textual Int64))
, _cChannel :: !(Maybe Text)
, _cProductCountry :: !(Maybe Text)
, _cCampaignId :: !(Maybe (Textual Int64))
, _cCriterionId :: !(Maybe (Textual Int64))
, _cConversionTimestamp :: !(Maybe (Textual Word64))
, _cAttributionModel :: !(Maybe Text)
, _cSegmentationName :: !(Maybe Text)
, _cProductLanguage :: !(Maybe Text)
, _cCustomMetric :: !(Maybe [CustomMetric])
, _cCountMillis :: !(Maybe (Textual Int64))
, _cQuantityMillis :: !(Maybe (Textual Int64))
, _cAdId :: !(Maybe (Textual Int64))
, _cDeviceType :: !(Maybe Text)
, _cType :: !(Maybe Text)
, _cCustomDimension :: !(Maybe [CustomDimension])
, _cFloodlightOrderId :: !(Maybe Text)
, _cRevenueMicros :: !(Maybe (Textual Int64))
, _cClickId :: !(Maybe Text)
, _cInventoryAccountId :: !(Maybe (Textual Int64))
, _cSegmentationType :: !(Maybe Text)
, _cProductId :: !(Maybe Text)
, _cProductGroupId :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Conversion' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cAdGroupId'
--
-- * 'cConversionModifiedTimestamp'
--
-- * 'cState'
--
-- * 'cEngineAccountId'
--
-- * 'cAgencyId'
--
-- * 'cCurrencyCode'
--
-- * 'cStoreId'
--
-- * 'cDsConversionId'
--
-- * 'cConversionId'
--
-- * 'cAdvertiserId'
--
-- * 'cSegmentationId'
--
-- * 'cChannel'
--
-- * 'cProductCountry'
--
-- * 'cCampaignId'
--
-- * 'cCriterionId'
--
-- * 'cConversionTimestamp'
--
-- * 'cAttributionModel'
--
-- * 'cSegmentationName'
--
-- * 'cProductLanguage'
--
-- * 'cCustomMetric'
--
-- * 'cCountMillis'
--
-- * 'cQuantityMillis'
--
-- * 'cAdId'
--
-- * 'cDeviceType'
--
-- * 'cType'
--
-- * 'cCustomDimension'
--
-- * 'cFloodlightOrderId'
--
-- * 'cRevenueMicros'
--
-- * 'cClickId'
--
-- * 'cInventoryAccountId'
--
-- * 'cSegmentationType'
--
-- * 'cProductId'
--
-- * 'cProductGroupId'
conversion
:: Conversion
conversion =
Conversion'
{ _cAdGroupId = Nothing
, _cConversionModifiedTimestamp = Nothing
, _cState = Nothing
, _cEngineAccountId = Nothing
, _cAgencyId = Nothing
, _cCurrencyCode = Nothing
, _cStoreId = Nothing
, _cDsConversionId = Nothing
, _cConversionId = Nothing
, _cAdvertiserId = Nothing
, _cSegmentationId = Nothing
, _cChannel = Nothing
, _cProductCountry = Nothing
, _cCampaignId = Nothing
, _cCriterionId = Nothing
, _cConversionTimestamp = Nothing
, _cAttributionModel = Nothing
, _cSegmentationName = Nothing
, _cProductLanguage = Nothing
, _cCustomMetric = Nothing
, _cCountMillis = Nothing
, _cQuantityMillis = Nothing
, _cAdId = Nothing
, _cDeviceType = Nothing
, _cType = Nothing
, _cCustomDimension = Nothing
, _cFloodlightOrderId = Nothing
, _cRevenueMicros = Nothing
, _cClickId = Nothing
, _cInventoryAccountId = Nothing
, _cSegmentationType = Nothing
, _cProductId = Nothing
, _cProductGroupId = Nothing
}
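-- A usage sketch (not part of the generated API; the field values below are
-- purely illustrative): start from the empty smart constructor and fill in
-- fields through the lenses that follow, using the lens operators re-exported
-- by this package's prelude.
--
-- > conversion & cClickId ?~ "example-click-id"
-- >            & cConversionId ?~ "example-conversion-1"
-- >            & cConversionTimestamp ?~ 1478204400000
-- >            & cRevenueMicros ?~ 10000000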
-- | DS ad group ID.
cAdGroupId :: Lens' Conversion (Maybe Int64)
cAdGroupId
= lens _cAdGroupId (\ s a -> s{_cAdGroupId = a}) .
mapping _Coerce
-- | The time at which the conversion was last modified, in epoch millis UTC.
cConversionModifiedTimestamp :: Lens' Conversion (Maybe Word64)
cConversionModifiedTimestamp
= lens _cConversionModifiedTimestamp
(\ s a -> s{_cConversionModifiedTimestamp = a})
. mapping _Coerce
-- | The state of the conversion, that is, either ACTIVE or REMOVED. Note:
-- state DELETED is deprecated.
cState :: Lens' Conversion (Maybe Text)
cState = lens _cState (\ s a -> s{_cState = a})
-- | DS engine account ID.
cEngineAccountId :: Lens' Conversion (Maybe Int64)
cEngineAccountId
= lens _cEngineAccountId
(\ s a -> s{_cEngineAccountId = a})
. mapping _Coerce
-- | DS agency ID.
cAgencyId :: Lens' Conversion (Maybe Int64)
cAgencyId
= lens _cAgencyId (\ s a -> s{_cAgencyId = a}) .
mapping _Coerce
-- | The currency code for the conversion\'s revenue. Should be in ISO 4217
-- alphabetic (3-char) format.
cCurrencyCode :: Lens' Conversion (Maybe Text)
cCurrencyCode
= lens _cCurrencyCode
(\ s a -> s{_cCurrencyCode = a})
-- | The ID of the local store for which the product was advertised.
-- Applicable only when the channel is \"local\".
cStoreId :: Lens' Conversion (Maybe Text)
cStoreId = lens _cStoreId (\ s a -> s{_cStoreId = a})
-- | ID that DoubleClick Search generates for each conversion.
cDsConversionId :: Lens' Conversion (Maybe Int64)
cDsConversionId
= lens _cDsConversionId
(\ s a -> s{_cDsConversionId = a})
. mapping _Coerce
-- | For offline conversions, this is an ID that advertisers are required to
-- provide. Advertisers can specify any ID that is meaningful to them. For
-- online conversions, DS copies the dsConversionId or floodlightOrderId
-- into this property depending on the advertiser\'s Floodlight
-- instructions.
cConversionId :: Lens' Conversion (Maybe Text)
cConversionId
= lens _cConversionId
(\ s a -> s{_cConversionId = a})
-- | DS advertiser ID.
cAdvertiserId :: Lens' Conversion (Maybe Int64)
cAdvertiserId
= lens _cAdvertiserId
(\ s a -> s{_cAdvertiserId = a})
. mapping _Coerce
-- | The numeric segmentation identifier (for example, DoubleClick Search
-- Floodlight activity ID).
cSegmentationId :: Lens' Conversion (Maybe Int64)
cSegmentationId
= lens _cSegmentationId
(\ s a -> s{_cSegmentationId = a})
. mapping _Coerce
-- | Sales channel for the product. Acceptable values are: - \"local\": a
-- physical store - \"online\": an online store
cChannel :: Lens' Conversion (Maybe Text)
cChannel = lens _cChannel (\ s a -> s{_cChannel = a})
-- | The country registered for the Merchant Center feed that contains the
-- product. Use an ISO 3166 code to specify a country.
cProductCountry :: Lens' Conversion (Maybe Text)
cProductCountry
= lens _cProductCountry
(\ s a -> s{_cProductCountry = a})
-- | DS campaign ID.
cCampaignId :: Lens' Conversion (Maybe Int64)
cCampaignId
= lens _cCampaignId (\ s a -> s{_cCampaignId = a}) .
mapping _Coerce
-- | DS criterion (keyword) ID.
cCriterionId :: Lens' Conversion (Maybe Int64)
cCriterionId
= lens _cCriterionId (\ s a -> s{_cCriterionId = a})
. mapping _Coerce
-- | The time at which the conversion took place, in epoch millis UTC.
cConversionTimestamp :: Lens' Conversion (Maybe Word64)
cConversionTimestamp
= lens _cConversionTimestamp
(\ s a -> s{_cConversionTimestamp = a})
. mapping _Coerce
-- | Available to advertisers only after contacting DoubleClick Search
-- customer support.
cAttributionModel :: Lens' Conversion (Maybe Text)
cAttributionModel
= lens _cAttributionModel
(\ s a -> s{_cAttributionModel = a})
-- | The friendly segmentation identifier (for example, DoubleClick Search
-- Floodlight activity name).
cSegmentationName :: Lens' Conversion (Maybe Text)
cSegmentationName
= lens _cSegmentationName
(\ s a -> s{_cSegmentationName = a})
-- | The language registered for the Merchant Center feed that contains the
-- product. Use an ISO 639 code to specify a language.
cProductLanguage :: Lens' Conversion (Maybe Text)
cProductLanguage
= lens _cProductLanguage
(\ s a -> s{_cProductLanguage = a})
-- | Custom metrics for the conversion.
cCustomMetric :: Lens' Conversion [CustomMetric]
cCustomMetric
= lens _cCustomMetric
(\ s a -> s{_cCustomMetric = a})
. _Default
. _Coerce
-- | Available to advertisers only after contacting DoubleClick Search
-- customer support.
cCountMillis :: Lens' Conversion (Maybe Int64)
cCountMillis
= lens _cCountMillis (\ s a -> s{_cCountMillis = a})
. mapping _Coerce
-- | The quantity of this conversion, in millis.
cQuantityMillis :: Lens' Conversion (Maybe Int64)
cQuantityMillis
= lens _cQuantityMillis
(\ s a -> s{_cQuantityMillis = a})
. mapping _Coerce
-- | DS ad ID.
cAdId :: Lens' Conversion (Maybe Int64)
cAdId
= lens _cAdId (\ s a -> s{_cAdId = a}) .
mapping _Coerce
-- | The type of device on which the conversion occurred.
cDeviceType :: Lens' Conversion (Maybe Text)
cDeviceType
= lens _cDeviceType (\ s a -> s{_cDeviceType = a})
-- | The type of the conversion, that is, either ACTION or TRANSACTION. An
-- ACTION conversion is an action by the user that has no monetarily
-- quantifiable value, while a TRANSACTION conversion is an action that
-- does have a monetarily quantifiable value. Examples are email list
-- signups (ACTION) versus ecommerce purchases (TRANSACTION).
cType :: Lens' Conversion (Maybe Text)
cType = lens _cType (\ s a -> s{_cType = a})
-- | Custom dimensions for the conversion, which can be used to filter data
-- in a report.
cCustomDimension :: Lens' Conversion [CustomDimension]
cCustomDimension
= lens _cCustomDimension
(\ s a -> s{_cCustomDimension = a})
. _Default
. _Coerce
-- | The Floodlight order ID provided by the advertiser for the conversion.
cFloodlightOrderId :: Lens' Conversion (Maybe Text)
cFloodlightOrderId
= lens _cFloodlightOrderId
(\ s a -> s{_cFloodlightOrderId = a})
-- | The revenue amount of this TRANSACTION conversion, in micros (value
-- multiplied by 1000000, no decimal). For example, to specify a revenue
-- value of \"10\" enter \"10000000\" (10 million) in your request.
cRevenueMicros :: Lens' Conversion (Maybe Int64)
cRevenueMicros
= lens _cRevenueMicros
(\ s a -> s{_cRevenueMicros = a})
. mapping _Coerce
-- | DS click ID for the conversion.
cClickId :: Lens' Conversion (Maybe Text)
cClickId = lens _cClickId (\ s a -> s{_cClickId = a})
-- | ID that DS generates and uses to uniquely identify the inventory account
-- that contains the product.
cInventoryAccountId :: Lens' Conversion (Maybe Int64)
cInventoryAccountId
= lens _cInventoryAccountId
(\ s a -> s{_cInventoryAccountId = a})
. mapping _Coerce
-- | The segmentation type of this conversion (for example, FLOODLIGHT).
cSegmentationType :: Lens' Conversion (Maybe Text)
cSegmentationType
= lens _cSegmentationType
(\ s a -> s{_cSegmentationType = a})
-- | The product ID (SKU).
cProductId :: Lens' Conversion (Maybe Text)
cProductId
= lens _cProductId (\ s a -> s{_cProductId = a})
-- | DS product group ID.
cProductGroupId :: Lens' Conversion (Maybe Int64)
cProductGroupId
= lens _cProductGroupId
(\ s a -> s{_cProductGroupId = a})
. mapping _Coerce
instance FromJSON Conversion where
parseJSON
= withObject "Conversion"
(\ o ->
Conversion' <$>
(o .:? "adGroupId") <*>
(o .:? "conversionModifiedTimestamp")
<*> (o .:? "state")
<*> (o .:? "engineAccountId")
<*> (o .:? "agencyId")
<*> (o .:? "currencyCode")
<*> (o .:? "storeId")
<*> (o .:? "dsConversionId")
<*> (o .:? "conversionId")
<*> (o .:? "advertiserId")
<*> (o .:? "segmentationId")
<*> (o .:? "channel")
<*> (o .:? "productCountry")
<*> (o .:? "campaignId")
<*> (o .:? "criterionId")
<*> (o .:? "conversionTimestamp")
<*> (o .:? "attributionModel")
<*> (o .:? "segmentationName")
<*> (o .:? "productLanguage")
<*> (o .:? "customMetric" .!= mempty)
<*> (o .:? "countMillis")
<*> (o .:? "quantityMillis")
<*> (o .:? "adId")
<*> (o .:? "deviceType")
<*> (o .:? "type")
<*> (o .:? "customDimension" .!= mempty)
<*> (o .:? "floodlightOrderId")
<*> (o .:? "revenueMicros")
<*> (o .:? "clickId")
<*> (o .:? "inventoryAccountId")
<*> (o .:? "segmentationType")
<*> (o .:? "productId")
<*> (o .:? "productGroupId"))
instance ToJSON Conversion where
toJSON Conversion'{..}
= object
(catMaybes
[("adGroupId" .=) <$> _cAdGroupId,
("conversionModifiedTimestamp" .=) <$>
_cConversionModifiedTimestamp,
("state" .=) <$> _cState,
("engineAccountId" .=) <$> _cEngineAccountId,
("agencyId" .=) <$> _cAgencyId,
("currencyCode" .=) <$> _cCurrencyCode,
("storeId" .=) <$> _cStoreId,
("dsConversionId" .=) <$> _cDsConversionId,
("conversionId" .=) <$> _cConversionId,
("advertiserId" .=) <$> _cAdvertiserId,
("segmentationId" .=) <$> _cSegmentationId,
("channel" .=) <$> _cChannel,
("productCountry" .=) <$> _cProductCountry,
("campaignId" .=) <$> _cCampaignId,
("criterionId" .=) <$> _cCriterionId,
("conversionTimestamp" .=) <$> _cConversionTimestamp,
("attributionModel" .=) <$> _cAttributionModel,
("segmentationName" .=) <$> _cSegmentationName,
("productLanguage" .=) <$> _cProductLanguage,
("customMetric" .=) <$> _cCustomMetric,
("countMillis" .=) <$> _cCountMillis,
("quantityMillis" .=) <$> _cQuantityMillis,
("adId" .=) <$> _cAdId,
("deviceType" .=) <$> _cDeviceType,
("type" .=) <$> _cType,
("customDimension" .=) <$> _cCustomDimension,
("floodlightOrderId" .=) <$> _cFloodlightOrderId,
("revenueMicros" .=) <$> _cRevenueMicros,
("clickId" .=) <$> _cClickId,
("inventoryAccountId" .=) <$> _cInventoryAccountId,
("segmentationType" .=) <$> _cSegmentationType,
("productId" .=) <$> _cProductId,
("productGroupId" .=) <$> _cProductGroupId])
-- | A saved column
--
-- /See:/ 'savedColumn' smart constructor.
data SavedColumn = SavedColumn'
{ _scSavedColumnName :: !(Maybe Text)
, _scKind :: !Text
, _scType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SavedColumn' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scSavedColumnName'
--
-- * 'scKind'
--
-- * 'scType'
savedColumn
:: SavedColumn
savedColumn =
SavedColumn'
{ _scSavedColumnName = Nothing
, _scKind = "doubleclicksearch#savedColumn"
, _scType = Nothing
}
-- | The name of the saved column.
scSavedColumnName :: Lens' SavedColumn (Maybe Text)
scSavedColumnName
= lens _scSavedColumnName
(\ s a -> s{_scSavedColumnName = a})
-- | Identifies this as a SavedColumn resource. Value: the fixed string
-- doubleclicksearch#savedColumn.
scKind :: Lens' SavedColumn Text
scKind = lens _scKind (\ s a -> s{_scKind = a})
-- | The type of data this saved column will produce.
scType :: Lens' SavedColumn (Maybe Text)
scType = lens _scType (\ s a -> s{_scType = a})
instance FromJSON SavedColumn where
parseJSON
= withObject "SavedColumn"
(\ o ->
SavedColumn' <$>
(o .:? "savedColumnName") <*>
(o .:? "kind" .!= "doubleclicksearch#savedColumn")
<*> (o .:? "type"))
instance ToJSON SavedColumn where
toJSON SavedColumn'{..}
= object
(catMaybes
[("savedColumnName" .=) <$> _scSavedColumnName,
Just ("kind" .= _scKind), ("type" .=) <$> _scType])
-- | A message containing the custom dimension.
--
-- /See:/ 'customDimension' smart constructor.
data CustomDimension = CustomDimension'
{ _cdValue :: !(Maybe Text)
, _cdName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CustomDimension' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cdValue'
--
-- * 'cdName'
customDimension
:: CustomDimension
customDimension =
CustomDimension'
{ _cdValue = Nothing
, _cdName = Nothing
}
-- | Custom dimension value.
cdValue :: Lens' CustomDimension (Maybe Text)
cdValue = lens _cdValue (\ s a -> s{_cdValue = a})
-- | Custom dimension name.
cdName :: Lens' CustomDimension (Maybe Text)
cdName = lens _cdName (\ s a -> s{_cdName = a})
instance FromJSON CustomDimension where
parseJSON
= withObject "CustomDimension"
(\ o ->
CustomDimension' <$>
(o .:? "value") <*> (o .:? "name"))
instance ToJSON CustomDimension where
toJSON CustomDimension'{..}
= object
(catMaybes
[("value" .=) <$> _cdValue, ("name" .=) <$> _cdName])
-- | The response to a update availability request.
--
-- /See:/ 'updateAvailabilityResponse' smart constructor.
newtype UpdateAvailabilityResponse = UpdateAvailabilityResponse'
{ _uAvailabilities :: Maybe [Availability]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateAvailabilityResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uAvailabilities'
updateAvailabilityResponse
:: UpdateAvailabilityResponse
updateAvailabilityResponse =
UpdateAvailabilityResponse'
{ _uAvailabilities = Nothing
}
-- | The availabilities being returned.
uAvailabilities :: Lens' UpdateAvailabilityResponse [Availability]
uAvailabilities
= lens _uAvailabilities
(\ s a -> s{_uAvailabilities = a})
. _Default
. _Coerce
instance FromJSON UpdateAvailabilityResponse where
parseJSON
= withObject "UpdateAvailabilityResponse"
(\ o ->
UpdateAvailabilityResponse' <$>
(o .:? "availabilities" .!= mempty))
instance ToJSON UpdateAvailabilityResponse where
toJSON UpdateAvailabilityResponse'{..}
= object
(catMaybes
[("availabilities" .=) <$> _uAvailabilities])
-- | The reportScope is a set of IDs that are used to determine which subset
-- of entities will be returned in the report. The full lineage of IDs from
-- the lowest scoped level desired up through agency is required.
--
-- /See:/ 'reportRequestReportScope' smart constructor.
data ReportRequestReportScope = ReportRequestReportScope'
{ _rrrsKeywordId :: !(Maybe (Textual Int64))
, _rrrsAdGroupId :: !(Maybe (Textual Int64))
, _rrrsEngineAccountId :: !(Maybe (Textual Int64))
, _rrrsAgencyId :: !(Maybe (Textual Int64))
, _rrrsAdvertiserId :: !(Maybe (Textual Int64))
, _rrrsCampaignId :: !(Maybe (Textual Int64))
, _rrrsAdId :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportRequestReportScope' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrrsKeywordId'
--
-- * 'rrrsAdGroupId'
--
-- * 'rrrsEngineAccountId'
--
-- * 'rrrsAgencyId'
--
-- * 'rrrsAdvertiserId'
--
-- * 'rrrsCampaignId'
--
-- * 'rrrsAdId'
reportRequestReportScope
:: ReportRequestReportScope
reportRequestReportScope =
ReportRequestReportScope'
{ _rrrsKeywordId = Nothing
, _rrrsAdGroupId = Nothing
, _rrrsEngineAccountId = Nothing
, _rrrsAgencyId = Nothing
, _rrrsAdvertiserId = Nothing
, _rrrsCampaignId = Nothing
, _rrrsAdId = Nothing
}
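-- A usage sketch (the IDs below are made up): when scoping a report to an
-- advertiser, supply the full lineage of IDs up through the agency, e.g.
--
-- > reportRequestReportScope & rrrsAgencyId ?~ 12300000000000456
-- >                          & rrrsAdvertiserId ?~ 21700000000011523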
-- | DS keyword ID.
rrrsKeywordId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsKeywordId
= lens _rrrsKeywordId
(\ s a -> s{_rrrsKeywordId = a})
. mapping _Coerce
-- | DS ad group ID.
rrrsAdGroupId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsAdGroupId
= lens _rrrsAdGroupId
(\ s a -> s{_rrrsAdGroupId = a})
. mapping _Coerce
-- | DS engine account ID.
rrrsEngineAccountId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsEngineAccountId
= lens _rrrsEngineAccountId
(\ s a -> s{_rrrsEngineAccountId = a})
. mapping _Coerce
-- | DS agency ID.
rrrsAgencyId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsAgencyId
= lens _rrrsAgencyId (\ s a -> s{_rrrsAgencyId = a})
. mapping _Coerce
-- | DS advertiser ID.
rrrsAdvertiserId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsAdvertiserId
= lens _rrrsAdvertiserId
(\ s a -> s{_rrrsAdvertiserId = a})
. mapping _Coerce
-- | DS campaign ID.
rrrsCampaignId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsCampaignId
= lens _rrrsCampaignId
(\ s a -> s{_rrrsCampaignId = a})
. mapping _Coerce
-- | DS ad ID.
rrrsAdId :: Lens' ReportRequestReportScope (Maybe Int64)
rrrsAdId
= lens _rrrsAdId (\ s a -> s{_rrrsAdId = a}) .
mapping _Coerce
instance FromJSON ReportRequestReportScope where
parseJSON
= withObject "ReportRequestReportScope"
(\ o ->
ReportRequestReportScope' <$>
(o .:? "keywordId") <*> (o .:? "adGroupId") <*>
(o .:? "engineAccountId")
<*> (o .:? "agencyId")
<*> (o .:? "advertiserId")
<*> (o .:? "campaignId")
<*> (o .:? "adId"))
instance ToJSON ReportRequestReportScope where
toJSON ReportRequestReportScope'{..}
= object
(catMaybes
[("keywordId" .=) <$> _rrrsKeywordId,
("adGroupId" .=) <$> _rrrsAdGroupId,
("engineAccountId" .=) <$> _rrrsEngineAccountId,
("agencyId" .=) <$> _rrrsAgencyId,
("advertiserId" .=) <$> _rrrsAdvertiserId,
("campaignId" .=) <$> _rrrsCampaignId,
("adId" .=) <$> _rrrsAdId])
-- | A list of saved columns. Advertisers create saved columns to report on
-- Floodlight activities, Google Analytics goals, or custom KPIs. To
-- request reports with saved columns, you\'ll need the saved column names
-- that are available from this list.
--
-- /See:/ 'savedColumnList' smart constructor.
data SavedColumnList = SavedColumnList'
{ _sclKind :: !Text
, _sclItems :: !(Maybe [SavedColumn])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SavedColumnList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sclKind'
--
-- * 'sclItems'
savedColumnList
:: SavedColumnList
savedColumnList =
SavedColumnList'
{ _sclKind = "doubleclicksearch#savedColumnList"
, _sclItems = Nothing
}
-- | Identifies this as a SavedColumnList resource. Value: the fixed string
-- doubleclicksearch#savedColumnList.
sclKind :: Lens' SavedColumnList Text
sclKind = lens _sclKind (\ s a -> s{_sclKind = a})
-- | The saved columns being requested.
sclItems :: Lens' SavedColumnList [SavedColumn]
sclItems
= lens _sclItems (\ s a -> s{_sclItems = a}) .
_Default
. _Coerce
instance FromJSON SavedColumnList where
parseJSON
= withObject "SavedColumnList"
(\ o ->
SavedColumnList' <$>
(o .:? "kind" .!=
"doubleclicksearch#savedColumnList")
<*> (o .:? "items" .!= mempty))
instance ToJSON SavedColumnList where
toJSON SavedColumnList'{..}
= object
(catMaybes
[Just ("kind" .= _sclKind),
("items" .=) <$> _sclItems])
| rueshyna/gogol | gogol-doubleclick-search/gen/Network/Google/DoubleClickSearch/Types/Product.hs | mpl-2.0 | 62,378 | 0 | 43 | 16,210 | 11,714 | 6,732 | 4,982 | 1,304 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Partners.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Partners.Types.Sum where
import Network.Google.Prelude
| rueshyna/gogol | gogol-partners/gen/Network/Google/Partners/Types/Sum.hs | mpl-2.0 | 600 | 0 | 4 | 109 | 29 | 25 | 4 | 8 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Licenses.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. Caution:
-- This resource is intended for use only by third-party partners who are
-- creating Cloud Marketplace images.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.licenses.testIamPermissions@.
module Network.Google.Resource.Compute.Licenses.TestIAMPermissions
(
-- * REST Resource
LicensesTestIAMPermissionsResource
-- * Creating a Request
, licensesTestIAMPermissions
, LicensesTestIAMPermissions
-- * Request Lenses
, ltipProject
, ltipPayload
, ltipResource
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.licenses.testIamPermissions@ method which the
-- 'LicensesTestIAMPermissions' request conforms to.
type LicensesTestIAMPermissionsResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"licenses" :>
Capture "resource" Text :>
"testIamPermissions" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TestPermissionsRequest :>
Post '[JSON] TestPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. Caution:
-- This resource is intended for use only by third-party partners who are
-- creating Cloud Marketplace images.
--
-- /See:/ 'licensesTestIAMPermissions' smart constructor.
data LicensesTestIAMPermissions =
LicensesTestIAMPermissions'
{ _ltipProject :: !Text
, _ltipPayload :: !TestPermissionsRequest
, _ltipResource :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LicensesTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ltipProject'
--
-- * 'ltipPayload'
--
-- * 'ltipResource'
licensesTestIAMPermissions
:: Text -- ^ 'ltipProject'
-> TestPermissionsRequest -- ^ 'ltipPayload'
-> Text -- ^ 'ltipResource'
-> LicensesTestIAMPermissions
licensesTestIAMPermissions pLtipProject_ pLtipPayload_ pLtipResource_ =
LicensesTestIAMPermissions'
{ _ltipProject = pLtipProject_
, _ltipPayload = pLtipPayload_
, _ltipResource = pLtipResource_
}
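-- A usage sketch (names are illustrative, not generated): construct the
-- request value and hand it to gogol's 'send', e.g.
--
-- > send (licensesTestIAMPermissions "my-project" myPermissionsReq "my-license")
--
-- where @myPermissionsReq :: TestPermissionsRequest@ is assumed to be built
-- elsewhere.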
-- | Project ID for this request.
ltipProject :: Lens' LicensesTestIAMPermissions Text
ltipProject
= lens _ltipProject (\ s a -> s{_ltipProject = a})
-- | Multipart request metadata.
ltipPayload :: Lens' LicensesTestIAMPermissions TestPermissionsRequest
ltipPayload
= lens _ltipPayload (\ s a -> s{_ltipPayload = a})
-- | Name or id of the resource for this request.
ltipResource :: Lens' LicensesTestIAMPermissions Text
ltipResource
= lens _ltipResource (\ s a -> s{_ltipResource = a})
instance GoogleRequest LicensesTestIAMPermissions
where
type Rs LicensesTestIAMPermissions =
TestPermissionsResponse
type Scopes LicensesTestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient LicensesTestIAMPermissions'{..}
= go _ltipProject _ltipResource (Just AltJSON)
_ltipPayload
computeService
where go
= buildClient
(Proxy :: Proxy LicensesTestIAMPermissionsResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Licenses/TestIAMPermissions.hs | mpl-2.0 | 4,407 | 0 | 17 | 990 | 479 | 287 | 192 | 80 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module Blick.Server where
import Servant
import qualified Servant.Server.StaticFiles as Static
import qualified Blick.API as API
import Blick.Context (AppCtx (..), AppM)
import qualified Blick.Database as Database
import Blick.Types (CreateSecretResponse (..),
SecretBody (..))
import Control.Monad.Reader (asks, liftIO)
import Network.HTTP.Types (status200)
import Network.Wai as Wai
apiServer :: Servant.ServerT API.API AppM
apiServer =
getSecret
:<|> createSecret
server :: Servant.ServerT API.Combined AppM
server =
apiServer
:<|> (Static.serveDirectoryWebApp "./assets")
:<|> (Tagged serveSinglePageApp)
getSecret :: String -> AppM SecretBody
getSecret secretId = do
db <- asks _getSecretDb
secret <- liftIO $ Database.lookupSecret db secretId
case secret of
Nothing ->
-- TODO: Descriptive error
throwError err404
Just s ->
return s
createSecret :: SecretBody -> AppM CreateSecretResponse
createSecret body = do
db <- asks _getSecretDb
secretId <- liftIO $ Database.createSecret db body
return $ CreateSecretResponse { secretId = secretId }
-- | This is pretty gross, but the idea is that since the frontend is
-- responsible for routing, we never actually send a 404 from the
-- backend. The frontend always receives the same barebones html and
-- does the routing / rendering.
serveSinglePageApp :: Wai.Application
serveSinglePageApp _req respond =
respond $
Wai.responseLBS
status200
[("Content-Type", "text/html")]
baseHtml
-- TODO: Probably switch to something like Blaze HTML
baseHtml =
"<!doctype html>\n\
\<head>\n\
\ <meta charset=\"utf-8\">\n\
\ <title>blick</title>\n\
\ <link rel=\"stylesheet\" href=\"/static/main.css\">\n\
\</head>\n\
\<body>\n\
\ <div id=\"app\"></div>\n\
\ <script src=\"/static/main.js\"></script>\n\
\ <script src=\"/static/ports.js\"></script>\n\
\ <script>\n\
\ const app = Elm.Main.init({node: document.getElementById(\"app\")});\n\
\ registerPorts(app);\n\
\ </script>\n\
\</body>"
| erik/sketches | projects/blick/src/Blick/Server.hs | agpl-3.0 | 2,340 | 0 | 10 | 596 | 359 | 200 | 159 | 45 | 2 |
{-# LANGUAGE ForeignFunctionInterface, FlexibleInstances #-}
-----------------------------------------------------------------------------------------
{-| Module : Types
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : wxhaskell-devel@lists.sourceforge.net
Stability : provisional
Portability : portable
Basic types and operations.
-}
-----------------------------------------------------------------------------------------
module Graphics.UI.WXCore.Types(
-- * Objects
( # )
, Object, objectNull, objectIsNull, objectCast, objectIsManaged
, objectDelete
, withObjectPtr, withObjectRef
, withObjectResult, withManagedObjectResult
, objectFinalize, objectNoFinalize
, objectFromPtr, managedObjectFromPtr
-- , Managed, managedNull, managedIsNull, managedCast, createManaged, withManaged, managedTouch
-- * Identifiers
, Id, idAny, idCreate
-- * Bits
, (.+.), (.-.)
, bits
, bitsSet
-- * Control
, unitIO, bracket, bracket_, finally, finalize, when
-- * Variables
, Var, varCreate, varGet, varSet, varUpdate, varSwap
-- * Misc.
, Style
, EventId
, TreeItem, treeItemInvalid, treeItemIsOk
-- * Basic types
-- ** Booleans
, toCBool, fromCBool
-- ** Colors
, Color, rgb, colorRGB, colorRed, colorGreen, colorBlue, intFromColor, colorFromInt, colorIsOk, colorOk
, black, darkgrey, dimgrey, mediumgrey, grey, lightgrey, white
, red, green, blue
, cyan, magenta, yellow
-- *** System colors
, SystemColor(..), colorSystem
-- ** Points
, Point, Point2(Point,pointX,pointY), point, pt, pointFromVec, pointFromSize, pointZero, pointNull
, pointMove, pointMoveBySize, pointAdd, pointSub, pointScale
-- ** Sizes
, Size, Size2D(Size,sizeW,sizeH), sz, sizeFromPoint, sizeFromVec, sizeZero, sizeNull, sizeEncloses
, sizeMin, sizeMax
-- ** Vectors
, Vector, Vector2(Vector,vecX,vecY), vector, vec, vecFromPoint, vecFromSize, vecZero, vecNull
, vecNegate, vecOrtogonal, vecAdd, vecSub, vecScale, vecBetween, vecLength
, vecLengthDouble
-- ** Rectangles
, Rect, Rect2D(Rect,rectLeft,rectTop,rectWidth,rectHeight)
, rectTopLeft, rectTopRight, rectBottomLeft, rectBottomRight, rectBottom, rectRight
, rect, rectBetween, rectFromSize, rectZero, rectNull, rectSize, rectIsEmpty
, rectContains, rectMoveTo, rectFromPoint, rectCentralPoint, rectCentralRect, rectStretchTo
, rectCentralPointDouble, rectCentralRectDouble
, rectMove, rectOverlaps, rectsDiff, rectUnion, rectOverlap, rectUnions
) where
import Data.List( (\\) )
import Graphics.UI.WXCore.WxcTypes
import Graphics.UI.WXCore.WxcDefs
import Graphics.UI.WXCore.WxcClasses( wxcSystemSettingsGetColour )
import System.IO.Unsafe( unsafePerformIO )
-- utility
import Data.Array
import Data.Bits
import Control.Concurrent.STM
import qualified Control.Exception as CE
import qualified Control.Monad as M
infixl 5 .+.
infixl 5 .-.
infix 5 #
-- | Reverse application, i.e. @x # f@ = @f x@.
-- Useful for an object oriented style of programming.
--
-- > (frame # frameSetTitle) "hi"
--
( # ) :: obj -> (obj -> a) -> a
object # method = method object
{--------------------------------------------------------------------------------
Bitmasks
--------------------------------------------------------------------------------}
-- | Bitwise /or/ of two bit masks.
(.+.) :: Int -> Int -> Int
(.+.) i j
= i .|. j
-- | Unset certain bits in a bitmask.
(.-.) :: Int -> BitFlag -> Int
(.-.) i j
= i .&. complement j
-- | Bitwise /or/ of a list of bit masks.
bits :: [Int] -> Int
bits xs
= foldr (.+.) 0 xs
-- | (@bitsSet mask i@) tests if all bits in @mask@ are also set in @i@.
bitsSet :: Int -> Int -> Bool
bitsSet mask i
= (i .&. mask == mask)
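-- For example (plain numeric masks, used purely for illustration):
--
-- > bits [0x01, 0x04]     -- 0x05
-- > bitsSet 0x05 0x07     -- True
-- > bitsSet 0x05 0x03     -- False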
{--------------------------------------------------------------------------------
Id
--------------------------------------------------------------------------------}
{-# NOINLINE varTopId #-}
varTopId :: Var Id
varTopId
= unsafePerformIO (varCreate (wxID_HIGHEST+1))
-- | When creating a new window you may specify 'idAny' to let wxWindows
-- assign an unused identifier to it automatically. Furthermore, it can be
-- used in an event connection to handle events for any identifier.
idAny :: Id
idAny
= -1
-- | Create a new unique identifier.
idCreate :: IO Id
idCreate
= varUpdate varTopId (+1)
{--------------------------------------------------------------------------------
Control
--------------------------------------------------------------------------------}
-- | Ignore the result of an 'IO' action.
unitIO :: IO a -> IO ()
unitIO io
= do io; return ()
-- | Perform an action when a test succeeds.
when :: Bool -> IO () -> IO ()
when = M.when
-- | Properly release resources, even in the event of an exception.
bracket :: IO a -- ^ computation to run first (acquire resource)
-> (a -> IO b) -- ^ computation to run last (release resource)
-> (a -> IO c) -- ^ computation to run in-between (use resource)
-> IO c
bracket = CE.bracket
-- | Specialized variant of 'bracket' where the return value is not required.
bracket_ :: IO a -- ^ computation to run first (acquire resource)
-> IO b -- ^ computation to run last (release resource)
-> IO c -- ^ computation to run in-between (use resource)
-> IO c
bracket_ = CE.bracket_
-- | Run some computation afterwards, even if an exception occurs.
finally :: IO a -- ^ computation to run first
-> IO b -- ^ computation to run last (release resource)
-> IO a
finally = CE.finally
-- | Run some computation afterwards, even if an exception occurs. Equals 'finally' but
-- with the arguments swapped.
finalize :: IO b -- ^ computation to run last (release resource)
-> IO a -- ^ computation to run first
-> IO a
finalize last first
= finally first last
{--------------------------------------------------------------------------------
Variables
--------------------------------------------------------------------------------}
-- | A mutable variable. Use this instead of 'MVar's or 'IORef's to accommodate
-- future expansions with possible concurrency.
type Var a = TVar a
-- | Create a fresh mutable variable.
varCreate :: a -> IO (Var a)
varCreate x = newTVarIO x
-- | Get the value of a mutable variable.
varGet :: Var a -> IO a
varGet v = atomically $ readTVar v
-- | Set the value of a mutable variable.
varSet :: Var a -> a -> IO ()
varSet v x = atomically $ writeTVar v x
-- | Swap the value of a mutable variable.
varSwap :: Var a -> a -> IO a
varSwap v x = atomically $ do
prev <- readTVar v
writeTVar v x
return prev
-- | Update the value of a mutable variable and return the old value.
varUpdate :: Var a -> (a -> a) -> IO a
varUpdate v f = atomically $ do
x <- readTVar v
writeTVar v (f x)
return x
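-- A small usage sketch of the 'Var' interface (illustrative only): create a
-- counter, bump it, and read back both the old and the new value.
--
-- > do v   <- varCreate (0 :: Int)
-- >    old <- varUpdate v (+1)
-- >    new <- varGet v
-- >    return (old, new)    -- (0, 1)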
{-----------------------------------------------------------------------------------------
Point
-----------------------------------------------------------------------------------------}
pointMove :: (Num a) => Vector2 a -> Point2 a -> Point2 a
pointMove (Vector dx dy) (Point x y)
= Point (x+dx) (y+dy)
pointMoveBySize :: (Num a) => Point2 a -> Size2D a -> Point2 a
pointMoveBySize (Point x y) (Size w h) = Point (x + w) (y + h)
pointAdd :: (Num a) => Point2 a -> Point2 a -> Point2 a
pointAdd (Point x1 y1) (Point x2 y2) = Point (x1+x2) (y1+y2)
pointSub :: (Num a) => Point2 a -> Point2 a -> Point2 a
pointSub (Point x1 y1) (Point x2 y2) = Point (x1-x2) (y1-y2)
pointScale :: (Num a) => Point2 a -> a -> Point2 a
pointScale (Point x y) v = Point (v*x) (v*y)
instance (Num a, Ord a) => Ord (Point2 a) where
compare (Point x1 y1) (Point x2 y2)
= case compare y1 y2 of
EQ -> compare x1 x2
neq -> neq
instance Ix (Point2 Int) where
range (Point x1 y1,Point x2 y2)
= [Point x y | y <- [y1..y2], x <- [x1..x2]]
inRange (Point x1 y1, Point x2 y2) (Point x y)
= (x >= x1 && x <= x2 && y >= y1 && y <= y2)
rangeSize (Point x1 y1, Point x2 y2)
= let w = abs (x2 - x1) + 1
h = abs (y2 - y1) + 1
in w*h
index bnd@(Point x1 y1, Point x2 y2) p@(Point x y)
= if inRange bnd p
then let w = abs (x2 - x1) + 1
            in (y-y1)*w + (x-x1)   -- offset relative to the lower bound of the range
else error ("Point index out of bounds: " ++ show p ++ " not in " ++ show bnd)
{-----------------------------------------------------------------------------------------
Size
-----------------------------------------------------------------------------------------}
-- | Return the width. (see also 'sizeW').
sizeWidth :: (Num a) => Size2D a -> a
sizeWidth (Size w h)
= w
-- | Return the height. (see also 'sizeH').
sizeHeight :: (Num a) => Size2D a -> a
sizeHeight (Size w h)
= h
-- | Returns 'True' if the first size totally encloses the second argument.
sizeEncloses :: (Num a, Ord a) => Size2D a -> Size2D a -> Bool
sizeEncloses (Size w0 h0) (Size w1 h1)
= (w0 >= w1) && (h0 >= h1)
-- | The minimum of two sizes.
sizeMin :: (Num a, Ord a) => Size2D a -> Size2D a -> Size2D a
sizeMin (Size w0 h0) (Size w1 h1)
= Size (min w0 w1) (min h0 h1)
-- | The maximum of two sizes.
sizeMax :: (Num a, Ord a) => Size2D a -> Size2D a -> Size2D a
sizeMax (Size w0 h0) (Size w1 h1)
= Size (max w0 w1) (max h0 h1)
{-----------------------------------------------------------------------------------------
Vector
-----------------------------------------------------------------------------------------}
vecNegate :: (Num a) => Vector2 a -> Vector2 a
vecNegate (Vector x y)
= Vector (-x) (-y)
vecOrtogonal :: (Num a) => Vector2 a -> Vector2 a
vecOrtogonal (Vector x y) = (Vector y (-x))
vecAdd :: (Num a) => Vector2 a -> Vector2 a -> Vector2 a
vecAdd (Vector x1 y1) (Vector x2 y2) = Vector (x1+x2) (y1+y2)
vecSub :: (Num a) => Vector2 a -> Vector2 a -> Vector2 a
vecSub (Vector x1 y1) (Vector x2 y2) = Vector (x1-x2) (y1-y2)
vecScale :: (Num a) => Vector2 a -> a -> Vector2 a
vecScale (Vector x y) v = Vector (v*x) (v*y)
vecBetween :: (Num a) => Point2 a -> Point2 a -> Vector2 a
vecBetween (Point x1 y1) (Point x2 y2) = Vector (x2-x1) (y2-y1)
vecLength :: Vector -> Double
vecLength (Vector x y)
= sqrt (fromIntegral (x*x + y*y))
vecLengthDouble :: Vector2 Double -> Double
vecLengthDouble (Vector x y)
= sqrt (x*x + y*y)
{-----------------------------------------------------------------------------------------
Rectangle
-----------------------------------------------------------------------------------------}
rectContains :: (Num a, Ord a) => Rect2D a -> Point2 a -> Bool
rectContains (Rect l t w h) (Point x y)
= (x >= l && x <= (l+w) && y >= t && y <= (t+h))
rectMoveTo :: (Num a) => Rect2D a -> Point2 a -> Rect2D a
rectMoveTo r p
= rect p (rectSize r)
rectFromPoint :: (Num a) => Point2 a -> Rect2D a
rectFromPoint (Point x y)
= Rect x y x y
rectCentralPoint :: Rect2D Int -> Point2 Int
rectCentralPoint (Rect l t w h)
= Point (l + div w 2) (t + div h 2)
rectCentralRect :: Rect2D Int -> Size -> Rect2D Int
rectCentralRect r@(Rect l t rw rh) (Size w h)
= let c = rectCentralPoint r
in Rect (pointX c - (w - div w 2)) (pointY c - (h - div h 2)) w h
rectCentralPointDouble :: (Fractional a) => Rect2D a -> Point2 a
rectCentralPointDouble (Rect l t w h)
= Point (l + w/2) (t + h/2)
rectCentralRectDouble :: (Fractional a) => Rect2D a -> Size2D a -> Rect2D a
rectCentralRectDouble r@(Rect l t rw rh) (Size w h)
= let c = rectCentralPointDouble r
in Rect (pointX c - (w - w/2)) (pointY c - (h - h/2)) w h
rectStretchTo :: (Num a) => Rect2D a -> Size2D a -> Rect2D a
rectStretchTo (Rect l t _ _) (Size w h)
= Rect l t w h
rectMove :: (Num a) => Rect2D a -> Vector2 a -> Rect2D a
rectMove (Rect x y w h) (Vector dx dy)
= Rect (x+dx) (y+dy) w h
rectOverlaps :: (Num a, Ord a) => Rect2D a -> Rect2D a -> Bool
rectOverlaps (Rect x1 y1 w1 h1) (Rect x2 y2 w2 h2)
= (x1+w1 >= x2 && x1 <= x2+w2) && (y1+h1 >= y2 && y1 <= y2+h2)
-- | A list with rectangles that constitute the difference between two rectangles.
rectsDiff :: (Num a, Ord a) => Rect2D a -> Rect2D a -> [Rect2D a]
rectsDiff rect1 rect2
= subtractFittingRect rect1 (rectOverlap rect1 rect2)
where
-- subtractFittingRect r1 r2 subtracts r2 from r1 assuming that r2 fits inside r1
subtractFittingRect :: (Num a, Ord a) => Rect2D a -> Rect2D a -> [Rect2D a]
subtractFittingRect r1 r2 =
filter (not . rectIsEmpty)
[ rectBetween (rectTopLeft r1) (rectTopRight r2)
, rectBetween (pt (rectLeft r1) (rectTop r2)) (rectBottomLeft r2)
, rectBetween (pt (rectLeft r1) (rectBottom r2)) (pt (rectRight r2) (rectBottom r1))
, rectBetween (rectTopRight r2) (rectBottomRight r1)
]
rectUnion :: (Num a, Ord a) => Rect2D a -> Rect2D a -> Rect2D a
rectUnion r1 r2
= rectBetween (pt (min (rectLeft r1) (rectLeft r2)) (min (rectTop r1) (rectTop r2)))
(pt (max (rectRight r1) (rectRight r2)) (max (rectBottom r1) (rectBottom r2)))
rectUnions :: (Num a, Ord a) => [Rect2D a] -> Rect2D a
rectUnions []
= rectZero
rectUnions (r:rs)
= foldr rectUnion r rs
-- | The intersection between two rectangles.
rectOverlap :: (Num a, Ord a) => Rect2D a -> Rect2D a -> Rect2D a
rectOverlap r1 r2
| rectOverlaps r1 r2 = rectBetween (pt (max (rectLeft r1) (rectLeft r2)) (max (rectTop r1) (rectTop r2)))
(pt (min (rectRight r1) (rectRight r2)) (min (rectBottom r1) (rectBottom r2)))
| otherwise = rectZero
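-- For example (illustrative):
--
-- > rectOverlap (rect (pt 0 0) (sz 10 10)) (rect (pt 5 5) (sz 10 10))
-- >   -- == rectBetween (pt 5 5) (pt 10 10)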
{-----------------------------------------------------------------------------------------
Default colors.
-----------------------------------------------------------------------------------------}
black, darkgrey, dimgrey, mediumgrey, grey, lightgrey, white :: Color
red, green, blue :: Color
cyan, magenta, yellow :: Color
black = colorRGB 0x00 0x00 0x00
darkgrey = colorRGB 0x2F 0x2F 0x2F
dimgrey = colorRGB 0x54 0x54 0x54
mediumgrey= colorRGB 0x64 0x64 0x64
grey = colorRGB 0x80 0x80 0x80
lightgrey = colorRGB 0xC0 0xC0 0xC0
white = colorRGB 0xFF 0xFF 0xFF
red = colorRGB 0xFF 0x00 0x00
green = colorRGB 0x00 0xFF 0x00
blue = colorRGB 0x00 0x00 0xFF
yellow = colorRGB 0xFF 0xFF 0x00
magenta = colorRGB 0xFF 0x00 0xFF
cyan = colorRGB 0x00 0xFF 0xFF
{--------------------------------------------------------------------------
System colors
--------------------------------------------------------------------------}
-- | System Colors.
data SystemColor
= ColorScrollBar -- ^ The scrollbar grey area.
| ColorBackground -- ^ The desktop colour.
| ColorActiveCaption -- ^ Active window caption.
| ColorInactiveCaption -- ^ Inactive window caption.
| ColorMenu -- ^ Menu background.
| ColorWindow -- ^ Window background.
| ColorWindowFrame -- ^ Window frame.
| ColorMenuText -- ^ Menu text.
| ColorWindowText -- ^ Text in windows.
| ColorCaptionText -- ^ Text in caption, size box and scrollbar arrow box.
| ColorActiveBorder -- ^ Active window border.
| ColorInactiveBorder -- ^ Inactive window border.
  | ColorAppWorkspace           -- ^ Background colour of MDI applications.
| ColorHighlight -- ^ Item(s) selected in a control.
| ColorHighlightText -- ^ Text of item(s) selected in a control.
| ColorBtnFace -- ^ Face shading on push buttons.
| ColorBtnShadow -- ^ Edge shading on push buttons.
| ColorGrayText -- ^ Greyed (disabled) text.
| ColorBtnText -- ^ Text on push buttons.
  | ColorInactiveCaptionText    -- ^ Colour of text in inactive captions.
| ColorBtnHighlight -- ^ Highlight colour for buttons (same as 3DHILIGHT).
| Color3DDkShadow -- ^ Dark shadow for three-dimensional display elements.
| Color3DLight -- ^ Light colour for three-dimensional display elements.
| ColorInfoText -- ^ Text colour for tooltip controls.
| ColorInfoBk -- ^ Background colour for tooltip controls.
| ColorDesktop -- ^ Same as BACKGROUND.
| Color3DFace -- ^ Same as BTNFACE.
| Color3DShadow -- ^ Same as BTNSHADOW.
| Color3DHighlight -- ^ Same as BTNHIGHLIGHT.
| Color3DHilight -- ^ Same as BTNHIGHLIGHT.
| ColorBtnHilight -- ^ Same as BTNHIGHLIGHT.
instance Enum SystemColor where
toEnum i
    = error "Graphics.UI.WXCore.Types.SystemColor.toEnum: cannot convert integers to system colors."
fromEnum systemColor
= case systemColor of
ColorScrollBar -> wxSYS_COLOUR_SCROLLBAR
ColorBackground -> wxSYS_COLOUR_BACKGROUND
ColorActiveCaption -> wxSYS_COLOUR_ACTIVECAPTION
ColorInactiveCaption -> wxSYS_COLOUR_INACTIVECAPTION
ColorMenu -> wxSYS_COLOUR_MENU
ColorWindow -> wxSYS_COLOUR_WINDOW
ColorWindowFrame -> wxSYS_COLOUR_WINDOWFRAME
ColorMenuText -> wxSYS_COLOUR_MENUTEXT
ColorWindowText -> wxSYS_COLOUR_WINDOWTEXT
ColorCaptionText -> wxSYS_COLOUR_CAPTIONTEXT
ColorActiveBorder -> wxSYS_COLOUR_ACTIVEBORDER
ColorInactiveBorder -> wxSYS_COLOUR_INACTIVEBORDER
ColorAppWorkspace -> wxSYS_COLOUR_APPWORKSPACE
ColorHighlight -> wxSYS_COLOUR_HIGHLIGHT
ColorHighlightText -> wxSYS_COLOUR_HIGHLIGHTTEXT
ColorBtnFace -> wxSYS_COLOUR_BTNFACE
ColorBtnShadow -> wxSYS_COLOUR_BTNSHADOW
ColorGrayText -> wxSYS_COLOUR_GRAYTEXT
ColorBtnText -> wxSYS_COLOUR_BTNTEXT
ColorInactiveCaptionText -> wxSYS_COLOUR_INACTIVECAPTIONTEXT
ColorBtnHighlight -> wxSYS_COLOUR_BTNHIGHLIGHT
Color3DDkShadow -> wxSYS_COLOUR_3DDKSHADOW
Color3DLight -> wxSYS_COLOUR_3DLIGHT
ColorInfoText -> wxSYS_COLOUR_INFOTEXT
ColorInfoBk -> wxSYS_COLOUR_INFOBK
ColorDesktop -> wxSYS_COLOUR_DESKTOP
Color3DFace -> wxSYS_COLOUR_3DFACE
Color3DShadow -> wxSYS_COLOUR_3DSHADOW
Color3DHighlight -> wxSYS_COLOUR_3DHIGHLIGHT
Color3DHilight -> wxSYS_COLOUR_3DHILIGHT
ColorBtnHilight -> wxSYS_COLOUR_BTNHILIGHT
-- | Convert a system color to a color.
colorSystem :: SystemColor -> Color
colorSystem systemColor
= unsafePerformIO $
wxcSystemSettingsGetColour (fromEnum systemColor)
| thielema/wxhaskell | wxcore/src/haskell/Graphics/UI/WXCore/Types.hs | lgpl-2.1 | 19,284 | 0 | 14 | 4,817 | 4,967 | 2,663 | 2,304 | 332 | 1 |
module Game where
import Linear
import Time
import Input
import Transform
data GameState a = GameState { worms :: [Worm a] }
data Worm a = Worm
type Position = V2
-- nextState :: GameState a -> DTime -> [InputEvent] -> GameState a
-- nextState s _dt _is = s -- TODO
| epeld/zatacka | old/Game.hs | apache-2.0 | 257 | 0 | 10 | 55 | 54 | 35 | 19 | 8 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Network.GRPC.HighLevel.Client
( ClientError(..)
, ClientRegisterable(..)
, ClientRequest(..)
, ClientResult(..)
, GRPCMethodType(..)
, MetadataMap(..)
, RegisteredMethod
, ServiceClient
, StatusCode(..)
, StatusDetails(..)
, StreamRecv
, StreamSend
, TimeoutSeconds
, WritesDone
, LL.Client
, LL.ClientConfig(..)
, LL.ClientSSLConfig(..)
, LL.ClientSSLKeyCertPair(..)
, LL.Host(..)
, LL.Port(..)
, clientRequest
-- * Client utility functions
, acquireClient
, simplifyServerStreaming
, simplifyUnary
)
where
import Control.Monad.Managed (Managed, liftIO,
managed)
import qualified Data.ByteString.Lazy as BL
import Network.GRPC.HighLevel.Server (convertRecv,
convertSend)
import Network.GRPC.LowLevel (GRPCIOError (..),
GRPCMethodType (..),
MetadataMap (..),
StatusCode (..),
StatusDetails (..),
StreamRecv, StreamSend)
import qualified Network.GRPC.LowLevel as LL
import Network.GRPC.LowLevel.CompletionQueue (TimeoutSeconds)
import Network.GRPC.LowLevel.Op (WritesDone)
import Proto3.Suite (Message, fromByteString,
toLazyByteString)
import Proto3.Wire.Decode (ParseError)
newtype RegisteredMethod (mt :: GRPCMethodType) request response
= RegisteredMethod (LL.RegisteredMethod mt)
deriving Show
type ServiceClient service = service ClientRequest ClientResult
data ClientError
= ClientErrorNoParse ParseError
| ClientIOError GRPCIOError
deriving (Show, Eq)
data ClientRequest (streamType :: GRPCMethodType) request response where
ClientNormalRequest :: request -> TimeoutSeconds -> MetadataMap -> ClientRequest 'Normal request response
ClientWriterRequest :: TimeoutSeconds -> MetadataMap -> (StreamSend request -> IO ()) -> ClientRequest 'ClientStreaming request response
-- | The final field will be invoked once, and it should repeatedly
-- invoke its final argument (of type @(StreamRecv response)@)
-- in order to obtain the streaming response incrementally.
ClientReaderRequest :: request -> TimeoutSeconds -> MetadataMap -> (LL.ClientCall -> MetadataMap -> StreamRecv response -> IO ()) -> ClientRequest 'ServerStreaming request response
ClientBiDiRequest :: TimeoutSeconds -> MetadataMap -> (LL.ClientCall -> MetadataMap -> StreamRecv response -> StreamSend request -> WritesDone -> IO ()) -> ClientRequest 'BiDiStreaming request response
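-- A sketch of a server-streaming handler (illustrative only; assumes the
-- LambdaCase style used elsewhere in this package): keep calling the supplied
-- 'StreamRecv' until it yields @Right Nothing@, which signals the end of the
-- stream.
--
-- > ClientReaderRequest req 10 mempty $ \_call _initMeta recv ->
-- >   let loop = recv >>= \case
-- >         Right (Just msg) -> print msg >> loop
-- >         Right Nothing    -> pure ()        -- stream closed by the server
-- >         Left err         -> print err
-- >   in loop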
data ClientResult (streamType :: GRPCMethodType) response where
ClientNormalResponse :: response -> MetadataMap -> MetadataMap -> StatusCode -> StatusDetails -> ClientResult 'Normal response
ClientWriterResponse :: Maybe response -> MetadataMap -> MetadataMap -> StatusCode -> StatusDetails -> ClientResult 'ClientStreaming response
ClientReaderResponse :: MetadataMap -> StatusCode -> StatusDetails -> ClientResult 'ServerStreaming response
ClientBiDiResponse :: MetadataMap -> StatusCode -> StatusDetails -> ClientResult 'BiDiStreaming response
ClientErrorResponse :: ClientError -> ClientResult streamType response
class ClientRegisterable (methodType :: GRPCMethodType) where
clientRegisterMethod :: LL.Client
-> LL.MethodName
-> IO (RegisteredMethod methodType request response)
instance ClientRegisterable 'Normal where
clientRegisterMethod client methodName =
RegisteredMethod <$> LL.clientRegisterMethodNormal client methodName
instance ClientRegisterable 'ClientStreaming where
clientRegisterMethod client methodName =
RegisteredMethod <$> LL.clientRegisterMethodClientStreaming client methodName
instance ClientRegisterable 'ServerStreaming where
clientRegisterMethod client methodName =
RegisteredMethod <$> LL.clientRegisterMethodServerStreaming client methodName
instance ClientRegisterable 'BiDiStreaming where
clientRegisterMethod client methodName =
RegisteredMethod <$> LL.clientRegisterMethodBiDiStreaming client methodName
clientRequest :: (Message request, Message response) =>
LL.Client -> RegisteredMethod streamType request response
-> ClientRequest streamType request response -> IO (ClientResult streamType response)
clientRequest client (RegisteredMethod method) (ClientNormalRequest req timeout meta) =
mkResponse <$> LL.clientRequest client method timeout (BL.toStrict (toLazyByteString req)) meta
where
mkResponse (Left ioError_) = ClientErrorResponse (ClientIOError ioError_)
mkResponse (Right rsp) =
case fromByteString (LL.rspBody rsp) of
Left err -> ClientErrorResponse (ClientErrorNoParse err)
Right parsedRsp ->
ClientNormalResponse parsedRsp (LL.initMD rsp) (LL.trailMD rsp) (LL.rspCode rsp) (LL.details rsp)
clientRequest client (RegisteredMethod method) (ClientWriterRequest timeout meta handler) =
mkResponse <$> LL.clientWriter client method timeout meta (handler . convertSend)
where
mkResponse (Left ioError_) = ClientErrorResponse (ClientIOError ioError_)
mkResponse (Right (rsp_, initMD_, trailMD_, rspCode_, details_)) =
case maybe (Right Nothing) (fmap Just . fromByteString) rsp_ of
Left err -> ClientErrorResponse (ClientErrorNoParse err)
Right parsedRsp ->
ClientWriterResponse parsedRsp initMD_ trailMD_ rspCode_ details_
clientRequest client (RegisteredMethod method) (ClientReaderRequest req timeout meta handler) =
mkResponse <$> LL.clientReader client method timeout (BL.toStrict (toLazyByteString req)) meta (\cc m recv -> handler cc m (convertRecv recv))
where
mkResponse (Left ioError_) = ClientErrorResponse (ClientIOError ioError_)
mkResponse (Right (meta_, rspCode_, details_)) =
ClientReaderResponse meta_ rspCode_ details_
clientRequest client (RegisteredMethod method) (ClientBiDiRequest timeout meta handler) =
mkResponse <$> LL.clientRW client method timeout meta (\cc _m recv send writesDone -> handler cc meta (convertRecv recv) (convertSend send) writesDone)
where
mkResponse (Left ioError_) = ClientErrorResponse (ClientIOError ioError_)
mkResponse (Right (meta_, rspCode_, details_)) =
ClientBiDiResponse meta_ rspCode_ details_
acquireClient
:: LL.ClientConfig
-- ^ The client configuration (host, port, SSL settings, etc)
-> (LL.Client -> IO (ServiceClient service))
-- ^ The client implementation (typically generated)
-> Managed (ServiceClient service)
acquireClient cfg impl = do
g <- managed LL.withGRPC
c <- managed (LL.withClient g cfg)
liftIO (impl c)
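-- A usage sketch (names are assumptions; @fooClient@ stands for a generated
-- client constructor):
--
-- > runManaged $ do
-- >   service <- acquireClient config fooClient
-- >   liftIO (useService service)
--
-- 'runManaged' comes from "Control.Monad.Managed".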
-- | A utility for simplifying server-streaming gRPC client requests; you can
-- use this to avoid 'ClientRequest' and 'ClientResult' pattern-matching
-- boilerplate at call sites.
simplifyServerStreaming :: TimeoutSeconds
-- ^ RPC call timeout, in seconds
-> MetadataMap
-- ^ RPC call metadata
-> (ClientError -> IO StatusDetails)
-- ^ Handler for client errors
-> (StatusCode -> StatusDetails -> IO StatusDetails)
-- ^ Handler for non-StatusOk response
-> (ClientRequest 'ServerStreaming request response
-> IO (ClientResult 'ServerStreaming response))
-- ^ Endpoint implementation (typically generated by grpc-haskell)
-> request
-- ^ Request payload
-> (LL.ClientCall -> MetadataMap -> StreamRecv response -> IO ())
-- ^ Stream handler; note that the 'StreamRecv'
-- action must be called repeatedly in order to
-- consume the stream
-> IO StatusDetails
simplifyServerStreaming timeout meta clientError nonStatusOkError f x handler = do
let request = ClientReaderRequest x timeout meta handler
response <- f request
case response of
ClientReaderResponse _ StatusOk details
-> pure details
ClientReaderResponse _ statusCode details
-> nonStatusOkError statusCode details
ClientErrorResponse err
-> clientError err
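-- A call-site sketch (all names other than 'simplifyServerStreaming' are
-- assumptions for illustration):
--
-- > details <- simplifyServerStreaming 30 mempty onClientError onBadStatus
-- >              (fooServerStreamingCall serviceClient) request streamHandler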
-- | A utility for simplifying unary gRPC client requests; you can use this to
-- avoid 'ClientRequest' and 'ClientResult' pattern-matching boilerplate at
-- call sites.
simplifyUnary :: TimeoutSeconds
-- ^ RPC call timeout, in seconds
-> MetadataMap
-- ^ RPC call metadata
-> (ClientError -> IO (response, StatusDetails))
-- ^ Handler for client errors
-> (response -> StatusCode -> StatusDetails -> IO (response, StatusDetails))
-- ^ Handler for non-StatusOK responses
-> (ClientRequest 'Normal request response -> IO (ClientResult 'Normal response))
-- ^ Endpoint implementation (typically generated by grpc-haskell)
-> (request -> IO (response, StatusDetails))
-- ^ The simplified happy-path (StatusOk) unary call action
simplifyUnary timeout meta clientError nonStatusOkError f x = do
let request = ClientNormalRequest x timeout meta
response <- f request
case response of
ClientNormalResponse y _ _ StatusOk details
-> pure (y, details)
ClientNormalResponse y _ _ code details
-> nonStatusOkError y code details
ClientErrorResponse err
-> clientError err
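-- A call-site sketch (all names other than 'simplifyUnary' are assumptions
-- for illustration):
--
-- > (response, details) <- simplifyUnary 30 mempty onClientError onBadStatus
-- >                          (fooUnaryCall serviceClient) request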
| awakenetworks/gRPC-haskell | src/Network/GRPC/HighLevel/Client.hs | apache-2.0 | 10,256 | 0 | 17 | 2,753 | 2,129 | 1,129 | 1,000 | 154 | 7 |
{-# LANGUAGE FlexibleContexts, TypeOperators, Rank2Types, TypeFamilies #-}
module Data.HMin where
import Data.MemoTrie
import Data.AdditiveGroup
import Data.VectorSpace
import Data.Maclaurin
import Data.Basis
import Data.LinearMap
import Control.Arrow
-- convenience type and eq operator
type Equation a b = (a -> b) -> b -> a -> b
infixl 5 ~=
(~=) :: Num b => (a -> b) -> b -> a -> b
f ~= c = \x -> f x - c
eqAnd :: Num c => (a -> c) -> (b -> c) -> (a, b) -> c
eqAnd f1 f2 (x, y) = sqr (f1 x) + sqr (f2 y)
eqAnd' :: Num c => (a -> c) -> (a -> c) -> a -> c
eqAnd' f1 f2 x = (f1 `eqAnd` f2) (dup x)
where dup v = (v, v)
sqr :: Num b => b -> b
sqr x = x*x
sqrD :: (Num b, VectorSpace b, b ~ Scalar b, b ~ (Scalar a), HasBasis a, HasTrie (Basis a)) => (a :> b) -> (a :> b)
sqrD = sqr
solve :: (Show a, Num b, Ord b, AdditiveGroup a) => (a :~> b) -> ((a :> b) -> Bool) -> ((a :> b) -> a) -> a -> a
solve eq stopfunc stepfunc start = let delta = eq start in
if stopfunc delta
then start
else solve eq stopfunc stepfunc (start ^+^ stepfunc delta)
-- TODO
-- need this
-- https://en.wikipedia.org/wiki/Pushforward_(differential)
gradStepFunc :: (HasBasis a, HasTrie (Basis a), VectorSpace b, Num b, Scalar a ~ b, Scalar b ~ b) => a -> (a :> b) -> a
gradStepFunc gammas delta = negateV $ dV gammas (sqrD delta)
-- TODO double check this please
-- the derivative in the V direction with "f," a function to get us
-- from a :> b to a...
dV :: (Scalar a ~ b, Scalar b ~ b, HasBasis a, HasTrie (Basis a), VectorSpace b) => a -> (a :> b) -> a
dV dv dfx = recompose . map (\(v, s) -> (v, (^* s) . powVal $ derivAtBasis dfx v)) $ decompose dv
equ' :: ((Double, Double) :~> Double)
equ' = sqr fstD + sqr sndD ~= 4
equ'' :: ((Double, Double) :~> Double)
equ'' = fstD + 2*sndD ~= 2
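-- An untested sketch of how these pieces are meant to fit together: minimise
-- the combined squared residual of both equations by gradient steps, starting
-- from (1,1). The tolerance and step sizes are arbitrary choices.
--
-- > solve (equ' `eqAnd'` equ'')
-- >       (\d -> abs (powVal d) < 1e-6)
-- >       (gradStepFunc (0.01, 0.01))
-- >       (1, 1)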
-- this seems to require the list of basis values...
{-
instance (VectorSpace b, HasBasis a, HasTrie (Basis a), Scalar a ~ Scalar b) => HasBasis (a -> b) where
type Basis (a -> b) = (Basis a) -> b
-- TODO
-- do we really need Scalar a ~ Scalar b?
-- lapply requires it, so seems possible.
-- move to linear maps...
basisValue f = sumV . map (\(b, s) -> s *^ f b) . decompose
-- TODO
-- I think I need the list of basis vectors (possibly infinite) here...
-- Data.VectorSpace.project???
decompose f = map (first (f . basisValue))
-}
-- https://stackoverflow.com/questions/9313994/derivative-towers-and-how-to-use-the-vector-space-package-haskell
diff :: (Double :~> (Double,Double,Double) ) -> (Double :~> (Double,Double,Double))
diff g = \x -> (atBasis (derivative (g x)) ())
diff' :: Either () () -> ((Double, Double) :~> Double) -> ((Double, Double) :~> Double)
diff' b g = \(x,y) -> derivAtBasis (g (x,y)) b
eval :: (a :~> b) -> a -> b
eval g x = powVal (g x)
f :: Double :~> (Double,Double,Double)
f x = tripleD (pureD 0, pureD 1, (2*idD) x)
f' :: (Double, Double) :~> Double
f' xy = fstD xy + (sndD*2) xy
| cspollard/hminimize | src/Data/HMin.hs | apache-2.0 | 3,062 | 0 | 13 | 763 | 1,160 | 624 | 536 | 45 | 2 |
module OAuthToken
( AccessToken
, RequestToken
, OAuthToken
) where
import Prelude
import Yesod
import qualified Data.Text as T
import Database.Persist.Store
( SqlType (SqlString)
, PersistValue (PersistText)
)
class (Read a, Show a, PathPiece a, PersistField a) => OAuthToken a where
mkToken :: String -> Maybe a
getToken :: a -> T.Text
newtype AccessToken = AccessToken T.Text deriving (Eq)
newtype RequestToken = RequestToken T.Text deriving (Eq)
instance Show AccessToken where
show (AccessToken t) = T.unpack t
instance Show RequestToken where
show (RequestToken t) = T.unpack t
instance OAuthToken RequestToken where
mkToken = mkOAuthToken RequestToken "R-"
getToken (RequestToken t) = t
instance OAuthToken AccessToken where
mkToken = mkOAuthToken AccessToken "A-"
getToken (AccessToken t) = t
simpleReadsPrec :: (OAuthToken t) => Int -> ReadS t
simpleReadsPrec _ s = case mkToken s of
Just tok -> [(tok, "")]
Nothing -> []
instance Read AccessToken where
readsPrec = simpleReadsPrec
instance Read RequestToken where
readsPrec = simpleReadsPrec
mkOAuthToken :: (OAuthToken a) => (T.Text -> a) -> String -> String -> Maybe a
mkOAuthToken constructor pre text = if correctLength && validChars text && prefixMatches
then Just $ constructor $ T.pack text
else Nothing
where
length_without_prefix = 16
correctLength = length text == length_without_prefix + length pre
validChars = foldr ((&&) . base64Char) True
prefixMatches = take (length pre) text == pre
base64Char x = or [ x `elem` ['A' .. 'Z']
, x `elem` ['a' .. 'z']
, x `elem` ['0' .. '9']
, x `elem` "+/="
, x `elem` pre]
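-- For example (token bodies are illustrative; 16 base64 characters after the
-- two-character prefix):
--
-- > mkToken "R-AAAAAAAAAAAAAAAA" :: Maybe RequestToken   -- Just ...
-- > mkToken "A-AAAAAAAAAAAAAAAA" :: Maybe RequestToken   -- Nothing (wrong prefix)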
generalFromPathPiece :: OAuthToken a => T.Text -> Maybe a
generalFromPathPiece s =
case reads $ T.unpack s of
[(a, "")] -> Just a
_ -> Nothing
instance PathPiece RequestToken where
fromPathPiece = generalFromPathPiece
toPathPiece = T.pack . show
instance PathPiece AccessToken where
fromPathPiece = generalFromPathPiece
toPathPiece = T.pack . show
instance PersistField RequestToken where
sqlType _ = SqlString
toPersistValue = PersistText . getToken
fromPersistValue (PersistText val)
= case mkToken $ T.unpack val of
Just tok -> Right tok
Nothing -> Left "no token"
fromPersistValue _ = Left "unsupported value"
instance PersistField AccessToken where
sqlType _ = SqlString
toPersistValue = PersistText . getToken
fromPersistValue (PersistText val)
= case mkToken $ T.unpack val of
Just tok -> Right tok
Nothing -> Left "no token"
fromPersistValue _ = Left "unsupported value"
| JanAhrens/yesod-oauth-demo | OAuthToken.hs | bsd-2-clause | 2,945 | 0 | 10 | 858 | 855 | 452 | 403 | 73 | 2 |
-- Usage: xmacrorec2 | ConvertClicks > clicks.txt
module Main (main) where
import Control.Monad (void)
import Data.Char (isDigit)
import Data.List (isPrefixOf)
import System.Environment (getArgs)
main :: IO ()
main = do
args <- getArgs
let printFunc = toClick $ if "--haskell" `elem` args
then printTuple
else printCommand
void $ mainLoop printFunc ("", "")
where xOffset = id
yOffset = flip (-) 38
printTuple x y = print (xOffset x, yOffset y)
printCommand x y = putStrLn $ "click " ++ show (xOffset x) ++ " "
++ show (yOffset y)
mainLoop :: PrintFunc -> (String, String) -> IO (String, String)
mainLoop prt window = getLine >>= updateWindow >>= prt >>= mainLoop prt
where move (_, a) b = (a, b)
updateWindow = return . move window
-- |Output click with coordinates if applicable.
-- >>> toClick (\x y -> print (x, y)) ("MotionNotify 1500 550","ButtonPress 1")
-- (1500,550)
-- ("MotionNotify 1500 550","ButtonPress 1")
-- >>> toClick (\x y -> print (x, y)) ("ButtonPress 1","MotionNotify 1500 550")
-- ("ButtonPress 1","MotionNotify 1500 550")
toClick :: (Int -> Int -> IO ()) -> (String, String) -> IO (String, String)
toClick prt window@(a, b)
| isClick b = print' (toXY a) >> return window
| otherwise = return window
where isClick = (==) "ButtonPress 1"
print' Nothing = return ()
print' (Just (x, y)) = prt x y
-- |Return the XY coordinates from the string.
-- >>> toXY "MotionNotify 123 456"
-- Just (123,456)
-- >>> toXY "Bla 123 456"
-- Nothing
toXY :: String -> Maybe (Int, Int)
toXY a
| "MotionNotify " `isPrefixOf` a = Just (x, y)
| otherwise = Nothing
where xy = dropWhile (not . isDigit) a
x = read $ takeWhile isDigit xy
y = read $ dropWhile isDigit xy
type PrintFunc = (String, String) -> IO (String, String)
| KaiHa/GuiTest | src/ConvertClicks.hs | bsd-3-clause | 1,984 | 0 | 12 | 559 | 583 | 313 | 270 | 36 | 2 |
-- | Simple prettified log
module Emulator.Log
( prettify
, state
, core
, registers
, ram
) where
import Control.Applicative ((<$>))
import Control.Monad (forM)
import Data.List (intercalate)
import Emulator
import Emulator.Monad
import Instruction
import Memory (Address (..))
import Util
prettify :: MonadEmulator m => m String
prettify = unlines <$> sequence
[core , registers , return "" , return "RAM:" , return "" , ram]
state :: MonadEmulator m => Instruction Operand -> Instruction Value -> m String
state instr instr' = unlines <$> sequence
[ return $ "Execute: " ++ show instr ++ " -> " ++ show instr'
, core
, registers
]
core :: MonadEmulator m => m String
core = do
pc <- load Pc
sp <- load Sp
o <- load O
cycles <- load Cycles
return $ intercalate ", " $
[ "PC: " ++ prettifyWord16 pc
, "SP: " ++ prettifyWord16 sp
, "O: " ++ prettifyWord16 o
, "CYCLES: " ++ prettifyWord16 cycles
]
registers :: MonadEmulator m => m String
registers = do
rs <- forM [minBound .. maxBound] $ \name -> do
val <- load (Register name)
return (name, val)
return $ intercalate ", " $
[show name ++ ": " ++ prettifyWord16 val | (name, val) <- rs]
ram :: MonadEmulator m => m String
ram = unlines <$> mapM line [(x * 8, x * 8 + 7) | x <- [0 .. 0xffff `div` 8]]
where
line (lo, up) = do
vs <- mapM (load . Ram) [lo .. up]
return $ prettifyWord16 lo ++ ": " ++ unwords (map prettifyWord16 vs)
| jaspervdj/dcpu16-hs | src/Emulator/Log.hs | bsd-3-clause | 1,573 | 0 | 15 | 457 | 585 | 301 | 284 | 45 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Cloud.AWS.EC2.Types.RouteTable
( PropagatingVgw
, Route(..)
, RouteOrigin(..)
, RouteState(..)
, RouteTable(..)
, RouteTableAssociation(..)
) where
import Cloud.AWS.EC2.Types.Common (ResourceTag)
import Cloud.AWS.Lib.FromText (deriveFromText)
import Data.Text (Text)
type PropagatingVgw = Text
data Route = Route
{ routeDestinationCidrBlock :: Text
, routeGatewayId :: Maybe Text
, routeInstanceId :: Maybe Text
, routeInstanceOwnerId :: Maybe Text
, routeNetworkInterfaceId :: Maybe Text
, routeState :: RouteState
, routeOrigin :: Maybe RouteOrigin
}
deriving (Show, Read, Eq)
data RouteOrigin
= RouteOriginCreateRouteTable
| RouteOriginCreateRoute
| RouteOriginTableEnableVgwRoutePropagation
deriving (Show, Read, Eq)
data RouteState
= RouteStateActive
| RouteStateBlackhole
deriving (Show, Read, Eq)
data RouteTable = RouteTable
{ routeTableId :: Text
, routeTableVpcId :: Text
, routeTableRouteSet :: [Route]
, routeTableAssociationSet :: [RouteTableAssociation]
, routeTablePropagatingVgw :: Maybe PropagatingVgw
, routeTableTagSet :: [ResourceTag]
}
deriving (Show, Read, Eq)
data RouteTableAssociation = RouteTableAssociation
{ routeTableAssociationId :: Text
, routeTableAssociationRouteTableId :: Text
, routeTableAssociationSubnetId :: Maybe Text
, routeTableAssociationMain :: Maybe Bool
}
deriving (Show, Read, Eq)
deriveFromText "RouteOrigin"
[ "CreateRouteTable"
, "CreateRoute"
, "EnableVgwRoutePropagation"
]
deriveFromText "RouteState" ["active", "blackhole"]
| worksap-ate/aws-sdk | Cloud/AWS/EC2/Types/RouteTable.hs | bsd-3-clause | 1,682 | 0 | 9 | 335 | 382 | 230 | 152 | 49 | 0 |
{-# OPTIONS -fglasgow-exts #-}
----------------------------------------------------------------------
-- |
-- Module : Interface.TV.Misc
-- Copyright : (c) Conal Elliott 2006
-- License : LGPL
--
-- Maintainer : conal@conal.net
-- Stability : experimental
-- Portability : portable
--
-- Miscellaneous helpers
----------------------------------------------------------------------
module Interface.TV.Misc
(
-- readD {-, Cofunctor(..), ToIO(..), wrapF -}
) where
-- | Read with default value. If the input doesn't parse as a value of
-- the expected type, or it's ambiguous, yield the default value.
readD :: Read a => a -> String -> a
readD dflt str | [(a,"")] <- reads str = a
| otherwise = dflt
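-- For example (illustrative only):
--
-- > readD (0 :: Int) "42"   == 42
-- > readD (0 :: Int) "junk" == 0
-- > readD (0 :: Int) "1 2"  == 0   -- leftover input also falls back to the default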
-- Cofunctor is in TypeCompose
-- -- | Often useful for \"acceptors\" of values.
-- class Cofunctor acceptor where
-- cofmap :: (a -> b) -> (acceptor b -> acceptor a)
-- -- | Arrows that convert to IO actions.
-- class Arrow (~>) => ToIO (~>) where
-- -- Result type is restricted to () to allow arr types that yield more
-- -- (or fewer) than one value.
-- toIO :: () ~> () -> IO ()
-- | Handy wrapping pattern. For instance, @wrapF show read@ turns a
-- string function into value function.
-- wrapF :: (c->d) -> (a->b) -> ((b->c) -> (a->d))
-- wrapF after before f = after . f . before
| conal/TV | src/Interface/TV/Misc.hs | bsd-3-clause | 1,361 | 0 | 10 | 294 | 100 | 66 | 34 | 6 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
module Main where
import Control.Monad.Except
import Data.Foldable
import System.Environment
import System.IO
import Language.Scheme.Pretty
import Language.Scheme.Reader
data FormatError = FormatError deriving (Eq, Show)
type FormatM = ExceptT FormatError IO
-- FIXME: Don't ignore blank lines
formatString :: String -> FormatM String
formatString s = do
case readExprList s of
Left _ -> throwError FormatError
Right exprs -> pure $ prettyPrint exprs
-- FIXME: Add IO exception handling
formatFile :: FilePath -> IO ()
formatFile path = do
contents <- readFile path
res <- runExceptT (formatString contents)
case res of
Left e -> print e
Right newContents -> writeFile path newContents
usage :: String
usage = "Usage: schemefmt [path ...]"
main :: IO ()
main = do
args <- getArgs
if null args
then putStrLn usage
else traverse_ formatFile args
| alldne/scheme | schemefmt/Main.hs | bsd-3-clause | 923 | 0 | 11 | 179 | 262 | 132 | 130 | 30 | 2 |
-- -- | Values with a @public@ boolean accessor.
module Data.Geo.OSM.Lens.PublicL where
import Control.Lens.Lens
class PublicL a where
publicL ::
Lens' a Bool
| tonymorris/geo-osm | src/Data/Geo/OSM/Lens/PublicL.hs | bsd-3-clause | 168 | 0 | 7 | 32 | 35 | 21 | 14 | 5 | 0 |
module Parser (
dimacs,
CNF, Clause, Lit, Var,
) where
import Control.Applicative
import Control.Monad
import Text.Trifecta
type CNF = [Clause]
type Clause = [Lit]
type Lit = Int
type Var = Int
dimacs :: Parser CNF
dimacs = do
skipMany $ char 'c' >> manyTill anyChar newline
(_nvar, nclause) <- (,) <$ symbol "p" <* symbol "cnf" <*> integral <*> integral
replicateM nclause $ manyTill integral $ (try $ integral >>= guard . (==0))
where
integral = fmap fromIntegral (spaces >> integer)
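-- For example (illustrative only), the DIMACS input
--
-- > c a comment line
-- > p cnf 3 2
-- > 1 -3 0
-- > 2 3 -1 0
--
-- parses to the CNF @[[1,-3],[2,3,-1]]@.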
| tanakh/necosat | src/Parser.hs | bsd-3-clause | 537 | 0 | 12 | 133 | 192 | 106 | 86 | 16 | 1 |
{-# LANGUAGE ViewPatterns, TupleSections, RecordWildCards, ScopedTypeVariables, PatternGuards, DeriveDataTypeable #-}
module Output.Items(writeItems, lookupItem) where
import Language.Haskell.Exts
import System.IO.Extra
import Data.List.Extra
import System.FilePath
import Control.Monad.Extra
import Data.Maybe
import Data.IORef
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import qualified Data.ByteString.Lazy.UTF8 as UTF8
import Input.Type
import General.Util
import General.Store
outputItem :: (Id, ItemEx) -> [String]
outputItem (i, ItemEx{..}) =
[show i ++ " " ++ showItem itemItem
,if null itemURL then "." else itemURL
,maybe "." (joinPair " ") itemPackage
,maybe "." (joinPair " ") itemModule] ++
replace [""] ["."] (lines itemDocs)
inputItem :: [String] -> (Id, ItemEx)
inputItem ((word1 -> (i,name)):url:pkg:modu:docs) = (,) (read i) $ ItemEx
(fromMaybe (error $ "Failed to reparse: " ++ name) $ readItem name)
(if url == "." then "" else url)
(f pkg) (f modu) (unlines docs)
where
f "." = Nothing
f x = Just (word1 x)
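-- An illustrative sketch of the serialised layout produced by 'outputItem' and
-- consumed by 'inputItem' (the concrete values are made up): each item is a
-- block of lines of the form
--
-- > <id> <pretty-printed item>
-- > <url, or "." if empty>
-- > <package, or ".">
-- > <module, or ".">
-- > <documentation lines, with blank lines replaced by ".">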
data Items = Items deriving Typeable
-- write all the URLs, docs and enough info to pretty print it to a result
-- and replace each with an identifier (index in the space) - big reduction in memory
writeItems :: StoreOut -> FilePath -> [Either String ItemEx] -> IO [(Maybe Id, Item)]
writeItems store file xs = do
warns <- newIORef 0
pos <- newIORef 0
res <- writeStoreType store Items $ writeStoreParts store $ do
withBinaryFile (file <.> "warn") WriteMode $ \herr -> do
hSetEncoding herr utf8
flip mapMaybeM xs $ \x -> case x of
Right item@ItemEx{..} | f itemItem -> do
i <- readIORef pos
let bs = BS.concat $ LBS.toChunks $ UTF8.fromString $ unlines $ outputItem (Id i, item)
writeStoreBS store $ intToBS $ BS.length bs
writeStoreBS store bs
writeIORef pos $ i + fromIntegral (intSize + BS.length bs)
return $ Just (Just $ Id i, itemItem)
Right ItemEx{..} -> return $ Just (Nothing, itemItem)
Left err -> do modifyIORef warns (+1); hPutStrLn herr err; return Nothing
warns <- readIORef warns
unless (warns == 0) $
putStrLn $ "Failed to parse " ++ show warns ++ " definitions, see " ++ file <.> "warn"
return res
where
f :: Item -> Bool
f (IDecl i@InstDecl{}) = False
f x = True
lookupItem :: StoreIn -> IO (Id -> IO ItemEx)
lookupItem store = do
let x = readStoreBS $ readStoreType Items store
return $ \(Id i) -> do
let i2 = fromIntegral i
let n = intFromBS $ BS.take intSize $ BS.drop i2 x
return $ snd $ inputItem $ lines $ UTF8.toString $ LBS.fromChunks $ return $ BS.take n $ BS.drop (i2 + intSize) x
| ndmitchell/hogle-dead | src/Output/Items.hs | bsd-3-clause | 2,938 | 0 | 30 | 778 | 1,035 | 526 | 509 | 61 | 4 |
-- | This module exports functionality for generating a call graph of
-- an Futhark program.
module Futhark.Analysis.CallGraph
( CallGraph
, buildCallGraph
, FunctionTable
, buildFunctionTable
)
where
import Control.Monad.Reader
import qualified Data.HashMap.Lazy as HM
import Futhark.Representation.SOACS
type FunctionTable = HM.HashMap Name FunDef
buildFunctionTable :: Prog -> FunctionTable
buildFunctionTable =
foldl expand HM.empty . progFunctions
where expand ftab f =
HM.insert (funDefName f) f ftab
-- | The symbol table for functions
data CGEnv = CGEnv { envFtable :: FunctionTable }
type CGM = Reader CGEnv
-- | Building the call graph runs in this monad. There is no
-- mutable state.
runCGM :: CGM a -> CGEnv -> a
runCGM = runReader
-- | The call graph is just a mapping from a function name, i.e., the
-- caller, to a list of the names of functions called by the function.
-- The order of this list is not significant.
type CallGraph = HM.HashMap Name [Name]
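-- For example (illustrative only): in a program where @main@ calls @f@ and @g@,
-- and @f@ calls @g@, the call graph maps @main@ to @[f, g]@ (in no particular
-- order), @f@ to @[g]@, and @g@ to @[]@.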
-- | @buildCallGraph prog@ builds the program's Call Graph. The representation
-- is a hashtable that maps function names to a list of callee names.
buildCallGraph :: Prog -> CallGraph
buildCallGraph prog = do
let ftable = buildFunctionTable prog
runCGM (foldM buildCGfun HM.empty entry_points) $ CGEnv ftable
where entry_points = map funDefName $ filter funDefEntryPoint $ progFunctions prog
-- | @buildCGfun cg fname@ updates Call Graph @cg@ with the contributions of function
-- @fname@, and recursively, with the contributions of the callees of @fname@.
buildCGfun :: CallGraph -> Name -> CGM CallGraph
buildCGfun cg fname = do
bnd <- asks $ HM.lookup fname . envFtable
case bnd of
Nothing -> return cg -- Must be builtin or similar.
Just f ->
case HM.lookup fname cg of
Just _ -> return cg
Nothing -> do let callees = buildCGbody [] $ funDefBody f
let cg' = HM.insert fname callees cg
-- recursively build the callees
foldM buildCGfun cg' callees
buildCGbody :: [Name] -> Body -> [Name]
buildCGbody callees = foldl (\x -> buildCGexp x . bindingExp) callees . bodyBindings
buildCGexp :: [Name] -> Exp -> [Name]
buildCGexp callees (Apply fname _ _)
| fname `elem` callees = callees
| otherwise = fname:callees
buildCGexp callees (Op op) =
case op of Map _ _ lam _ ->
buildCGbody callees $ lambdaBody lam
Reduce _ _ _ lam _ ->
buildCGbody callees $ lambdaBody lam
Scan _ _ lam _ ->
buildCGbody callees $ lambdaBody lam
Redomap _ _ _ lam0 lam1 _ _ ->
buildCGbody (buildCGbody callees $ lambdaBody lam0) (lambdaBody lam1)
Scanomap _ _ lam0 lam1 _ _ ->
buildCGbody (buildCGbody callees $ lambdaBody lam0) (lambdaBody lam1)
Stream _ _ (RedLike _ _ lam0 _) lam _ ->
buildCGbody (buildCGbody callees $ lambdaBody lam0) (extLambdaBody lam)
Stream _ _ _ lam _ ->
buildCGbody callees (extLambdaBody lam)
Write {} ->
callees
buildCGexp callees e =
foldExp folder callees e
where folder =
identityFolder { foldOnBody = \x body -> return $ buildCGbody x body
}
| mrakgr/futhark | src/Futhark/Analysis/CallGraph.hs | bsd-3-clause | 3,338 | 0 | 20 | 908 | 848 | 426 | 422 | 62 | 8 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types, GeneralizedNewtypeDeriving, TemplateHaskell, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, UndecidableInstances #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.AD.Internal.Types
-- Copyright : (c) Edward Kmett 2010
-- License : BSD3
-- Maintainer : ekmett@gmail.com
-- Stability : experimental
-- Portability : GHC only
--
-----------------------------------------------------------------------------
module Numeric.AD.Internal.Types
( AD(..)
) where
#ifndef MIN_VERSION_base
#define MIN_VERSION_base (x,y,z) 1
#endif
import Data.Data (Data(..), mkDataType, DataType, mkConstr, Constr, constrIndex, Fixity(..))
#if MIN_VERSION_base(4,4,0)
import Data.Typeable (Typeable1(..), Typeable(..), TyCon, mkTyCon3, mkTyConApp, gcast1)
#else
import Data.Typeable (Typeable1(..), Typeable(..), TyCon, mkTyCon, mkTyConApp, gcast1)
#endif
import Language.Haskell.TH
import Numeric.AD.Internal.Classes
-- | 'AD' serves as a common wrapper for different 'Mode' instances, exposing a traditional
-- numerical tower. Universal quantification is used to limit the actions in user code to
-- machinery that will return the same answers under all AD modes, allowing us to use modes
-- interchangeably as both the type level \"brand\" and dictionary, providing a common API.
newtype AD f a = AD { runAD :: f a } deriving (Iso (f a), Lifted, Mode, Primal)
-- > instance (Lifted f, Num a) => Num (AD f a)
-- etc.
let f = varT (mkName "f") in
deriveNumeric
(classP ''Lifted [f]:)
(conT ''AD `appT` f)
instance Typeable1 f => Typeable1 (AD f) where
typeOf1 tfa = mkTyConApp adTyCon [typeOf1 (undefined `asArgsType` tfa)]
where asArgsType :: f a -> t f a -> f a
asArgsType = const
adTyCon :: TyCon
#if MIN_VERSION_base(4,4,0)
adTyCon = mkTyCon3 "ad" "Numeric.AD.Internal.Types" "AD"
#else
adTyCon = mkTyCon "Numeric.AD.Internal.Types.AD"
#endif
{-# NOINLINE adTyCon #-}
adConstr :: Constr
adConstr = mkConstr adDataType "AD" [] Prefix
{-# NOINLINE adConstr #-}
adDataType :: DataType
adDataType = mkDataType "Numeric.AD.Internal.Types.AD" [adConstr]
{-# NOINLINE adDataType #-}
instance (Typeable1 f, Typeable a, Data (f a), Data a) => Data (AD f a) where
gfoldl f z (AD a) = z AD `f` a
toConstr _ = adConstr
gunfold k z c = case constrIndex c of
1 -> k (z AD)
_ -> error "gunfold"
dataTypeOf _ = adDataType
dataCast1 f = gcast1 f
| yairchu/ad | src/Numeric/AD/Internal/Types.hs | bsd-3-clause | 2,573 | 0 | 11 | 461 | 513 | 294 | 219 | -1 | -1 |
module Serv.Server
( bootstrap
) where
import Control.Concurrent.Async
import Serv.Server.Core.Runtime
import Serv.Server.Features.Runtime
import Serv.Server.ServerEnv
bootstrap :: IO ()
bootstrap = do
serverEnv <- setupServerEnv
concurrently_ (runCore serverEnv) (runFeatures serverEnv)
| orangefiredragon/bear | src/Serv/Server.hs | bsd-3-clause | 353 | 0 | 9 | 94 | 79 | 45 | 34 | 10 | 1 |
module Text.Liquid.Generators where
import Control.Monad (join)
import Data.List.NonEmpty
import Data.Monoid
import Data.Scientific
import Data.Text
import Prelude hiding (null)
import Test.QuickCheck
import Text.Liquid.Types
-- | Any allowed char in a variable == a-z, A-Z, _ and -
newtype VariableChars
= VariableChars { varChars :: Text }
deriving (Eq, Show)
instance Arbitrary VariableChars where
arbitrary = VariableChars . pack <$>
(listOf1 $ elements (['a'..'z'] <>
['A'..'Z'] <>
['_','-']))
-- | Any alpha char
newtype AlphaChars
= AlphaChars { alphaChars :: Text }
deriving (Eq, Show)
instance Arbitrary AlphaChars where
arbitrary = AlphaChars . pack <$>
(listOf1 $ elements (['a'..'z'] <> ['A'..'Z']))
-- | Any allowed char type
newtype AnyChar
= AnyChar { anyChars :: Text }
deriving (Eq, Show)
instance Arbitrary AnyChar where
arbitrary = AnyChar . pack <$>
arbitrary
-- | Test helper for scientific values
sc :: Double -> Scientific
sc d = fromFloatDigits d
genJsonAddress :: Gen (JsonVarPath)
genJsonAddress = do
h <- hd
b <- bd
return $ fromList (h:b)
where hd = ObjectIndex . varChars <$> arbitrary
bd = resize 3 $ listOf $
oneof [ ObjectIndex . varChars <$> arbitrary
, ArrayIndex <$> suchThat arbitrary ((<) 0)
]
genRawText :: Gen Expr
genRawText = RawText . alphaChars <$> suchThat arbitrary (not . null . alphaChars)
genNum :: Gen Expr
genNum = Num . sc <$> arbitrary
genVariable :: Gen Expr
genVariable = Variable <$> genJsonAddress
genQuoteString :: Gen Expr
genQuoteString = QuoteString . alphaChars <$> arbitrary
genCompare :: Gen Expr
genCompare =
elements [ Equal
, NotEqual
, GtEqual
, LtEqual
, Gt
, Lt
] <*> anyVal <*> anyVal
where anyVal = oneof [ genNum
, genVariable
, genQuoteString
, pure Null
, pure Nil
, pure Trueth
, pure Falseth
]
genBooleanLogic :: Gen Expr
genBooleanLogic = oneof [ cp, or, ad, cn, trth ]
where cp = genCompare
or = Or <$> genCompare <*> genCompare
ad = And <$> genCompare <*> genCompare
cn = Contains <$> genVariable <*> oneof [ genNum, genQuoteString ]
trth = oneof [ Truthy <$> genNum
, Truthy <$> genQuoteString
, Truthy <$> genVariable
, pure Trueth
, pure Falseth
, pure Nil
, pure Null
]
genIfClause :: Gen Expr
genIfClause = IfClause <$> genBooleanLogic
genIfKeyClause :: Gen Expr
genIfKeyClause = IfKeyClause <$> genVariable
genElsIfClause :: Gen Expr
genElsIfClause = ElsIfClause <$> genBooleanLogic
genElse :: Gen Expr
genElse = pure Else
genFilterCell :: Gen Expr
genFilterCell = oneof [
pure $ FilterCell "toUpper" []
, pure $ FilterCell "toLower" []
, pure $ FilterCell "toTitle" []
, FilterCell "replace" <$> sequence [ genQuoteString, genQuoteString ]
, pure $ FilterCell "first" []
, pure $ FilterCell "last" []
, FilterCell "firstOrDefault" <$> oneof [ pure <$> genQuoteString, pure <$> genNum ]
, FilterCell "lastOrDefault" <$> oneof [ pure <$> genQuoteString, pure <$> genNum ]
, FilterCell "renderWithSeparator" <$> sequence [ genQuoteString ]
, FilterCell "toSentenceWithSeparator" <$> sequence [ genQuoteString, genQuoteString ]
, pure $ FilterCell "countElements" []
]
genFilter :: Gen Expr
genFilter = Filter <$>
oneof [ genQuoteString, genVariable ] <*>
resize 2 (listOf1 genFilterCell)
genOutput :: Gen Expr
genOutput = Output <$>
oneof [ genFilter
, genVariable
, genQuoteString
]
genTrueStatements :: Gen Expr
genTrueStatements = TrueStatements <$> arrangements
where arrangements = oneof [
sequence [ genRawText ]
, sequence [ genOutput ]
, sequence [ genRawText, genOutput ]
, sequence [ genOutput, genRawText ]
, sequence [ genRawText, genOutput, genRawText ]
, sequence [ genOutput, genRawText, genOutput ]
]
genIfLogic :: Gen Expr
genIfLogic = oneof [ styleA
, styleB
, styleC
, styleD
]
where styleA = IfLogic <$> oneof[genIfClause, genIfKeyClause] <*> genTrueStatements
styleB = IfLogic <$> (IfLogic <$> oneof[genIfClause, genIfKeyClause] <*> genTrueStatements)
<*> (IfLogic <$> genElse <*> genTrueStatements)
styleC = IfLogic <$> (IfLogic <$> oneof[genIfClause, genIfKeyClause] <*> genTrueStatements)
<*> (IfLogic <$> genElsIfClause <*> genTrueStatements)
styleD = IfLogic <$> (IfLogic <$> oneof[genIfClause, genIfKeyClause] <*> genTrueStatements)
<*> (IfLogic <$> (IfLogic <$> genElsIfClause <*> genTrueStatements)
<*> (IfLogic <$> genElse <*> genTrueStatements))
genCaseLogic :: Gen Expr
genCaseLogic = CaseLogic <$>
genVariable <*>
((resize 2 $ listOf1 tup) >>=
\l -> ((<>) l <$> oneof [ pure <$> ((,) <$> genElse <*> genTrueStatements)
, pure []
]))
where tup = (,) <$> oneof [genQuoteString, genNum] <*> genTrueStatements
genExpr :: Gen Expr
genExpr = oneof [ genRawText
, genIfLogic
, genOutput
, genCaseLogic
]
genTemplateExpr :: Gen [Expr]
genTemplateExpr =
concatAdj <$> listOf1 genExpr
-- | Concatenate adjacent RawText expressions in a list
-- This isn't a valid outcome from parsing and as such is illegal
concatAdj :: [Expr]
-> [Expr]
concatAdj [] = []
concatAdj (x:[]) = [x]
concatAdj ((RawText x):(RawText y):[]) = [RawText $ x <> y]
concatAdj (x:y:[]) = [x, y]
concatAdj ((RawText x):(RawText y):xs) = concatAdj ((RawText $ x <> y):xs)
concatAdj (x:y:xs) = x:concatAdj (y:xs)
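-- For example (illustrative only, string literals shown schematically):
--
-- > concatAdj [RawText "a", RawText "b", Else] == [RawText "ab", Else]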
| projectorhq/haskell-liquid | test/Text/Liquid/Generators.hs | bsd-3-clause | 6,564 | 0 | 17 | 2,274 | 1,736 | 943 | 793 | 152 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Control.Exception.Enclosed.Either
( eTry
, eExIO
, eIoTry
, eIOExIO
, eTextTry
, eTxIO
, eIOExTxIO
) where
import Data.Text as T
import Data.Maybe
import Data.Monoid
import Data.Either.Combinators
import Control.DeepSeq
import Control.Monad.Trans.Control
import Control.Exception.Enclosed
import Control.Exception.Lifted
import Control.Monad.Trans.Either
fromIOException' :: SomeException -> IOException
fromIOException' e
= fromMaybe (throw . AssertionFailed $ "Not an IOException:" <> show e)
$ fromException e
-- | Runs provided @IO@ action, captures synchronous exceptions as @Left@ values,
-- re-throws asynchronous exceptions.
--
-- /Note:/ value @a@ is fully evaluated, and as such it should be a member of the
-- @NFData@ typeclass
eTry , eExIO :: (MonadBaseControl IO (EitherT e IO), NFData a)
=> IO a -> EitherT SomeException IO a
eTry = EitherT . tryAnyDeep
eExIO = EitherT . tryAnyDeep
-- | Runs provided @IO@ action, captures synchronous @IOException@ as @Left@
-- values, re-throws asynchronous exceptions (and synchronous non-IOExceptions).
--
-- /Note:/ value @a@ is fully evaluated, and as such it should be a member of the
-- @NFData@ typeclass
eIoTry, eIOExIO :: (MonadBaseControl IO (EitherT e IO), NFData a)
=> IO a -> EitherT IOException IO a
eIoTry = EitherT . fmap (mapLeft fromIOException') . tryAnyDeep
eIOExIO = EitherT . fmap (mapLeft fromIOException') . tryAnyDeep
-- | Runs provided @IO@ action, captures synchronous @IOException@ as left @Text@
-- values, re-throws asynchronous exceptions (and synchronous non-IOExceptions).
--
-- /Note:/ value @a@ is fully evaluated, and as such it should be a member of the
-- @NFData@ typeclass
eTextTry, eTxIO, eIOExTxIO
:: (MonadBaseControl IO (EitherT e IO), NFData a)
=> IO a -> EitherT Text IO a
eTextTry = bimapEitherT (T.pack . show) id . eIOExIO
eTxIO = eTextTry
eIOExTxIO= eTextTry
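-- An illustrative usage sketch (added for exposition; 'safeRead' is a made-up
-- name, not part of this module): wrapping an IO action so that a synchronous
-- IOException becomes a @Left@ 'Text' while asynchronous exceptions still
-- propagate:
--
-- > safeRead :: FilePath -> EitherT Text IO String
-- > safeRead = eTextTry . readFile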
| jcristovao/enclosed-exceptions-either | src/Control/Exception/Enclosed/Either.hs | bsd-3-clause | 2,114 | 0 | 10 | 387 | 384 | 222 | 162 | 39 | 1 |
module T271 where
import Data.Kind (Type)
import Data.Singletons.Base.TH
$(singletons
[d| newtype Constant (a :: Type) (b :: Type) =
Constant a deriving (Eq, Ord)
data Identity :: Type -> Type where
Identity :: a -> Identity a
deriving (Eq, Ord)
|])
| goldfirere/singletons | singletons-base/tests/compile-and-dump/Singletons/T271.hs | bsd-3-clause | 301 | 0 | 7 | 92 | 35 | 22 | 13 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wno-unused-top-binds #-}
module Nix.String
( NixString
, principledGetContext
, principledMakeNixString
, principledMempty
, StringContext(..)
, ContextFlavor(..)
, NixLikeContext(..)
, NixLikeContextValue(..)
, toNixLikeContext
, fromNixLikeContext
, stringHasContext
, principledIntercalateNixString
, hackyGetStringNoContext
, principledGetStringNoContext
, principledStringIgnoreContext
, hackyStringIgnoreContext
, hackyMakeNixStringWithoutContext
, principledMakeNixStringWithoutContext
, principledMakeNixStringWithSingletonContext
, principledModifyNixContents
, principledStringMappend
, principledStringMempty
, principledStringMConcat
, WithStringContext
, WithStringContextT(..)
, extractNixString
, addStringContext
, addSingletonStringContext
, runWithStringContextT
, runWithStringContext
)
where
import Control.Monad.Writer
import Data.Functor.Identity
import qualified Data.HashMap.Lazy as M
import qualified Data.HashSet as S
import Data.Hashable
import Data.Text ( Text )
import qualified Data.Text as Text
import GHC.Generics
-- {-# WARNING hackyGetStringNoContext, hackyStringIgnoreContext, hackyMakeNixStringWithoutContext "This NixString function needs to be replaced" #-}
-- | A 'ContextFlavor' describes the sum of possible derivations for string contexts
data ContextFlavor =
DirectPath
| AllOutputs
| DerivationOutput !Text
deriving (Show, Eq, Ord, Generic)
instance Hashable ContextFlavor
-- | A 'StringContext' ...
data StringContext =
StringContext { scPath :: !Text
, scFlavor :: !ContextFlavor
} deriving (Eq, Ord, Show, Generic)
instance Hashable StringContext
data NixString = NixString
{ nsContents :: !Text
, nsContext :: !(S.HashSet StringContext)
} deriving (Eq, Ord, Show, Generic)
instance Hashable NixString
newtype NixLikeContext = NixLikeContext
{ getNixLikeContext :: M.HashMap Text NixLikeContextValue
} deriving (Eq, Ord, Show, Generic)
data NixLikeContextValue = NixLikeContextValue
{ nlcvPath :: !Bool
, nlcvAllOutputs :: !Bool
, nlcvOutputs :: ![Text]
} deriving (Show, Eq, Ord, Generic)
instance Semigroup NixLikeContextValue where
a <> b = NixLikeContextValue
{ nlcvPath = nlcvPath a || nlcvPath b
, nlcvAllOutputs = nlcvAllOutputs a || nlcvAllOutputs b
, nlcvOutputs = nlcvOutputs a <> nlcvOutputs b
}
instance Monoid NixLikeContextValue where
mempty = NixLikeContextValue False False []
toStringContexts :: (Text, NixLikeContextValue) -> [StringContext]
toStringContexts (path, nlcv) = case nlcv of
NixLikeContextValue True _ _ -> StringContext path DirectPath
: toStringContexts (path, nlcv { nlcvPath = False })
NixLikeContextValue _ True _ -> StringContext path AllOutputs
: toStringContexts (path, nlcv { nlcvAllOutputs = False })
NixLikeContextValue _ _ ls | not (null ls) ->
map (StringContext path . DerivationOutput) ls
_ -> []
toNixLikeContextValue :: StringContext -> (Text, NixLikeContextValue)
toNixLikeContextValue sc = (,) (scPath sc) $ case scFlavor sc of
DirectPath -> NixLikeContextValue True False []
AllOutputs -> NixLikeContextValue False True []
DerivationOutput t -> NixLikeContextValue False False [t]
toNixLikeContext :: S.HashSet StringContext -> NixLikeContext
toNixLikeContext stringContext = NixLikeContext
$ S.foldr go mempty stringContext
where
go sc hm =
let (t, nlcv) = toNixLikeContextValue sc in M.insertWith (<>) t nlcv hm
fromNixLikeContext :: NixLikeContext -> S.HashSet StringContext
fromNixLikeContext =
S.fromList . join . map toStringContexts . M.toList . getNixLikeContext
principledGetContext :: NixString -> S.HashSet StringContext
principledGetContext = nsContext
-- | The empty NixString: empty contents and empty context.
principledMempty :: NixString
principledMempty = NixString "" mempty
-- | Combine two NixStrings using mappend
principledStringMappend :: NixString -> NixString -> NixString
principledStringMappend (NixString s1 t1) (NixString s2 t2) =
NixString (s1 <> s2) (t1 <> t2)
-- | Combine two NixStrings using mappend
hackyStringMappend :: NixString -> NixString -> NixString
hackyStringMappend (NixString s1 t1) (NixString s2 t2) =
NixString (s1 <> s2) (t1 <> t2)
-- | Combine NixStrings with a separator
principledIntercalateNixString :: NixString -> [NixString] -> NixString
principledIntercalateNixString _ [] = principledMempty
principledIntercalateNixString _ [ns] = ns
principledIntercalateNixString sep nss = NixString contents ctx
where
contents = Text.intercalate (nsContents sep) (map nsContents nss)
ctx = S.unions (nsContext sep : map nsContext nss)
-- | Combine NixStrings using mconcat
hackyStringMConcat :: [NixString] -> NixString
hackyStringMConcat = foldr hackyStringMappend (NixString mempty mempty)
-- | Empty string with empty context.
principledStringMempty :: NixString
principledStringMempty = NixString mempty mempty
-- | Combine NixStrings using mconcat
principledStringMConcat :: [NixString] -> NixString
principledStringMConcat =
foldr principledStringMappend (NixString mempty mempty)
--instance Semigroup NixString where
--NixString s1 t1 <> NixString s2 t2 = NixString (s1 <> s2) (t1 <> t2)
--instance Monoid NixString where
-- mempty = NixString mempty mempty
-- mappend = (<>)
-- | Extract the string contents from a NixString that has no context
hackyGetStringNoContext :: NixString -> Maybe Text
hackyGetStringNoContext (NixString s c) | null c = Just s
| otherwise = Nothing
-- | Extract the string contents from a NixString that has no context
principledGetStringNoContext :: NixString -> Maybe Text
principledGetStringNoContext (NixString s c) | null c = Just s
| otherwise = Nothing
-- | Extract the string contents from a NixString even if the NixString has an associated context
principledStringIgnoreContext :: NixString -> Text
principledStringIgnoreContext (NixString s _) = s
-- | Extract the string contents from a NixString even if the NixString has an associated context
hackyStringIgnoreContext :: NixString -> Text
hackyStringIgnoreContext (NixString s _) = s
-- | Returns True if the NixString has an associated context
stringHasContext :: NixString -> Bool
stringHasContext (NixString _ c) = not (null c)
-- | Constructs a NixString without a context
hackyMakeNixStringWithoutContext :: Text -> NixString
hackyMakeNixStringWithoutContext = flip NixString mempty
-- | Constructs a NixString without a context
principledMakeNixStringWithoutContext :: Text -> NixString
principledMakeNixStringWithoutContext = flip NixString mempty
-- | Modify the string part of the NixString, leaving the context unchanged
principledModifyNixContents :: (Text -> Text) -> NixString -> NixString
principledModifyNixContents f (NixString s c) = NixString (f s) c
-- | Create a NixString using a singleton context
principledMakeNixStringWithSingletonContext
:: Text -> StringContext -> NixString
principledMakeNixStringWithSingletonContext s c = NixString s (S.singleton c)
-- | Create a NixString from a Text and context
principledMakeNixString :: Text -> S.HashSet StringContext -> NixString
principledMakeNixString s c = NixString s c
-- | A monad for accumulating string context while producing a result string.
newtype WithStringContextT m a = WithStringContextT (WriterT (S.HashSet StringContext) m a)
deriving (Functor, Applicative, Monad, MonadTrans, MonadWriter (S.HashSet StringContext))
type WithStringContext = WithStringContextT Identity
-- | Add 'StringContext's into the resulting set.
addStringContext
:: Monad m => S.HashSet StringContext -> WithStringContextT m ()
addStringContext = WithStringContextT . tell
-- | Add a 'StringContext' into the resulting set.
addSingletonStringContext :: Monad m => StringContext -> WithStringContextT m ()
addSingletonStringContext = WithStringContextT . tell . S.singleton
-- | Get the contents of a 'NixString' and write its context into the resulting set.
extractNixString :: Monad m => NixString -> WithStringContextT m Text
extractNixString (NixString s c) = WithStringContextT $ tell c >> return s
-- | Run an action producing a string with a context and put those into a 'NixString'.
runWithStringContextT :: Monad m => WithStringContextT m Text -> m NixString
runWithStringContextT (WithStringContextT m) =
uncurry NixString <$> runWriterT m
-- | Run an action producing a string with a context and put those into a 'NixString'.
runWithStringContext :: WithStringContextT Identity Text -> NixString
runWithStringContext = runIdentity . runWithStringContextT
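-- | An illustrative sketch (added for exposition, not part of the original
-- API): concatenating two 'NixString's while merging their contexts, using the
-- 'WithStringContext' monad instead of 'principledStringMappend'.
appendWithContext :: NixString -> NixString -> NixString
appendWithContext a b = runWithStringContext $ do
  ta <- extractNixString a   -- record a's context, get its text
  tb <- extractNixString b   -- record b's context, get its text
  return (ta <> tb)          -- the merged context is attached by runWithStringContext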
| jwiegley/hnix | src/Nix/String.hs | bsd-3-clause | 8,970 | 0 | 13 | 1,597 | 1,862 | 992 | 870 | 173 | 4 |
import Control.Concurrent
import GHCJS.Marshal.Pure
import GHCJS.Require
helloWorld = putStrLn "[haskell] Hello World"
launchTheMissiles = do
threadDelay (1000 * 1000 * 5)
putStrLn "[haskell] OMG what did I do?!"
return $ Just $ pToJSVal (10 :: Double)
main = do
export0 "helloWorld" helloWorld
export "launchTheMissiles" launchTheMissiles
defaultMain
| beijaflor-io/ghcjs-commonjs | old-examples/ghcjs-loader-test/Main.hs | mit | 409 | 0 | 10 | 103 | 97 | 47 | 50 | 12 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Pos.DB.Epoch.Index
( writeEpochIndex
, getEpochBlundOffset
, SlotIndexOffset (..)
) where
import Universum
import Data.Binary (Binary, decode, encode)
import qualified Data.ByteString.Builder as Builder
import qualified Data.ByteString.Lazy as LBS
import Formatting (build, sformat, (%))
import System.IO (IOMode (..), SeekMode (..), hSeek, withBinaryFile)
import Pos.Core (LocalSlotIndex (..), SlotCount, localSlotIndices)
-- When we store all blocks for an epoch in an "epoch" file we need a fast and
-- simple way of extracting any single block from the epoch file without decoding
-- the whole file.
--
-- We do this by keeping a separate index file that, for each slot, gives the
-- offset in the file where that block occurs. There are 21600 slots/blocks per
-- epoch (10 * blkSecurityParam) and in the first 62 epochs, the smallest number
-- of blocks in an epoch was 21562. This means the most disk-storage-efficient
-- and quickest-to-access way to store the slot-index-to-file-offset mapping in
-- the file is as a dense vector of 64-bit file offsets indexed by the slot index,
-- even if that means that the file has to have sentinel values inserted at empty
-- slot indices.
--
-- We use 'maxBound' as the sentinel value. On read, if we get a value of
-- 'maxBound' we return 'Nothing', otherwise the offset is returned wrapped
-- in a 'Just'.
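-- An illustrative sketch (added for exposition, not part of the original
-- module): because the index is a dense vector of 8-byte entries after a
-- fixed-size header, the byte position of the entry for local slot @i@ is
-- simple arithmetic, which is what makes a lookup one seek plus one 8-byte
-- read (see 'getSlotIndexOffsetN' below):
--
-- > indexEntryPosition :: Word16 -> Integer
-- > indexEntryPosition i = headerLength + fromIntegral i * 8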
header :: LBS.ByteString
header = "Epoch Index v1\n\n"
headerLength :: Num a => a
headerLength = fromIntegral $ LBS.length header
hCheckHeader :: FilePath -> Handle -> IO ()
hCheckHeader fpath h = do
hSeek h AbsoluteSeek 0
headerBytes <- LBS.hGet h headerLength
when (headerBytes /= header) $ error $ sformat
("Invalid header in epoch index file " % build)
fpath
data SlotIndexOffset = SlotIndexOffset
{ sioSlotIndex :: !Word16
, sioOffset :: !Word64
} deriving (Eq, Generic, Show)
instance Binary SlotIndexOffset
-- | Write a list of @SlotIndexOffset@s to a dense @Binary@ representation
--
-- To make it dense we pad the list with @maxBound :: Word64@ whenever we see
-- a missing @LocalSlotIndex@
writeEpochIndex :: SlotCount -> FilePath -> [SlotIndexOffset] -> IO ()
writeEpochIndex epochSlots path =
withBinaryFile path WriteMode
. flip Builder.hPutBuilder
. (Builder.lazyByteString header <>)
. foldMap (Builder.lazyByteString . encode . sioOffset)
. padIndex epochSlots
-- | Pad a list of @SlotIndexOffset@s ordered by @LocalSlotIndex@
padIndex :: SlotCount -> [SlotIndexOffset] -> [SlotIndexOffset]
padIndex epochSlots = go
( flip SlotIndexOffset maxBound
. getSlotIndex
<$> localSlotIndices epochSlots
)
where
go [] _ = []
go xs [] = xs
go (x : xs) (y : ys) | sioSlotIndex x == sioSlotIndex y = y : go xs ys
| otherwise = x : go xs (y : ys)
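-- For example (illustrative only), with 4 slots per epoch and offsets known
-- only for slots 0 and 2, the padded index is
--
-- > [ SlotIndexOffset 0 o0, SlotIndexOffset 1 maxBound
-- > , SlotIndexOffset 2 o2, SlotIndexOffset 3 maxBound ]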
getSlotIndexOffsetN :: FilePath -> LocalSlotIndex -> IO Word64
getSlotIndexOffsetN fpath (UnsafeLocalSlotIndex i) =
withBinaryFile fpath ReadMode $ \h -> do
hCheckHeader fpath h
hSeek h AbsoluteSeek (headerLength + fromIntegral i * 8)
decode <$> LBS.hGet h 8
getEpochBlundOffset :: FilePath -> LocalSlotIndex -> IO (Maybe Word64)
getEpochBlundOffset fpath lsi = do
off <- getSlotIndexOffsetN fpath lsi
-- 'maxBound' is the sentinel value which means there is no block
-- in the epoch file for the specified 'LocalSlotIndex'.
pure $ if off == maxBound then Nothing else Just off
| input-output-hk/pos-haskell-prototype | db/src/Pos/DB/Epoch/Index.hs | mit | 3,622 | 0 | 13 | 842 | 713 | 385 | 328 | 58 | 3 |
-- Copyright (c) 2010, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Yql.UI.CLI.Commands.Logout
( logout
) where
import Yql.Core.Session
import Yql.UI.CLI.Command
-- | Removes any saved oauth_token.
logout :: SessionMgr s => s -> Command ()
logout session = Command (const doc,const (const exe))
where doc = "Purges any saved oauth token previously"
exe = unlink session
| dgvncsz0f/iyql | src/main/haskell/Yql/UI/CLI/Commands/Logout.hs | gpl-3.0 | 1,909 | 0 | 9 | 347 | 118 | 77 | 41 | 8 | 1 |
-- Copyright (c) 2010, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Main where
import System.FilePath
import Yql.Core.Backend
import Yql.Core.Session
import Yql.UI.Cli
import Yql.Data.Cfg
import Network.OAuth.Consumer
main :: IO ()
main = do session <- fmap mkSession basedir
config <- usrCfg
iyql session (backend session config)
where backend session config = YqlBackend (Application cfgCKey cfgCSec OOB) session (tryCfgs config "env" []) yqlEndpoint
where cfgCKey = tryCfg config "oauth_consumer_key" "iyql"
cfgCSec = tryCfg config "oauth_consumer_sec" ""
yqlEndpoint = let (host,port) = break (==':') (tryCfg config "endpoint" "query.yahooapis.com:80")
in case port
of [] -> (host,80)
[':'] -> (host,80)
(_:p) -> (host,read p)
mkSession home = FileStorage (joinPath [home,"oauth_token"])
| dgvncsz0f/iyql | src/main/haskell/iyql.hs | gpl-3.0 | 2,535 | 0 | 15 | 600 | 303 | 173 | 130 | 20 | 3 |
{-# LANGUAGE QuasiQuotes #-}
module CUDA (cudaTests) where
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit (Assertion, (@?=))
import Language.C.Quote.CUDA
import Language.C.Syntax
import Data.Loc (noLoc)
mkDeclarator params mutability = LambdaDeclarator (Params params False noLoc) mutability Nothing noLoc
mkIntroducer :: [CaptureListEntry] -> LambdaIntroducer
mkIntroducer mode = (LambdaIntroducer mode noLoc)
emptyLambda = lambdaByCapture []
lambdaByCapture captureMode = Lambda (mkIntroducer captureMode) Nothing [] noLoc
lambdaByCaptureBody captureMode statements = Lambda (mkIntroducer captureMode) Nothing statements noLoc
lambdaByCaptureParams captureMode params = Lambda (mkIntroducer captureMode) (Just $ mkDeclarator params False) [] noLoc
lambdaByParams params = Lambda (mkIntroducer []) (Just $ mkDeclarator params False) [] noLoc
mutableLambdaByParams params = Lambda (mkIntroducer []) (Just $ mkDeclarator params True) [] noLoc
cudaTests :: Test
cudaTests = testGroup "CUDA"
$ map (testCase "lambda-expressions parsing") lambdas
where
lambdas :: [Assertion]
lambdas = [ [cexp|[=] {}|] @?= lambdaByCapture [DefaultByValue]
, [cexp|[&] {}|] @?= lambdaByCapture[DefaultByReference]
, [cexp|[] {}|] @?= lambdaByCapture []
, [cexp|[] {}|] @?= emptyLambda
, [cexp|[] () {}|] @?= lambdaByParams []
, [cexp|[] (int i) {}|] @?= lambdaByParams [param_int_i]
, [cexp|[] (int i, double j) {}|] @?= lambdaByParams [param_int_i, param_double_h]
, [cexp|[] ($param:param_int_i) {}|] @?= lambdaByParams [param_int_i]
, [cexp|[] (int i) mutable {}|] @?= mutableLambdaByParams [param_int_i]
, [cexp|[&] (int i) {}|] @?= lambdaByCaptureParams [DefaultByReference] [param_int_i]
, [cexp|[&] { $item:item_return_7 } |] @?= lambdaCapturingByRefAndReturning7
, [cexp|[&] { return $exp:exp_7; } |] @?= lambdaCapturingByRefAndReturning7
, [cexp|[]{}()|] @?= FnCall emptyLambda [] noLoc
, [cexp|[](){}()|] @?= FnCall (lambdaByParams []) [] noLoc
]
lambdaCapturingByRefAndReturning7 = lambdaByCaptureBody [DefaultByReference] [item_return_7]
exp_7 = [cexp|7|]
item_return_7 = [citem|return 7;|]
param_int_i = [cparam|int i|]
param_double_h = [cparam|double j|] | flowbox-public/language-c-quote | tests/unit/CUDA.hs | bsd-3-clause | 2,423 | 0 | 12 | 498 | 629 | 369 | 260 | 40 | 1 |
module T3132 where
import Data.Array.Unboxed
step :: UArray Int Double -> [Double]
step y = [y!1 + y!0]
| mpickering/ghc-exactprint | tests/examples/ghc710/T3132.hs | bsd-3-clause | 106 | 0 | 8 | 20 | 50 | 28 | 22 | 4 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Pattern-matching bindings (HsBinds and MonoBinds)
Handles @HsBinds@; those at the top level require different handling,
in that the @Rec@/@NonRec@/etc structure is thrown away (whereas at
lower levels it is preserved with @let@/@letrec@s).
-}
{-# LANGUAGE CPP #-}
module DsBinds ( dsTopLHsBinds, dsLHsBinds, decomposeRuleLhs, dsSpec,
dsHsWrapper, dsTcEvBinds, dsTcEvBinds_s, dsEvBinds
) where
#include "HsVersions.h"
import {-# SOURCE #-} DsExpr( dsLExpr )
import {-# SOURCE #-} Match( matchWrapper )
import DsMonad
import DsGRHSs
import DsUtils
import HsSyn -- lots of things
import CoreSyn -- lots of things
import Literal ( Literal(MachStr) )
import CoreSubst
import OccurAnal ( occurAnalyseExpr )
import MkCore
import CoreUtils
import CoreArity ( etaExpand )
import CoreUnfold
import CoreFVs
import UniqSupply
import Digraph
import PrelNames
import TysPrim ( mkProxyPrimTy )
import TyCon
import TcEvidence
import TcType
import Type
import Kind (returnsConstraintKind)
import Coercion hiding (substCo)
import TysWiredIn ( eqBoxDataCon, coercibleDataCon, mkListTy
, mkBoxedTupleTy, charTy, typeNatKind, typeSymbolKind )
import Id
import MkId(proxyHashId)
import Class
import DataCon ( dataConTyCon )
import Name
import IdInfo ( IdDetails(..) )
import Var
import VarSet
import Rules
import VarEnv
import Outputable
import Module
import SrcLoc
import Maybes
import OrdList
import Bag
import BasicTypes hiding ( TopLevel )
import DynFlags
import FastString
import Util
import MonadUtils
import Control.Monad(liftM)
import Fingerprint(Fingerprint(..), fingerprintString)
{-
************************************************************************
* *
\subsection[dsMonoBinds]{Desugaring a @MonoBinds@}
* *
************************************************************************
-}
dsTopLHsBinds :: LHsBinds Id -> DsM (OrdList (Id,CoreExpr))
dsTopLHsBinds binds = ds_lhs_binds binds
dsLHsBinds :: LHsBinds Id -> DsM [(Id,CoreExpr)]
dsLHsBinds binds = do { binds' <- ds_lhs_binds binds
; return (fromOL binds') }
------------------------
ds_lhs_binds :: LHsBinds Id -> DsM (OrdList (Id,CoreExpr))
ds_lhs_binds binds = do { ds_bs <- mapBagM dsLHsBind binds
; return (foldBag appOL id nilOL ds_bs) }
dsLHsBind :: LHsBind Id -> DsM (OrdList (Id,CoreExpr))
dsLHsBind (L loc bind) = putSrcSpanDs loc $ dsHsBind bind
dsHsBind :: HsBind Id -> DsM (OrdList (Id,CoreExpr))
dsHsBind (VarBind { var_id = var, var_rhs = expr, var_inline = inline_regardless })
= do { dflags <- getDynFlags
; core_expr <- dsLExpr expr
-- Dictionary bindings are always VarBinds,
-- so we only need do this here
; let var' | inline_regardless = var `setIdUnfolding` mkCompulsoryUnfolding core_expr
| otherwise = var
; return (unitOL (makeCorePair dflags var' False 0 core_expr)) }
dsHsBind (FunBind { fun_id = L _ fun, fun_matches = matches
, fun_co_fn = co_fn, fun_tick = tick
, fun_infix = inf })
= do { dflags <- getDynFlags
; (args, body) <- matchWrapper (FunRhs (idName fun) inf) matches
; let body' = mkOptTickBox tick body
; rhs <- dsHsWrapper co_fn (mkLams args body')
; {- pprTrace "dsHsBind" (ppr fun <+> ppr (idInlinePragma fun)) $ -}
return (unitOL (makeCorePair dflags fun False 0 rhs)) }
dsHsBind (PatBind { pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty
, pat_ticks = (rhs_tick, var_ticks) })
= do { body_expr <- dsGuarded grhss ty
; let body' = mkOptTickBox rhs_tick body_expr
; sel_binds <- mkSelectorBinds var_ticks pat body'
-- We silently ignore inline pragmas; no makeCorePair
-- Not so cool, but really doesn't matter
; return (toOL sel_binds) }
-- A common case: one exported variable
-- Non-recursive bindings come through this way
-- So do self-recursive bindings, and recursive bindings
-- that have been chopped up with type signatures
dsHsBind (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dicts
, abs_exports = [export]
, abs_ev_binds = ev_binds, abs_binds = binds })
| ABE { abe_wrap = wrap, abe_poly = global
, abe_mono = local, abe_prags = prags } <- export
= do { dflags <- getDynFlags
; bind_prs <- ds_lhs_binds binds
; let core_bind = Rec (fromOL bind_prs)
; ds_binds <- dsTcEvBinds_s ev_binds
; rhs <- dsHsWrapper wrap $ -- Usually the identity
mkLams tyvars $ mkLams dicts $
mkCoreLets ds_binds $
Let core_bind $
Var local
; (spec_binds, rules) <- dsSpecs rhs prags
; let global' = addIdSpecialisations global rules
main_bind = makeCorePair dflags global' (isDefaultMethod prags)
(dictArity dicts) rhs
; return (main_bind `consOL` spec_binds) }
dsHsBind (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dicts
, abs_exports = exports, abs_ev_binds = ev_binds
, abs_binds = binds })
-- See Note [Desugaring AbsBinds]
= do { dflags <- getDynFlags
; bind_prs <- ds_lhs_binds binds
; let core_bind = Rec [ makeCorePair dflags (add_inline lcl_id) False 0 rhs
| (lcl_id, rhs) <- fromOL bind_prs ]
-- Monomorphic recursion possible, hence Rec
locals = map abe_mono exports
tup_expr = mkBigCoreVarTup locals
tup_ty = exprType tup_expr
; ds_binds <- dsTcEvBinds_s ev_binds
; let poly_tup_rhs = mkLams tyvars $ mkLams dicts $
mkCoreLets ds_binds $
Let core_bind $
tup_expr
; poly_tup_id <- newSysLocalDs (exprType poly_tup_rhs)
; let mk_bind (ABE { abe_wrap = wrap, abe_poly = global
, abe_mono = local, abe_prags = spec_prags })
= do { tup_id <- newSysLocalDs tup_ty
; rhs <- dsHsWrapper wrap $
mkLams tyvars $ mkLams dicts $
mkTupleSelector locals local tup_id $
mkVarApps (Var poly_tup_id) (tyvars ++ dicts)
; let rhs_for_spec = Let (NonRec poly_tup_id poly_tup_rhs) rhs
; (spec_binds, rules) <- dsSpecs rhs_for_spec spec_prags
; let global' = (global `setInlinePragma` defaultInlinePragma)
`addIdSpecialisations` rules
-- Kill the INLINE pragma because it applies to
-- the user written (local) function. The global
-- Id is just the selector. Hmm.
; return ((global', rhs) `consOL` spec_binds) }
; export_binds_s <- mapM mk_bind exports
; return ((poly_tup_id, poly_tup_rhs) `consOL`
concatOL export_binds_s) }
where
inline_env :: IdEnv Id -- Maps a monomorphic local Id to one with
-- the inline pragma from the source
-- The type checker put the inline pragma
-- on the *global* Id, so we need to transfer it
inline_env = mkVarEnv [ (lcl_id, setInlinePragma lcl_id prag)
| ABE { abe_mono = lcl_id, abe_poly = gbl_id } <- exports
, let prag = idInlinePragma gbl_id ]
add_inline :: Id -> Id -- tran
add_inline lcl_id = lookupVarEnv inline_env lcl_id `orElse` lcl_id
dsHsBind (PatSynBind{}) = panic "dsHsBind: PatSynBind"
------------------------
makeCorePair :: DynFlags -> Id -> Bool -> Arity -> CoreExpr -> (Id, CoreExpr)
makeCorePair dflags gbl_id is_default_method dict_arity rhs
| is_default_method -- Default methods are *always* inlined
= (gbl_id `setIdUnfolding` mkCompulsoryUnfolding rhs, rhs)
| DFunId is_newtype <- idDetails gbl_id
= (mk_dfun_w_stuff is_newtype, rhs)
| otherwise
= case inlinePragmaSpec inline_prag of
EmptyInlineSpec -> (gbl_id, rhs)
NoInline -> (gbl_id, rhs)
Inlinable -> (gbl_id `setIdUnfolding` inlinable_unf, rhs)
Inline -> inline_pair
where
inline_prag = idInlinePragma gbl_id
inlinable_unf = mkInlinableUnfolding dflags rhs
inline_pair
| Just arity <- inlinePragmaSat inline_prag
-- Add an Unfolding for an INLINE (but not for NOINLINE)
-- And eta-expand the RHS; see Note [Eta-expanding INLINE things]
, let real_arity = dict_arity + arity
-- NB: The arity in the InlineRule takes account of the dictionaries
= ( gbl_id `setIdUnfolding` mkInlineUnfolding (Just real_arity) rhs
, etaExpand real_arity rhs)
| otherwise
= pprTrace "makeCorePair: arity missing" (ppr gbl_id) $
(gbl_id `setIdUnfolding` mkInlineUnfolding Nothing rhs, rhs)
-- See Note [ClassOp/DFun selection] in TcInstDcls
-- See Note [Single-method classes] in TcInstDcls
mk_dfun_w_stuff is_newtype
| is_newtype
= gbl_id `setIdUnfolding` mkInlineUnfolding (Just 0) rhs
`setInlinePragma` alwaysInlinePragma { inl_sat = Just 0 }
| otherwise
= gbl_id `setIdUnfolding` mkDFunUnfolding dfun_bndrs dfun_constr dfun_args
`setInlinePragma` dfunInlinePragma
(dfun_bndrs, dfun_body) = collectBinders (simpleOptExpr rhs)
(dfun_con, dfun_args) = collectArgs dfun_body
dfun_constr | Var id <- dfun_con
, DataConWorkId con <- idDetails id
= con
| otherwise = pprPanic "makeCorePair: dfun" (ppr rhs)
dictArity :: [Var] -> Arity
-- Don't count coercion variables in arity
dictArity dicts = count isId dicts
{-
[Desugaring AbsBinds]
~~~~~~~~~~~~~~~~~~~~~
In the general AbsBinds case we desugar the binding to this:
tup a (d:Num a) = let fm = ...gm...
gm = ...fm...
in (fm,gm)
f a d = case tup a d of { (fm,gm) -> fm }
        g a d = case tup a d of { (fm,gm) -> gm }
Note [Rules and inlining]
~~~~~~~~~~~~~~~~~~~~~~~~~
Common special case: no type or dictionary abstraction
This is a bit less trivial than you might suppose
The naive way would be to desugar to something like
f_lcl = ...f_lcl... -- The "binds" from AbsBinds
M.f = f_lcl -- Generated from "exports"
But we don't want that, because if M.f isn't exported,
it'll be inlined unconditionally at every call site (its rhs is
trivial). That would be ok unless it has RULES, which would
thereby be completely lost. Bad, bad, bad.
Instead we want to generate
M.f = ...f_lcl...
f_lcl = M.f
Now all is cool. The RULES are attached to M.f (by SimplCore),
and f_lcl is rapidly inlined away.
This does not happen in the same way to polymorphic binds,
because they desugar to
M.f = /\a. let f_lcl = ...f_lcl... in f_lcl
Although I'm a bit worried about whether full laziness might
float the f_lcl binding out and then inline M.f at its call site
Note [Specialising in no-dict case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even if there are no tyvars or dicts, we may have specialisation pragmas.
Class methods can generate
AbsBinds [] [] [( ... spec-prag]
{ AbsBinds [tvs] [dicts] ...blah }
So the overloading is in the nested AbsBinds. A good example is in GHC.Float:
class (Real a, Fractional a) => RealFrac a where
round :: (Integral b) => a -> b
instance RealFrac Float where
{-# SPECIALIZE round :: Float -> Int #-}
The top-level AbsBinds for $cround has no tyvars or dicts (because the
instance does not). But the method is locally overloaded!
Note [Abstracting over tyvars only]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When abstracting over type variables only (not dictionaries), we don't really need to
build a tuple and select from it, as we do in the general case. Instead we can take
AbsBinds [a,b] [ ([a,b], fg, fl, _),
([b], gg, gl, _) ]
{ fl = e1
gl = e2
h = e3 }
and desugar it to
fg = /\ab. let B in e1
gg = /\b. let a = () in let B in S(e2)
h = /\ab. let B in e3
where B is the *non-recursive* binding
fl = fg a b
gl = gg b
h = h a b -- See (b); note shadowing!
Notice (a) g has a different number of type variables to f, so we must
use the mkArbitraryType thing to fill in the gaps.
We use a type-let to do that.
(b) The local variable h isn't in the exports, and rather than
clone a fresh copy we simply replace h by (h a b), where
the two h's have different types! Shadowing happens here,
which looks confusing but works fine.
(c) The result is *still* quadratic-sized if there are a lot of
small bindings. So if there are more than some small
number (10), we filter the binding set B by the free
variables of the particular RHS. Tiresome.
Why go to this trouble?  It's a common case, and it removes the
quadratic-sized tuple desugaring. Less clutter, hopefully faster
compilation, especially in a case where there are a *lot* of
bindings.
Note [Eta-expanding INLINE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
foo :: Eq a => a -> a
{-# INLINE foo #-}
foo x = ...
If (foo d) ever gets floated out as a common sub-expression (which can
happen as a result of method sharing), there's a danger that we never
get to do the inlining, which is a Terribly Bad thing given that the
user said "inline"!
To avoid this we pre-emptively eta-expand the definition, so that foo
has the arity with which it is declared in the source code. In this
example it has arity 2 (one for the Eq and one for x). Doing this
should mean that (foo d) is a PAP and we don't share it.
Note [Nested arities]
~~~~~~~~~~~~~~~~~~~~~
For reasons that are not entirely clear, method bindings come out looking like
this:
AbsBinds [] [] [$cfromT <= [] fromT]
$cfromT [InlPrag=INLINE] :: T Bool -> Bool
{ AbsBinds [] [] [fromT <= [] fromT_1]
fromT :: T Bool -> Bool
{ fromT_1 ((TBool b)) = not b } } }
Note the nested AbsBind. The arity for the InlineRule on $cfromT should be
gotten from the binding for fromT_1.
It might be better to have just one level of AbsBinds, but that requires more
thought!
-}
------------------------
dsSpecs :: CoreExpr -- Its rhs
-> TcSpecPrags
-> DsM ( OrdList (Id,CoreExpr) -- Binding for specialised Ids
, [CoreRule] ) -- Rules for the Global Ids
-- See Note [Handling SPECIALISE pragmas] in TcBinds
dsSpecs _ IsDefaultMethod = return (nilOL, [])
dsSpecs poly_rhs (SpecPrags sps)
= do { pairs <- mapMaybeM (dsSpec (Just poly_rhs)) sps
; let (spec_binds_s, rules) = unzip pairs
; return (concatOL spec_binds_s, rules) }
dsSpec :: Maybe CoreExpr -- Just rhs => RULE is for a local binding
-- Nothing => RULE is for an imported Id
-- rhs is in the Id's unfolding
-> Located TcSpecPrag
-> DsM (Maybe (OrdList (Id,CoreExpr), CoreRule))
dsSpec mb_poly_rhs (L loc (SpecPrag poly_id spec_co spec_inl))
| isJust (isClassOpId_maybe poly_id)
= putSrcSpanDs loc $
do { warnDs (ptext (sLit "Ignoring useless SPECIALISE pragma for class method selector")
<+> quotes (ppr poly_id))
; return Nothing } -- There is no point in trying to specialise a class op
-- Moreover, classops don't (currently) have an inl_sat arity set
-- (it would be Just 0) and that in turn makes makeCorePair bleat
| no_act_spec && isNeverActive rule_act
= putSrcSpanDs loc $
do { warnDs (ptext (sLit "Ignoring useless SPECIALISE pragma for NOINLINE function:")
<+> quotes (ppr poly_id))
       ; return Nothing  }  -- Function is NOINLINE, and the specialisation inherits that
-- See Note [Activation pragmas for SPECIALISE]
| otherwise
= putSrcSpanDs loc $
do { uniq <- newUnique
; let poly_name = idName poly_id
spec_occ = mkSpecOcc (getOccName poly_name)
spec_name = mkInternalName uniq spec_occ (getSrcSpan poly_name)
; (bndrs, ds_lhs) <- liftM collectBinders
(dsHsWrapper spec_co (Var poly_id))
; let spec_ty = mkPiTypes bndrs (exprType ds_lhs)
; -- pprTrace "dsRule" (vcat [ ptext (sLit "Id:") <+> ppr poly_id
-- , ptext (sLit "spec_co:") <+> ppr spec_co
-- , ptext (sLit "ds_rhs:") <+> ppr ds_lhs ]) $
case decomposeRuleLhs bndrs ds_lhs of {
Left msg -> do { warnDs msg; return Nothing } ;
Right (rule_bndrs, _fn, args) -> do
{ dflags <- getDynFlags
; this_mod <- getModule
; let fn_unf = realIdUnfolding poly_id
unf_fvs = stableUnfoldingVars fn_unf `orElse` emptyVarSet
in_scope = mkInScopeSet (unf_fvs `unionVarSet` exprsFreeVars args)
spec_unf = specUnfolding dflags (mkEmptySubst in_scope) bndrs args fn_unf
spec_id = mkLocalId spec_name spec_ty
`setInlinePragma` inl_prag
`setIdUnfolding` spec_unf
rule = mkRule this_mod False {- Not auto -} is_local_id
(mkFastString ("SPEC " ++ showPpr dflags poly_name))
rule_act poly_name
rule_bndrs args
(mkVarApps (Var spec_id) bndrs)
; spec_rhs <- dsHsWrapper spec_co poly_rhs
-- Commented out: see Note [SPECIALISE on INLINE functions]
-- ; when (isInlinePragma id_inl)
-- (warnDs $ ptext (sLit "SPECIALISE pragma on INLINE function probably won't fire:")
-- <+> quotes (ppr poly_name))
; return (Just (unitOL (spec_id, spec_rhs), rule))
-- NB: do *not* use makeCorePair on (spec_id,spec_rhs), because
-- makeCorePair overwrites the unfolding, which we have
-- just created using specUnfolding
} } }
where
is_local_id = isJust mb_poly_rhs
poly_rhs | Just rhs <- mb_poly_rhs
= rhs -- Local Id; this is its rhs
| Just unfolding <- maybeUnfoldingTemplate (realIdUnfolding poly_id)
= unfolding -- Imported Id; this is its unfolding
-- Use realIdUnfolding so we get the unfolding
-- even when it is a loop breaker.
-- We want to specialise recursive functions!
| otherwise = pprPanic "dsImpSpecs" (ppr poly_id)
-- The type checker has checked that it *has* an unfolding
id_inl = idInlinePragma poly_id
-- See Note [Activation pragmas for SPECIALISE]
inl_prag | not (isDefaultInlinePragma spec_inl) = spec_inl
| not is_local_id -- See Note [Specialising imported functions]
-- in OccurAnal
, isStrongLoopBreaker (idOccInfo poly_id) = neverInlinePragma
| otherwise = id_inl
-- Get the INLINE pragma from SPECIALISE declaration, or,
-- failing that, from the original Id
spec_prag_act = inlinePragmaActivation spec_inl
-- See Note [Activation pragmas for SPECIALISE]
-- no_act_spec is True if the user didn't write an explicit
-- phase specification in the SPECIALISE pragma
no_act_spec = case inlinePragmaSpec spec_inl of
NoInline -> isNeverActive spec_prag_act
_ -> isAlwaysActive spec_prag_act
rule_act | no_act_spec = inlinePragmaActivation id_inl -- Inherit
| otherwise = spec_prag_act -- Specified by user
{- Note [SPECIALISE on INLINE functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to warn that using SPECIALISE for a function marked INLINE
would be a no-op; but it isn't! Especially with worker/wrapper split
we might have
{-# INLINE f #-}
f :: Ord a => Int -> a -> ...
f d x y = case x of I# x' -> $wf d x' y
We might want to specialise 'f' so that we in turn specialise '$wf'.
We can't even /name/ '$wf' in the source code, so we can't specialise
it even if we wanted to. Trac #10721 is a case in point.
Note [Activation pragmas for SPECIALISE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From a user SPECIALISE pragma for f, we generate
a) A top-level binding spec_fn = rhs
b) A RULE f dOrd = spec_fn
We need two pragma-like things:
* spec_fn's inline pragma: inherited from f's inline pragma (ignoring
     activation on SPEC), unless overridden by SPEC INLINE
* Activation of RULE: from SPECIALISE pragma (if activation given)
otherwise from f's inline pragma
This is not obvious (see Trac #5237)!
Examples Rule activation Inline prag on spec'd fn
---------------------------------------------------------------------
SPEC [n] f :: ty [n] Always, or NOINLINE [n]
copy f's prag
NOINLINE f
SPEC [n] f :: ty [n] NOINLINE
copy f's prag
NOINLINE [k] f
SPEC [n] f :: ty [n] NOINLINE [k]
copy f's prag
INLINE [k] f
SPEC [n] f :: ty [n] INLINE [k]
copy f's prag
SPEC INLINE [n] f :: ty [n] INLINE [n]
(ignore INLINE prag on f,
same activation for rule and spec'd fn)
NOINLINE [k] f
SPEC f :: ty [n] INLINE [k]
************************************************************************
* *
\subsection{Adding inline pragmas}
* *
************************************************************************
-}
decomposeRuleLhs :: [Var] -> CoreExpr -> Either SDoc ([Var], Id, [CoreExpr])
-- (decomposeRuleLhs bndrs lhs) takes apart the LHS of a RULE,
-- The 'bndrs' are the quantified binders of the rules, but decomposeRuleLhs
-- may add some extra dictionary binders (see Note [Free dictionaries])
--
-- Returns Nothing if the LHS isn't of the expected shape
-- Note [Decomposing the left-hand side of a RULE]
decomposeRuleLhs orig_bndrs orig_lhs
| not (null unbound) -- Check for things unbound on LHS
-- See Note [Unused spec binders]
= Left (vcat (map dead_msg unbound))
| Just (fn_id, args) <- decompose fun2 args2
, let extra_dict_bndrs = mk_extra_dict_bndrs fn_id args
  = -- pprTrace "decomposeRuleLhs" (vcat [ ptext (sLit "orig_bndrs:") <+> ppr orig_bndrs
-- , ptext (sLit "orig_lhs:") <+> ppr orig_lhs
-- , ptext (sLit "lhs1:") <+> ppr lhs1
-- , ptext (sLit "extra_dict_bndrs:") <+> ppr extra_dict_bndrs
-- , ptext (sLit "fn_id:") <+> ppr fn_id
-- , ptext (sLit "args:") <+> ppr args]) $
Right (orig_bndrs ++ extra_dict_bndrs, fn_id, args)
| otherwise
= Left bad_shape_msg
where
lhs1 = drop_dicts orig_lhs
lhs2 = simpleOptExpr lhs1 -- See Note [Simplify rule LHS]
(fun2,args2) = collectArgs lhs2
lhs_fvs = exprFreeVars lhs2
unbound = filterOut (`elemVarSet` lhs_fvs) orig_bndrs
orig_bndr_set = mkVarSet orig_bndrs
-- Add extra dict binders: Note [Free dictionaries]
mk_extra_dict_bndrs fn_id args
= [ mkLocalId (localiseName (idName d)) (idType d)
| d <- varSetElems (exprsFreeVars args `delVarSetList` (fn_id : orig_bndrs))
-- fn_id: do not quantify over the function itself, which may
-- itself be a dictionary (in pathological cases, Trac #10251)
, isDictId d ]
decompose (Var fn_id) args
| not (fn_id `elemVarSet` orig_bndr_set)
= Just (fn_id, args)
decompose _ _ = Nothing
bad_shape_msg = hang (ptext (sLit "RULE left-hand side too complicated to desugar"))
2 (vcat [ text "Optimised lhs:" <+> ppr lhs2
, text "Orig lhs:" <+> ppr orig_lhs])
dead_msg bndr = hang (sep [ ptext (sLit "Forall'd") <+> pp_bndr bndr
, ptext (sLit "is not bound in RULE lhs")])
2 (vcat [ text "Orig bndrs:" <+> ppr orig_bndrs
, text "Orig lhs:" <+> ppr orig_lhs
, text "optimised lhs:" <+> ppr lhs2 ])
pp_bndr bndr
| isTyVar bndr = ptext (sLit "type variable") <+> quotes (ppr bndr)
| Just pred <- evVarPred_maybe bndr = ptext (sLit "constraint") <+> quotes (ppr pred)
| otherwise = ptext (sLit "variable") <+> quotes (ppr bndr)
drop_dicts :: CoreExpr -> CoreExpr
drop_dicts e
= wrap_lets needed bnds body
where
needed = orig_bndr_set `minusVarSet` exprFreeVars body
(bnds, body) = split_lets (occurAnalyseExpr e)
-- The occurAnalyseExpr drops dead bindings which is
-- crucial to ensure that every binding is used later;
-- which in turn makes wrap_lets work right
split_lets :: CoreExpr -> ([(DictId,CoreExpr)], CoreExpr)
split_lets e
| Let (NonRec d r) body <- e
, isDictId d
, (bs, body') <- split_lets body
= ((d,r):bs, body')
| otherwise
= ([], e)
wrap_lets :: VarSet -> [(DictId,CoreExpr)] -> CoreExpr -> CoreExpr
wrap_lets _ [] body = body
wrap_lets needed ((d, r) : bs) body
| rhs_fvs `intersectsVarSet` needed = Let (NonRec d r) (wrap_lets needed' bs body)
| otherwise = wrap_lets needed bs body
where
rhs_fvs = exprFreeVars r
needed' = (needed `minusVarSet` rhs_fvs) `extendVarSet` d
{-
Note [Decomposing the left-hand side of a RULE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are several things going on here.
* drop_dicts: see Note [Drop dictionary bindings on rule LHS]
* simpleOptExpr: see Note [Simplify rule LHS]
* extra_dict_bndrs: see Note [Free dictionaries]
Note [Drop dictionary bindings on rule LHS]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
drop_dicts drops dictionary bindings on the LHS where possible.
E.g. let d:Eq [Int] = $fEqList $fEqInt in f d
--> f d
Reasoning here is that there is only one d:Eq [Int], and so we can
quantify over it. That makes 'd' free in the LHS, but that is later
picked up by extra_dict_bndrs (Note [Free dictionaries]).
NB 1: We can only drop the binding if the RHS doesn't bind
one of the orig_bndrs, which we assume occur on RHS.
Example
f :: (Eq a) => b -> a -> a
{-# SPECIALISE f :: Eq a => b -> [a] -> [a] #-}
Here we want to end up with
RULE forall d:Eq a. f ($dfEqList d) = f_spec d
Of course, the ($dfEqlist d) in the pattern makes it less likely
to match, but there is no other way to get d:Eq a
NB 2: We do drop_dicts *before* simpleOptExpr, so that we expect all
the evidence bindings to be wrapped around the outside of the
LHS. (After simplOptExpr they'll usually have been inlined.)
dsHsWrapper does dependency analysis, so that civilised ones
will be simple NonRec bindings. We don't handle recursive
dictionaries!
NB 3: In the common case of a non-overloaded, but perhaps-polymorphic
specialisation, we don't need to bind *any* dictionaries for use
in the RHS. For example (Trac #8331)
{-# SPECIALIZE INLINE useAbstractMonad :: ReaderST s Int #-}
useAbstractMonad :: MonadAbstractIOST m => m Int
Here, deriving (MonadAbstractIOST (ReaderST s)) is a lot of code
but the RHS uses no dictionaries, so we want to end up with
RULE forall s (d :: MonadAbstractIOST (ReaderT s)).
useAbstractMonad (ReaderT s) d = $suseAbstractMonad s
Trac #8848 is a good example of where there are some interesting
dictionary bindings to discard.
The drop_dicts algorithm is based on these observations:
* Given (let d = rhs in e) where d is a DictId,
matching 'e' will bind e's free variables.
* So we want to keep the binding if one of the needed variables (for
which we need a binding) is in fv(rhs) but not already in fv(e).
* The "needed variables" are simply the orig_bndrs. Consider
f :: (Eq a, Show b) => a -> b -> String
... SPECIALISE f :: (Show b) => Int -> b -> String ...
Then orig_bndrs includes the *quantified* dictionaries of the type
namely (dsb::Show b), but not the one for Eq Int
So we work inside out, applying the above criterion at each step.
Note [Simplify rule LHS]
~~~~~~~~~~~~~~~~~~~~~~~~
simpleOptExpr occurrence-analyses and simplifies the LHS:
(a) Inline any remaining dictionary bindings (which hopefully
occur just once)
(b) Substitute trivial lets so that they don't get in the way
Note that we substitute the function too; we might
have this as a LHS: let f71 = M.f Int in f71
(c) Do eta reduction. To see why, consider the fold/build rule,
which without simplification looked like:
fold k z (build (/\a. g a)) ==> ...
This doesn't match unless you do eta reduction on the build argument.
Similarly for a LHS like
augment g (build h)
we do not want to get
augment (\a. g a) (build h)
otherwise we don't match when given an argument like
augment (\a. h a a) (build h)
Note [Matching seqId]
~~~~~~~~~~~~~~~~~~~~~
The desugarer turns (seq e r) into (case e of _ -> r), via a special-case hack
and this code turns it back into an application of seq!
See Note [Rules for seq] in MkId for the details.
Note [Unused spec binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: a -> a
... SPECIALISE f :: Eq a => a -> a ...
It's true that this *is* a more specialised type, but the rule
we get is something like this:
f_spec d = f
RULE: f = f_spec d
Note that the rule is bogus, because it mentions a 'd' that is
not bound on the LHS! But it's a silly specialisation anyway, because
the constraint is unused. We could bind 'd' to (error "unused")
but it seems better to reject the program because it's almost certainly
a mistake. That's what the isDeadBinder call detects.
Note [Free dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~
When the LHS of a specialisation rule, (/\as\ds. f es) has a free dict,
which is presumably in scope at the function definition site, we can quantify
over it too. *Any* dict with that type will do.
So for example when you have
f :: Eq a => a -> a
f = <rhs>
... SPECIALISE f :: Int -> Int ...
Then we get the SpecPrag
SpecPrag (f Int dInt)
And from that we want the rule
RULE forall dInt. f Int dInt = f_spec
f_spec = let f = <rhs> in f Int dInt
But be careful! That dInt might be GHC.Base.$fOrdInt, which is an External
Name, and you can't bind them in a lambda or forall without getting things
confused. Likewise it might have an InlineRule or something, which would be
utterly bogus. So we really make a fresh Id, with the same unique and type
as the old one, but with an Internal name and no IdInfo.
************************************************************************
* *
Desugaring evidence
* *
************************************************************************
-}
dsHsWrapper :: HsWrapper -> CoreExpr -> DsM CoreExpr
dsHsWrapper WpHole e = return e
dsHsWrapper (WpTyApp ty) e = return $ App e (Type ty)
dsHsWrapper (WpLet ev_binds) e = do bs <- dsTcEvBinds ev_binds
return (mkCoreLets bs e)
dsHsWrapper (WpCompose c1 c2) e = do { e1 <- dsHsWrapper c2 e
; dsHsWrapper c1 e1 }
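-- The 'WpFun' case wraps a whole function: it binds a fresh @x :: t1@,
-- adapts the argument with @c1@, applies @e@, and adapts the result with
-- @c2@, producing @\x -> c2[ e (c1[x]) ]@.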
dsHsWrapper (WpFun c1 c2 t1 _) e = do { x <- newSysLocalDs t1
; e1 <- dsHsWrapper c1 (Var x)
; e2 <- dsHsWrapper c2 (e `mkCoreAppDs` e1)
; return (Lam x e2) }
dsHsWrapper (WpCast co) e = ASSERT(tcCoercionRole co == Representational)
dsTcCoercion co (mkCastDs e)
dsHsWrapper (WpEvLam ev) e = return $ Lam ev e
dsHsWrapper (WpTyLam tv) e = return $ Lam tv e
dsHsWrapper (WpEvApp tm) e = liftM (App e) (dsEvTerm tm)
--------------------------------------
dsTcEvBinds_s :: [TcEvBinds] -> DsM [CoreBind]
dsTcEvBinds_s [] = return []
dsTcEvBinds_s (b:rest) = ASSERT( null rest ) -- Zonker ensures null
dsTcEvBinds b
dsTcEvBinds :: TcEvBinds -> DsM [CoreBind]
dsTcEvBinds (TcEvBinds {}) = panic "dsEvBinds" -- Zonker has got rid of this
dsTcEvBinds (EvBinds bs) = dsEvBinds bs
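-- Desugar a bag of evidence bindings: group them into strongly connected
-- components first, so that mutually recursive bindings become a single
-- 'Rec' and acyclic ones stay 'NonRec'.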
dsEvBinds :: Bag EvBind -> DsM [CoreBind]
dsEvBinds bs = mapM ds_scc (sccEvBinds bs)
where
ds_scc (AcyclicSCC (EvBind { eb_lhs = v, eb_rhs = r }))
= liftM (NonRec v) (dsEvTerm r)
ds_scc (CyclicSCC bs) = liftM Rec (mapM ds_pair bs)
ds_pair (EvBind { eb_lhs = v, eb_rhs = r }) = liftM ((,) v) (dsEvTerm r)
sccEvBinds :: Bag EvBind -> [SCC EvBind]
sccEvBinds bs = stronglyConnCompFromEdgedVertices edges
where
edges :: [(EvBind, EvVar, [EvVar])]
edges = foldrBag ((:) . mk_node) [] bs
mk_node :: EvBind -> (EvBind, EvVar, [EvVar])
mk_node b@(EvBind { eb_lhs = var, eb_rhs = term })
= (b, var, varSetElems (evVarsOfTerm term))
---------------------------------------
dsEvTerm :: EvTerm -> DsM CoreExpr
dsEvTerm (EvId v) = return (Var v)
dsEvTerm (EvCast tm co)
= do { tm' <- dsEvTerm tm
; dsTcCoercion co $ mkCastDs tm' }
-- 'v' is always a lifted evidence variable so it is
-- unnecessary to call varToCoreExpr v here.
dsEvTerm (EvDFunApp df tys tms) = return (Var df `mkTyApps` tys `mkApps` (map Var tms))
dsEvTerm (EvCoercion (TcCoVarCo v)) = return (Var v) -- See Note [Simple coercions]
dsEvTerm (EvCoercion co) = dsTcCoercion co mkEqBox
dsEvTerm (EvSuperClass d n)
= do { d' <- dsEvTerm d
; let (cls, tys) = getClassPredTys (exprType d')
sc_sel_id = classSCSelId cls n -- Zero-indexed
; return $ Var sc_sel_id `mkTyApps` tys `App` d' }
dsEvTerm (EvDelayedError ty msg) = return $ Var errorId `mkTyApps` [ty] `mkApps` [litMsg]
where
errorId = tYPE_ERROR_ID
litMsg = Lit (MachStr (fastStringToByteString msg))
dsEvTerm (EvLit l) =
case l of
EvNum n -> mkIntegerExpr n
EvStr s -> mkStringExprFS s
dsEvTerm (EvCallStack cs) = dsEvCallStack cs
dsEvTerm (EvTypeable ev) = dsEvTypeable ev
dsEvTypeable :: EvTypeable -> DsM CoreExpr
dsEvTypeable ev =
do tyCl <- dsLookupTyCon typeableClassName
typeRepTc <- dsLookupTyCon typeRepTyConName
let tyRepType = mkTyConApp typeRepTc []
(ty, rep) <-
case ev of
EvTypeableTyCon tc ks ->
do ctr <- dsLookupGlobalId mkPolyTyConAppName
mkTyCon <- dsLookupGlobalId mkTyConName
dflags <- getDynFlags
let mkRep cRep kReps tReps =
mkApps (Var ctr) [ cRep, mkListExpr tyRepType kReps
, mkListExpr tyRepType tReps ]
let kindRep k =
case splitTyConApp_maybe k of
Nothing -> panic "dsEvTypeable: not a kind constructor"
Just (kc,ks) ->
do kcRep <- tyConRep dflags mkTyCon kc
reps <- mapM kindRep ks
return (mkRep kcRep [] reps)
tcRep <- tyConRep dflags mkTyCon tc
kReps <- mapM kindRep ks
return ( mkTyConApp tc ks
, mkRep tcRep kReps []
)
EvTypeableTyApp t1 t2 ->
do e1 <- getRep tyCl t1
e2 <- getRep tyCl t2
ctr <- dsLookupGlobalId mkAppTyName
return ( mkAppTy (snd t1) (snd t2)
, mkApps (Var ctr) [ e1, e2 ]
)
EvTypeableTyLit t ->
do e <- tyLitRep t
return (snd t, e)
-- TyRep -> Typeable t
-- see also: Note [Memoising typeOf]
repName <- newSysLocalDs tyRepType
let proxyT = mkProxyPrimTy (typeKind ty) ty
method = bindNonRec repName rep
$ mkLams [mkWildValBinder proxyT] (Var repName)
-- package up the method as `Typeable` dictionary
return $ mkCastDs method $ mkSymCo $ getTypeableCo tyCl ty
where
-- co: method -> Typeable k t
getTypeableCo tc t =
case instNewTyCon_maybe tc [typeKind t, t] of
Just (_,co) -> co
_ -> panic "Class `Typeable` is not a `newtype`."
-- Typeable t -> TyRep
getRep tc (ev,t) =
do typeableExpr <- dsEvTerm ev
let co = getTypeableCo tc t
method = mkCastDs typeableExpr co
proxy = mkTyApps (Var proxyHashId) [typeKind t, t]
return (mkApps method [proxy])
-- KnownNat t -> TyRep (also used for KnownSymbol)
tyLitRep (ev,t) =
do dict <- dsEvTerm ev
fun <- dsLookupGlobalId $
case typeKind t of
k | eqType k typeNatKind -> typeNatTypeRepName
| eqType k typeSymbolKind -> typeSymbolTypeRepName
| otherwise -> panic "dsEvTypeable: unknown type lit kind"
let finst = mkTyApps (Var fun) [t]
proxy = mkTyApps (Var proxyHashId) [typeKind t, t]
return (mkApps finst [ dict, proxy ])
-- This part could be cached
tyConRep dflags mkTyCon tc =
do pkgStr <- mkStringExprFS pkg_fs
modStr <- mkStringExprFS modl_fs
nameStr <- mkStringExprFS name_fs
return (mkApps (Var mkTyCon) [ int64 high, int64 low
, pkgStr, modStr, nameStr
])
where
tycon_name = tyConName tc
modl = nameModule tycon_name
pkg = modulePackageKey modl
modl_fs = moduleNameFS (moduleName modl)
pkg_fs = packageKeyFS pkg
name_fs = occNameFS (nameOccName tycon_name)
hash_name_fs
| isPromotedTyCon tc = appendFS (mkFastString "$k") name_fs
| isPromotedDataCon tc = appendFS (mkFastString "$c") name_fs
| isTupleTyCon tc &&
returnsConstraintKind (tyConKind tc)
= appendFS (mkFastString "$p") name_fs
| otherwise = name_fs
hashThis = unwords $ map unpackFS [pkg_fs, modl_fs, hash_name_fs]
Fingerprint high low = fingerprintString hashThis
int64
| wORD_SIZE dflags == 4 = mkWord64LitWord64
| otherwise = mkWordLit dflags . fromIntegral
{- Note [Memoising typeOf]
~~~~~~~~~~~~~~~~~~~~~~~~~~
See #3245, #9203
IMPORTANT: we don't want to recalculate the TypeRep once per call with
the proxy argument. This is what went wrong in #3245 and #9203. So we
help GHC by manually keeping the 'rep' *outside* the lambda.
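Concretely, dsEvTypeable builds (roughly)
    let rep = <TypeRep for t> in \_proxy -> rep
rather than
    \_proxy -> <recompute the TypeRep for t>
so the TypeRep is computed once, not once per call.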
-}
dsEvCallStack :: EvCallStack -> DsM CoreExpr
-- See Note [Overview of implicit CallStacks] in TcEvidence.hs
dsEvCallStack cs = do
df <- getDynFlags
m <- getModule
srcLocDataCon <- dsLookupDataCon srcLocDataConName
let srcLocTyCon = dataConTyCon srcLocDataCon
let srcLocTy = mkTyConTy srcLocTyCon
let mkSrcLoc l =
liftM (mkCoreConApps srcLocDataCon)
(sequence [ mkStringExpr (showPpr df $ modulePackageKey m)
, mkStringExprFS (moduleNameFS $ moduleName m)
, mkStringExprFS (srcSpanFile l)
, return $ mkIntExprInt df (srcSpanStartLine l)
, return $ mkIntExprInt df (srcSpanStartCol l)
, return $ mkIntExprInt df (srcSpanEndLine l)
, return $ mkIntExprInt df (srcSpanEndCol l)
])
-- Be careful to use [Char] instead of String here to avoid
-- unnecessary dependencies on GHC.Base, particularly when
-- building GHC.Err.absentError
let callSiteTy = mkBoxedTupleTy [mkListTy charTy, srcLocTy]
matchId <- newSysLocalDs $ mkListTy callSiteTy
callStackDataCon <- dsLookupDataCon callStackDataConName
let callStackTyCon = dataConTyCon callStackDataCon
let callStackTy = mkTyConTy callStackTyCon
let emptyCS = mkCoreConApps callStackDataCon [mkNilExpr callSiteTy]
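  -- pushCS name loc rest: case-match the 'CallStack' constructor of 'rest'
  -- and cons the new (name, loc) call site onto the front of its list.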
let pushCS name loc rest =
mkWildCase rest callStackTy callStackTy
[( DataAlt callStackDataCon
, [matchId]
, mkCoreConApps callStackDataCon
[mkConsExpr callSiteTy
(mkCoreTup [name, loc])
(Var matchId)]
)]
let mkPush name loc tm = do
nameExpr <- mkStringExprFS name
locExpr <- mkSrcLoc loc
case tm of
EvCallStack EvCsEmpty -> return (pushCS nameExpr locExpr emptyCS)
_ -> do tmExpr <- dsEvTerm tm
-- at this point tmExpr :: IP sym CallStack
-- but we need the actual CallStack to pass to pushCS,
-- so we use unwrapIP to strip the dictionary wrapper
-- See Note [Overview of implicit CallStacks]
let ip_co = unwrapIP (exprType tmExpr)
return (pushCS nameExpr locExpr (mkCastDs tmExpr ip_co))
case cs of
EvCsTop name loc tm -> mkPush name loc tm
EvCsPushCall name loc tm -> mkPush (occNameFS $ getOccName name) loc tm
EvCsEmpty -> panic "Cannot have an empty CallStack"
---------------------------------------
dsTcCoercion :: TcCoercion -> (Coercion -> CoreExpr) -> DsM CoreExpr
-- This is the crucial function that moves
-- from TcCoercions to Coercions; see Note [TcCoercions] in Coercion
-- e.g. dsTcCoercion (trans g1 g2) k
-- = case g1 of EqBox g1# ->
-- case g2 of EqBox g2# ->
-- k (trans g1# g2#)
-- thing_inside will get a coercion at the role requested
dsTcCoercion co thing_inside
= do { us <- newUniqueSupply
; let eqvs_covs :: [(EqVar,CoVar)]
eqvs_covs = zipWith mk_co_var (varSetElems (coVarsOfTcCo co))
(uniqsFromSupply us)
subst = mkCvSubst emptyInScopeSet [(eqv, mkCoVarCo cov) | (eqv, cov) <- eqvs_covs]
result_expr = thing_inside (ds_tc_coercion subst co)
result_ty = exprType result_expr
; return (foldr (wrap_in_case result_ty) result_expr eqvs_covs) }
where
mk_co_var :: Id -> Unique -> (Id, Id)
mk_co_var eqv uniq = (eqv, mkUserLocal occ uniq ty loc)
where
eq_nm = idName eqv
occ = nameOccName eq_nm
loc = nameSrcSpan eq_nm
ty = mkCoercionType (getEqPredRole (evVarPred eqv)) ty1 ty2
(ty1, ty2) = getEqPredTys (evVarPred eqv)
wrap_in_case result_ty (eqv, cov) body
= case getEqPredRole (evVarPred eqv) of
Nominal -> Case (Var eqv) eqv result_ty [(DataAlt eqBoxDataCon, [cov], body)]
Representational -> Case (Var eqv) eqv result_ty [(DataAlt coercibleDataCon, [cov], body)]
Phantom -> panic "wrap_in_case/phantom"
ds_tc_coercion :: CvSubst -> TcCoercion -> Coercion
-- If the incoming TcCoercion is of type (a ~ b) (resp. Coercible a b)
-- the result is of type (a ~# b) (resp. a ~# b)
-- The VarEnv maps EqVars of type (a ~ b) to Coercions of type (a ~# b) (resp. and so on)
-- No need for InScope set etc because the
ds_tc_coercion subst tc_co
= go tc_co
where
go (TcRefl r ty) = Refl r (Coercion.substTy subst ty)
go (TcTyConAppCo r tc cos) = mkTyConAppCo r tc (map go cos)
go (TcAppCo co1 co2) = mkAppCo (go co1) (go co2)
go (TcForAllCo tv co) = mkForAllCo tv' (ds_tc_coercion subst' co)
where
(subst', tv') = Coercion.substTyVarBndr subst tv
go (TcAxiomInstCo ax ind cos)
= AxiomInstCo ax ind (map go cos)
go (TcPhantomCo ty1 ty2) = UnivCo (fsLit "ds_tc_coercion") Phantom ty1 ty2
go (TcSymCo co) = mkSymCo (go co)
go (TcTransCo co1 co2) = mkTransCo (go co1) (go co2)
go (TcNthCo n co) = mkNthCo n (go co)
go (TcLRCo lr co) = mkLRCo lr (go co)
go (TcSubCo co) = mkSubCo (go co)
go (TcLetCo bs co) = ds_tc_coercion (ds_co_binds bs) co
go (TcCastCo co1 co2) = mkCoCast (go co1) (go co2)
go (TcCoVarCo v) = ds_ev_id subst v
go (TcAxiomRuleCo co ts cs) = AxiomRuleCo co (map (Coercion.substTy subst) ts) (map go cs)
go (TcCoercion co) = co
ds_co_binds :: TcEvBinds -> CvSubst
ds_co_binds (EvBinds bs) = foldl ds_scc subst (sccEvBinds bs)
ds_co_binds eb@(TcEvBinds {}) = pprPanic "ds_co_binds" (ppr eb)
ds_scc :: CvSubst -> SCC EvBind -> CvSubst
ds_scc subst (AcyclicSCC (EvBind { eb_lhs = v, eb_rhs = ev_term }))
= extendCvSubstAndInScope subst v (ds_co_term subst ev_term)
ds_scc _ (CyclicSCC other) = pprPanic "ds_scc:cyclic" (ppr other $$ ppr tc_co)
ds_co_term :: CvSubst -> EvTerm -> Coercion
ds_co_term subst (EvCoercion tc_co) = ds_tc_coercion subst tc_co
ds_co_term subst (EvId v) = ds_ev_id subst v
ds_co_term subst (EvCast tm co) = mkCoCast (ds_co_term subst tm) (ds_tc_coercion subst co)
ds_co_term _ other = pprPanic "ds_co_term" (ppr other $$ ppr tc_co)
ds_ev_id :: CvSubst -> EqVar -> Coercion
ds_ev_id subst v
| Just co <- Coercion.lookupCoVar subst v = co
| otherwise = pprPanic "ds_tc_coercion" (ppr v $$ ppr tc_co)
{-
Note [Simple coercions]
~~~~~~~~~~~~~~~~~~~~~~~
We have a special case for coercions that are simple variables.
Suppose cv :: a ~ b is in scope
Lacking the special case, if we see
f a b cv
we'd desugar to
f a b (case cv of EqBox (cv# :: a ~# b) -> EqBox cv#)
which is a bit stupid. The special case does the obvious thing.
This turns out to be important when desugaring the LHS of a RULE
(see Trac #7837). Suppose we have
normalise :: (a ~ Scalar a) => a -> a
normalise_Double :: Double -> Double
{-# RULES "normalise" normalise = normalise_Double #-}
Then the RULE we want looks like
forall a, (cv:a~Scalar a).
normalise a cv = normalise_Double
But without the special case we generate the redundant box/unbox,
which simpleOpt (currently) doesn't remove. So the rule never matches.
Maybe simpleOpt should be smarter. But it seems like a good plan
to simply never generate the redundant box/unbox in the first place.
-}
| ml9951/ghc | compiler/deSugar/DsBinds.hs | bsd-3-clause | 48,481 | 0 | 24 | 15,268 | 8,297 | 4,237 | 4,060 | 543 | 21 |
-- | Devel web server.
module DevelMain where
import HL.Dispatch ()
import HL.Foundation
import Control.Concurrent
import Data.IORef
import Foreign.Store
import Network.Wai.Handler.Warp
import System.Environment (getEnvironment)
import Yesod
import Yesod.Static
-- | Start the web server.
main :: IO (Store (IORef Application))
main =
do s <- static "static"
c <- newChan
app <- toWaiApp (App s c)
ref <- newIORef app
env <- getEnvironment
let port = maybe 1990 read $ lookup "PORT" env
tid <- forkIO
(runSettings
(setPort port defaultSettings)
(\req ->
do handler <- readIORef ref
handler req))
_ <- newStore tid
ref' <- newStore ref
_ <- newStore c
return ref'
-- | Update the server, start it if not running.
update :: IO (Store (IORef Application))
update =
do m <- lookupStore 1
case m of
Nothing -> main
Just store ->
do ref <- readStore store
c <- readStore (Store 2)
writeChan c ()
s <- static "static"
app <- toWaiApp (App s c)
writeIORef ref app
return store
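-- A typical GHCi workflow is roughly (a sketch; the exact commands depend
-- on your setup):
--
-- > :load DevelMain
-- > DevelMain.update
--
-- Because the Warp handler reads the 'IORef' on every request, a later
-- 'update' swaps in the freshly built 'Application' without restarting
-- the process or rebinding the port.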
| mietek/hl | src/DevelMain.hs | bsd-3-clause | 1,219 | 0 | 16 | 413 | 381 | 180 | 201 | 41 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) - 2017 Róman Joost <roman@bromeco.de>
This file is part of gtfsschedule.
gtfsschedule is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
gtfsschedule is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with gtfsschedule. If not, see <http://www.gnu.org/licenses/>.
-}
module CSV.Import.Route where
import CSV.Import.Util (maybeToPersist)
import Data.Csv (DefaultOrdered, FromNamedRecord)
import GHC.Generics
import qualified Data.Text as T
import Database.Persist (PersistValue (..))
data Route = Route { route_id :: !T.Text
, route_short_name :: !T.Text
, route_long_name :: !T.Text
, route_desc :: Maybe T.Text
, route_type :: !T.Text
, route_url :: Maybe T.Text
, route_color :: Maybe T.Text
, route_text_color :: Maybe T.Text
}
deriving (Eq, Generic, Show)
instance FromNamedRecord Route
instance DefaultOrdered Route
prepareSQL ::
T.Text
prepareSQL = "insert into route (route_id, short_name, long_name, desc, type, url, color, text_color)\
\ values (?, ?, ?, ?, ?, ?, ?, ?);"
convertToValues ::
Route
-> [PersistValue]
convertToValues r = [ PersistText $ route_id r
, PersistText $ route_short_name r
, PersistText $ route_long_name r
, maybeToPersist PersistText $ route_desc r
, PersistText $ route_type r
, maybeToPersist PersistText $ route_url r
, maybeToPersist PersistText $ route_color r
, maybeToPersist PersistText $ route_text_color r
]
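-- A usage sketch (illustrative only; it assumes persistent's 'rawExecute'
-- from "Database.Persist.Sql", which runs a parameterised statement with
-- the given values):
--
-- > insertRoute r = rawExecute prepareSQL (convertToValues r)
--
-- 'prepareSQL' provides the INSERT statement with eight placeholders and
-- 'convertToValues' provides the eight values in matching order.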
| romanofski/gtfsbrisbane | src/CSV/Import/Route.hs | bsd-3-clause | 2,302 | 0 | 10 | 718 | 303 | 166 | 137 | 41 | 1 |
-- |
-- Module : Basement.These
-- License : BSD-style
-- Maintainer : Nicolas Di Prima <nicolas@primetype.co.uk>
-- Stability : stable
-- Portability : portable
--
-- @These a b@, sum type to represent either @a@ or @b@ or both.
--
module Basement.These
( These(..)
) where
import Basement.Compat.Base
import Basement.NormalForm
import Basement.Compat.Bifunctor
-- | Either a or b or both.
data These a b
= This a
| That b
| These a b
deriving (Eq, Ord, Show, Typeable)
instance (NormalForm a, NormalForm b) => NormalForm (These a b) where
toNormalForm (This a) = toNormalForm a
toNormalForm (That b) = toNormalForm b
toNormalForm (These a b) = toNormalForm a `seq` toNormalForm b
instance Bifunctor These where
bimap fa _ (This a) = This (fa a)
bimap _ fb (That b) = That (fb b)
bimap fa fb (These a b) = These (fa a) (fb b)
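-- For example, @bimap show not (These 1 True)@ is @These "1" False@,
-- while @bimap show not (This 1)@ is @This "1"@ and
-- @bimap show not (That True)@ is @That False@.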
instance Functor (These a) where
fmap = second
| vincenthz/hs-foundation | basement/Basement/These.hs | bsd-3-clause | 951 | 0 | 8 | 232 | 295 | 159 | 136 | 20 | 0 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.TypeLits (module M) where
import "base" GHC.TypeLits as M
| xwysp/codeworld | codeworld-base/src/GHC/TypeLits.hs | apache-2.0 | 739 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
{-# LANGUAGE CPP
, GADTs
, KindSignatures
, DataKinds
, Rank2Types
, ScopedTypeVariables
, MultiParamTypeClasses
, FlexibleContexts
, FlexibleInstances
#-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2016.05.24
-- |
-- Module : Language.Hakaru.Evaluation.EvalMonad
-- Copyright : Copyright (c) 2016 the Hakaru team
-- License : BSD3
-- Maintainer : wren@community.haskell.org
-- Stability : experimental
-- Portability : GHC-only
--
--
----------------------------------------------------------------
module Language.Hakaru.Evaluation.EvalMonad
( runPureEvaluate
, pureEvaluate
-- * The pure-evaluation monad
-- ** List-based version
, ListContext(..), PureAns, Eval(..), runEval
, residualizePureListContext
-- ** TODO: IntMap-based version
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Functor ((<$>))
import Control.Applicative (Applicative(..))
#endif
import qualified Data.Foldable as F
import Language.Hakaru.Syntax.IClasses (Some2(..))
import Language.Hakaru.Syntax.Variable (memberVarSet)
import Language.Hakaru.Syntax.ABT (ABT(..), subst, maxNextFree)
import Language.Hakaru.Syntax.DatumABT
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Evaluation.Types
import Language.Hakaru.Evaluation.Lazy (evaluate)
import Language.Hakaru.Evaluation.PEvalMonad (ListContext(..))
-- The rest of these are just for the emit code, which isn't currently exported.
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Traversable as T
import Language.Hakaru.Syntax.IClasses (Functor11(..))
import Language.Hakaru.Syntax.Variable (Variable(), toAssocs1)
import Language.Hakaru.Syntax.ABT (caseVarSyn, caseBinds, substs)
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing (Sing, sUnPair)
import Language.Hakaru.Syntax.TypeOf (typeOf)
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Evaluation.Lazy (reifyPair)
#ifdef __TRACE_DISINTEGRATE__
import Debug.Trace (trace)
#endif
----------------------------------------------------------------
----------------------------------------------------------------
-- | Call 'evaluate' on a term. This variant returns an @abt@ expression itself so you needn't worry about the 'Eval' monad. For the monadic version, see 'pureEvaluate'.
--
-- BUG: now that we've indexed 'ListContext' by a 'Purity', does exposing the implementation details still enable clients to break our invariants?
runPureEvaluate :: (ABT Term abt) => abt '[] a -> abt '[] a
runPureEvaluate e = runEval (fromWhnf <$> pureEvaluate e) [Some2 e]
-- 'evaluate' itself can never @lub@ or @bot@, as captured by the
-- fact that it's type doesn't include 'Alternative' nor 'MonadPlus'
-- constraints. So non-singularity of results could only come from
-- calling @perform@. However, we will never call perform because: (a) the initial heap must be 'Pure' so we will never call @perform@ for a statement on the initial heap, and (b) 'evaluate' itself will never push impure statements so we will never call @perform@ for the statements we push either.
--
-- | Call 'evaluate' on a term. This variant returns something in the 'Eval' monad so you can string multiple evaluation calls together. For the non-monadic version, see 'runPureEvaluate'.
pureEvaluate :: (ABT Term abt) => TermEvaluator abt (Eval abt)
pureEvaluate = evaluate (brokenInvariant "perform")
----------------------------------------------------------------
type PureAns abt a = ListContext abt 'Pure -> abt '[] a
newtype Eval abt x =
Eval { unEval :: forall a. (x -> PureAns abt a) -> PureAns abt a }
brokenInvariant :: String -> a
brokenInvariant loc = error (loc ++ ": Eval's invariant broken")
-- | Run a computation in the 'Eval' monad, residualizing out all the
-- statements in the final evaluation context. The second argument
-- should include all the terms altered by the 'Eval' expression; this
-- is necessary to ensure proper hygiene; for example(s):
--
-- > runEval (pureEvaluate e) [Some2 e]
--
-- We use 'Some2' on the inputs because it doesn't matter what their
-- type or locally-bound variables are, so we want to allow @f@ to
-- contain terms with different indices.
runEval :: (ABT Term abt, F.Foldable f)
=> Eval abt (abt '[] a)
-> f (Some2 abt)
-> abt '[] a
runEval (Eval m) es =
m residualizePureListContext (ListContext (maxNextFree es) [])
residualizePureListContext
:: forall abt a
. (ABT Term abt)
=> abt '[] a
-> ListContext abt 'Pure
-> abt '[] a
residualizePureListContext e0 =
foldl step e0 . statements
where
    -- TODO: make parametric in the purity, so we can combine 'residualizeListContext' with this function.
step :: abt '[] a -> Statement abt Location 'Pure -> abt '[] a
step e s =
case s of
SLet (Location x) body _
| not (x `memberVarSet` freeVars e) -> e
-- TODO: if used exactly once in @e@, then inline.
| otherwise ->
case getLazyVariable body of
Just y -> subst x (var y) e
Nothing ->
case getLazyLiteral body of
Just v -> subst x (syn $ Literal_ v) e
Nothing ->
syn (Let_ :$ fromLazy body :* bind x e :* End)
----------------------------------------------------------------
instance Functor (Eval abt) where
fmap f (Eval m) = Eval $ \c -> m (c . f)
instance Applicative (Eval abt) where
pure x = Eval $ \c -> c x
Eval mf <*> Eval mx = Eval $ \c -> mf $ \f -> mx $ \x -> c (f x)
instance Monad (Eval abt) where
return = pure
Eval m >>= k = Eval $ \c -> m $ \x -> unEval (k x) c
instance (ABT Term abt) => EvaluationMonad abt (Eval abt) 'Pure where
freshNat =
Eval $ \c (ListContext i ss) ->
c i (ListContext (i+1) ss)
unsafePush s =
Eval $ \c (ListContext i ss) ->
c () (ListContext i (s:ss))
-- N.B., the use of 'reverse' is necessary so that the order
-- of pushing matches that of 'pushes'
unsafePushes ss =
Eval $ \c (ListContext i ss') ->
c () (ListContext i (reverse ss ++ ss'))
select x p = loop []
where
-- TODO: use a DList to avoid reversing inside 'unsafePushes'
loop ss = do
ms <- unsafePop
case ms of
Nothing -> do
unsafePushes ss
return Nothing
Just s ->
-- Alas, @p@ will have to recheck 'isBoundBy'
-- in order to grab the 'Refl' proof we erased;
-- but there's nothing to be done for it.
case x `isBoundBy` s >> p s of
Nothing -> loop (s:ss)
Just mr -> do
r <- mr
unsafePushes ss
return (Just r)
-- TODO: make parametric in the purity
-- | Not exported because we only need it for defining 'select' on 'Eval'.
unsafePop :: Eval abt (Maybe (Statement abt Location 'Pure))
unsafePop =
Eval $ \c h@(ListContext i ss) ->
case ss of
[] -> c Nothing h
s:ss' -> c (Just s) (ListContext i ss')
----------------------------------------------------------------
----------------------------------------------------------------
-- | Emit some code that binds a variable, and return the variable
-- thus bound. The function says what to wrap the result of the
-- continuation with; i.e., what we're actually emitting.
emit
:: (ABT Term abt)
=> Text
-> Sing a
-> (forall r. abt '[a] r -> abt '[] r)
-> Eval abt (Variable a)
emit hint typ f = do
x <- freshVar hint typ
Eval $ \c h -> (f . bind x) $ c x h
-- | A smart constructor for emitting let-bindings. If the input
-- is already a variable then we just return it; otherwise we emit
-- the let-binding. N.B., this function provides the invariant that
-- the result is in fact a variable; whereas 'emitLet'' does not.
emitLet :: (ABT Term abt) => abt '[] a -> Eval abt (Variable a)
emitLet e =
caseVarSyn e return $ \_ ->
emit Text.empty (typeOf e) $ \f ->
syn (Let_ :$ e :* f :* End)
-- | A smart constructor for emitting let-bindings. If the input
-- is already a variable or a literal constant, then we just return
-- it; otherwise we emit the let-binding. N.B., this function
-- provides weaker guarantees on the type of the result; if you
-- require the result to always be a variable, then see 'emitLet'
-- instead.
emitLet' :: (ABT Term abt) => abt '[] a -> Eval abt (abt '[] a)
emitLet' e =
caseVarSyn e (const $ return e) $ \t ->
case t of
Literal_ _ -> return e
_ -> do
x <- emit Text.empty (typeOf e) $ \f ->
syn (Let_ :$ e :* f :* End)
return (var x)
-- | A smart constructor for emitting \"unpair\". If the input
-- argument is actually a constructor then we project out the two
-- components; otherwise we emit the case-binding and return the
-- two variables.
emitUnpair
:: (ABT Term abt)
=> Whnf abt (HPair a b)
-> Eval abt (abt '[] a, abt '[] b)
emitUnpair (Head_ w) = return $ reifyPair w
emitUnpair (Neutral e) = do
let (a,b) = sUnPair (typeOf e)
x <- freshVar Text.empty a
y <- freshVar Text.empty b
emitUnpair_ x y e
emitUnpair_
:: forall abt a b
. (ABT Term abt)
=> Variable a
-> Variable b
-> abt '[] (HPair a b)
-> Eval abt (abt '[] a, abt '[] b)
emitUnpair_ x y = loop
where
done :: abt '[] (HPair a b) -> Eval abt (abt '[] a, abt '[] b)
done e =
#ifdef __TRACE_DISINTEGRATE__
trace "-- emitUnpair: done (term is not Datum_ nor Case_)" $
#endif
Eval $ \c h ->
( syn
. Case_ e
. (:[])
. Branch (pPair PVar PVar)
. bind x
. bind y
) $ c (var x, var y) h
loop :: abt '[] (HPair a b) -> Eval abt (abt '[] a, abt '[] b)
loop e0 =
caseVarSyn e0 (done . var) $ \t ->
case t of
Datum_ d -> do
#ifdef __TRACE_DISINTEGRATE__
trace "-- emitUnpair: found Datum_" $ return ()
#endif
return $ reifyPair (WDatum d)
Case_ e bs -> do
#ifdef __TRACE_DISINTEGRATE__
trace "-- emitUnpair: going under Case_" $ return ()
#endif
-- TODO: we want this to duplicate the current
-- continuation for (the evaluation of @loop@ in)
-- all branches. So far our traces all end up
-- returning @bot@ on the first branch, and hence
-- @bot@ for the whole case-expression, so we can't
-- quite tell whether it does what is intended.
--
-- N.B., the only 'Eval'-effects in 'applyBranch'
-- are to freshen variables; thus this use of
-- 'traverse' is perfectly sound.
emitCaseWith loop e bs
_ -> done e0
-- TODO: emitUneither
-- | Run each of the elements of the traversable using the same
-- heap and continuation for each one, then pass the results to a
-- function for emitting code.
emitFork_
:: (ABT Term abt, T.Traversable t)
=> (forall r. t (abt '[] r) -> abt '[] r)
-> t (Eval abt a)
-> Eval abt a
emitFork_ f ms =
Eval $ \c h -> f $ fmap (\m -> unEval m c h) ms
emitCaseWith
:: (ABT Term abt)
=> (abt '[] b -> Eval abt r)
-> abt '[] a
-> [Branch a abt b]
-> Eval abt r
emitCaseWith f e bs = do
gms <- T.for bs $ \(Branch pat body) ->
let (vars, body') = caseBinds body
in (\vars' ->
let rho = toAssocs1 vars (fmap11 var vars')
in GBranch pat vars' (f $ substs rho body')
) <$> freshenVars vars
Eval $ \c h ->
syn (Case_ e
(map (fromGBranch . fmap (\m -> unEval m c h)) gms))
{-# INLINE emitCaseWith #-}
----------------------------------------------------------------
----------------------------------------------------------- fin.
| zaxtax/hakaru | haskell/Language/Hakaru/Evaluation/EvalMonad.hs | bsd-3-clause | 12,638 | 0 | 22 | 3,646 | 2,985 | 1,579 | 1,406 | 197 | 3 |
foo f = (\ g x -> f (g x))
| bitemyapp/tandoori | input/lambda.hs | bsd-3-clause | 27 | 0 | 9 | 10 | 28 | 14 | 14 | 1 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-|
Module : Stack.Sig
Description : GPG Signatures for Stack
Copyright : (c) 2015-2018, Stack contributors
License : BSD3
Maintainer : Tim Dysinger <tim@fpcomplete.com>
Stability : experimental
Portability : POSIX
-}
module Stack.Sig (module Sig) where
import Stack.Sig.GPG as Sig
import Stack.Sig.Sign as Sig
| anton-dessiatov/stack | src/Stack/Sig.hs | bsd-3-clause | 362 | 0 | 4 | 64 | 30 | 22 | 8 | 4 | 0 |
haskell comment {-
haskell comment {- 3 lines of comments total! -}
haskell blank
haskell comment -}
haskell blank
| scriptum/ohcount | test/expected_dir/haskell3.hs | gpl-2.0 | 131 | 6 | 4 | 35 | 15 | 8 | 7 | -1 | -1 |
{-# LANGUAGE DeriveGeneric, ScopedTypeVariables #-}
module GCoArbitraryExample where
import GHC.Generics (Generic)
import Test.QuickCheck
import Test.QuickCheck.Function
data D a = C1 a | C2 deriving (Eq, Show, Read, Generic)
instance Arbitrary a => Arbitrary (D a)
instance CoArbitrary a => CoArbitrary (D a)
instance (Show a, Read a) => Function (D a) where
function = functionShow
main :: IO ()
main = quickCheck $ \(Fun _ f) ->
f (C1 (2::Int)) `elem` [0, 1 :: Int]
| srhb/quickcheck | tests/GCoArbitraryExample.hs | bsd-3-clause | 480 | 0 | 11 | 86 | 194 | 106 | 88 | 13 | 1 |
{-# LANGUAGE Rank2Types, ScopedTypeVariables #-}
-- Test the handling of conditionals in rank-n stuff
-- Should fail, regardless of branch ordering
module ShouldFail where
-- These two are ok
f1 = (\ (x :: forall a. a->a) -> x)
f2 = (\ (x :: forall a. a->a) -> x) id 'c'
-- These fail
f3 v = (if v then
(\ (x :: forall a. a->a) -> x)
else
(\ x -> x)
) id 'c'
f4 v = (if v then
(\ x -> x)
else
(\ (x :: forall a. a->a) -> x)
) id 'c'
| hvr/jhc | regress/tests/1_typecheck/4_fail/ghc/tcfail104.hs | mit | 470 | 12 | 13 | 135 | 196 | 114 | 82 | 12 | 2 |
{-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
-- |
-- Module : Data.Text.Internal.Encoding.Fusion
-- Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009,
-- (c) Duncan Coutts 2009
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : portable
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Fusible 'Stream'-oriented functions for converting between 'Text'
-- and several common encodings.
module Data.Text.Internal.Encoding.Fusion
(
-- * Streaming
streamASCII
, streamUtf8
, streamUtf16LE
, streamUtf16BE
, streamUtf32LE
, streamUtf32BE
-- * Unstreaming
, unstream
, module Data.Text.Internal.Encoding.Fusion.Common
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
import Data.ByteString.Internal (ByteString(..), mallocByteString, memcpy)
import Data.Text.Internal.Fusion (Step(..), Stream(..))
import Data.Text.Internal.Fusion.Size
import Data.Text.Encoding.Error
import Data.Text.Internal.Encoding.Fusion.Common
import Data.Text.Internal.Unsafe.Char (unsafeChr, unsafeChr8, unsafeChr32)
import Data.Text.Internal.Unsafe.Shift (shiftL, shiftR)
import Data.Word (Word8, Word16, Word32)
import Foreign.ForeignPtr (withForeignPtr, ForeignPtr)
import Foreign.Storable (pokeByteOff)
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
import qualified Data.Text.Internal.Encoding.Utf8 as U8
import qualified Data.Text.Internal.Encoding.Utf16 as U16
import qualified Data.Text.Internal.Encoding.Utf32 as U32
import Data.Text.Unsafe (unsafeDupablePerformIO)
streamASCII :: ByteString -> Stream Char
streamASCII bs = Stream next 0 (maxSize l)
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| otherwise = Yield (unsafeChr8 x1) (i+1)
where
x1 = B.unsafeIndex bs i
{-# DEPRECATED streamASCII "Do not use this function" #-}
{-# INLINE [0] streamASCII #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using UTF-8
-- encoding.
streamUtf8 :: OnDecodeError -> ByteString -> Stream Char
streamUtf8 onErr bs = Stream next 0 (maxSize l)
where
l = B.length bs
next i
| i >= l = Done
| U8.validate1 x1 = Yield (unsafeChr8 x1) (i+1)
| i+1 < l && U8.validate2 x1 x2 = Yield (U8.chr2 x1 x2) (i+2)
| i+2 < l && U8.validate3 x1 x2 x3 = Yield (U8.chr3 x1 x2 x3) (i+3)
| i+3 < l && U8.validate4 x1 x2 x3 x4 = Yield (U8.chr4 x1 x2 x3 x4) (i+4)
| otherwise = decodeError "streamUtf8" "UTF-8" onErr (Just x1) (i+1)
where
x1 = idx i
x2 = idx (i + 1)
x3 = idx (i + 2)
x4 = idx (i + 3)
idx = B.unsafeIndex bs
{-# INLINE [0] streamUtf8 #-}
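-- For instance, a lenient decoder can be sketched as
--
-- > decodeUtf8Lenient bs = Fusion.unstream (streamUtf8 lenientDecode bs)
--
-- where @Fusion.unstream@ is the 'Text'-building @unstream@ from
-- "Data.Text.Internal.Fusion" (not the 'ByteString'-building 'unstream'
-- defined below) and 'lenientDecode' comes from "Data.Text.Encoding.Error".
-- This is only a sketch of the intended use, not necessarily how the
-- public API is wired up.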
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little
-- endian UTF-16 encoding.
streamUtf16LE :: OnDecodeError -> ByteString -> Stream Char
streamUtf16LE onErr bs = Stream next 0 (maxSize (l `shiftR` 1))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2)
| i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4)
| otherwise = decodeError "streamUtf16LE" "UTF-16LE" onErr Nothing (i+1)
where
x1 = idx i + (idx (i + 1) `shiftL` 8)
x2 = idx (i + 2) + (idx (i + 3) `shiftL` 8)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16
{-# INLINE [0] streamUtf16LE #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big
-- endian UTF-16 encoding.
streamUtf16BE :: OnDecodeError -> ByteString -> Stream Char
streamUtf16BE onErr bs = Stream next 0 (maxSize (l `shiftR` 1))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2)
| i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4)
| otherwise = decodeError "streamUtf16BE" "UTF-16BE" onErr Nothing (i+1)
where
x1 = (idx i `shiftL` 8) + idx (i + 1)
x2 = (idx (i + 2) `shiftL` 8) + idx (i + 3)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16
{-# INLINE [0] streamUtf16BE #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big
-- endian UTF-32 encoding.
streamUtf32BE :: OnDecodeError -> ByteString -> Stream Char
streamUtf32BE onErr bs = Stream next 0 (maxSize (l `shiftR` 2))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4)
| otherwise = decodeError "streamUtf32BE" "UTF-32BE" onErr Nothing (i+1)
where
x = shiftL x1 24 + shiftL x2 16 + shiftL x3 8 + x4
x1 = idx i
x2 = idx (i+1)
x3 = idx (i+2)
x4 = idx (i+3)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32
{-# INLINE [0] streamUtf32BE #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little
-- endian UTF-32 encoding.
streamUtf32LE :: OnDecodeError -> ByteString -> Stream Char
streamUtf32LE onErr bs = Stream next 0 (maxSize (l `shiftR` 2))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4)
| otherwise = decodeError "streamUtf32LE" "UTF-32LE" onErr Nothing (i+1)
where
x = shiftL x4 24 + shiftL x3 16 + shiftL x2 8 + x1
x1 = idx i
x2 = idx $ i+1
x3 = idx $ i+2
x4 = idx $ i+3
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32
{-# INLINE [0] streamUtf32LE #-}
-- | /O(n)/ Convert a 'Stream' 'Word8' to a 'ByteString'.
unstream :: Stream Word8 -> ByteString
unstream (Stream next s0 len) = unsafeDupablePerformIO $ do
let mlen = upperBound 4 len
mallocByteString mlen >>= loop mlen 0 s0
where
loop !n !off !s fp = case next s of
Done -> trimUp fp n off
Skip s' -> loop n off s' fp
Yield x s'
| off == n -> realloc fp n off s' x
| otherwise -> do
withForeignPtr fp $ \p -> pokeByteOff p off x
loop n (off+1) s' fp
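    -- The output buffer starts at a size estimated from the stream's size
    -- hint and is doubled ('realloc') whenever it fills up; 'trimUp' finally
    -- trims the ByteString down to the bytes actually written.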
{-# NOINLINE realloc #-}
realloc fp n off s x = do
let n' = n+n
fp' <- copy0 fp n n'
withForeignPtr fp' $ \p -> pokeByteOff p off x
loop n' (off+1) s fp'
{-# NOINLINE trimUp #-}
trimUp fp _ off = return $! PS fp 0 off
copy0 :: ForeignPtr Word8 -> Int -> Int -> IO (ForeignPtr Word8)
copy0 !src !srcLen !destLen =
#if defined(ASSERTS)
assert (srcLen <= destLen) $
#endif
do
dest <- mallocByteString destLen
withForeignPtr src $ \src' ->
withForeignPtr dest $ \dest' ->
memcpy dest' src' (fromIntegral srcLen)
return dest
decodeError :: forall s. String -> String -> OnDecodeError -> Maybe Word8
-> s -> Step s Char
decodeError func kind onErr mb i =
case onErr desc mb of
Nothing -> Skip i
Just c -> Yield c i
where desc = "Data.Text.Internal.Encoding.Fusion." ++ func ++ ": Invalid " ++
kind ++ " stream"
| beni55/text | Data/Text/Internal/Encoding/Fusion.hs | bsd-2-clause | 7,772 | 0 | 16 | 2,388 | 2,393 | 1,248 | 1,145 | 145 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utility functions on @Core@ syntax
-}
{-# LANGUAGE CPP #-}
module CoreSubst (
-- * Main data types
Subst(..), -- Implementation exported for supercompiler's Renaming.hs only
TvSubstEnv, IdSubstEnv, InScopeSet,
-- ** Substituting into expressions and related types
deShadowBinds, substSpec, substRulesForImportedIds,
substTy, substCo, substExpr, substExprSC, substBind, substBindSC,
substUnfolding, substUnfoldingSC,
lookupIdSubst, lookupTvSubst, lookupCvSubst, substIdOcc,
substTickish, substVarSet,
-- ** Operations on substitutions
emptySubst, mkEmptySubst, mkSubst, mkOpenSubst, substInScope, isEmptySubst,
extendIdSubst, extendIdSubstList, extendTvSubst, extendTvSubstList,
extendCvSubst, extendCvSubstList,
extendSubst, extendSubstList, extendSubstWithVar, zapSubstEnv,
addInScopeSet, extendInScope, extendInScopeList, extendInScopeIds,
isInScope, setInScope,
delBndr, delBndrs,
-- ** Substituting and cloning binders
substBndr, substBndrs, substRecBndrs,
cloneBndr, cloneBndrs, cloneIdBndr, cloneIdBndrs, cloneRecIdBndrs,
-- ** Simple expression optimiser
simpleOptPgm, simpleOptExpr, simpleOptExprWith,
exprIsConApp_maybe, exprIsLiteral_maybe, exprIsLambda_maybe,
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreSeq
import CoreUtils
import Literal ( Literal(MachStr) )
import qualified Data.ByteString as BS
import OccurAnal( occurAnalyseExpr, occurAnalysePgm )
import qualified Type
import qualified Coercion
-- We are defining local versions
import Type hiding ( substTy, extendTvSubst, extendTvSubstList
, isInScope, substTyVarBndr, cloneTyVarBndr )
import Coercion hiding ( substTy, substCo, extendTvSubst, substTyVarBndr, substCoVarBndr )
import TyCon ( tyConArity )
import DataCon
import PrelNames ( eqBoxDataConKey, coercibleDataConKey, unpackCStringIdKey
, unpackCStringUtf8IdKey )
import OptCoercion ( optCoercion )
import PprCore ( pprCoreBindings, pprRules )
import Module ( Module )
import VarSet
import VarEnv
import Id
import Name ( Name )
import Var
import IdInfo
import Unique
import UniqSupply
import Maybes
import ErrUtils
import DynFlags
import BasicTypes ( isAlwaysActive )
import Util
import Pair
import Outputable
import PprCore () -- Instances
import FastString
import Data.List
import TysWiredIn
{-
************************************************************************
* *
\subsection{Substitutions}
* *
************************************************************************
-}
-- | A substitution environment, containing both 'Id' and 'TyVar' substitutions.
--
-- Some invariants apply to how you use the substitution:
--
-- 1. #in_scope_invariant# The in-scope set contains at least those 'Id's and 'TyVar's that will be in scope /after/
-- applying the substitution to a term. Precisely, the in-scope set must be a superset of the free vars of the
-- substitution range that might possibly clash with locally-bound variables in the thing being substituted in.
--
-- 2. #apply_once# You may apply the substitution only /once/
--
-- There are various ways of setting up the in-scope set such that the first of these invariants hold:
--
-- * Arrange that the in-scope set really is all the things in scope
--
-- * Arrange that it's the free vars of the range of the substitution
--
-- * Make it empty, if you know that all the free vars of the substitution are fresh, and hence can't possibly clash
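--
-- For example, when substituting @[x :-> y]@ into @\y. x@ we must not
-- produce the capturing @\y. y@. Because @y@ is in the in-scope set,
-- 'substBndr' freshens the lambda-bound @y@ first, giving something like
-- @\y1. y@. (A sketch of why invariant 1 matters.)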
data Subst
  = Subst InScopeSet  -- Variables in scope (both Ids and TyVars) /after/
-- applying the substitution
IdSubstEnv -- Substitution for Ids
TvSubstEnv -- Substitution from TyVars to Types
CvSubstEnv -- Substitution from CoVars to Coercions
-- INVARIANT 1: See #in_scope_invariant#
-- This is what lets us deal with name capture properly
-- It's a hard invariant to check...
--
-- INVARIANT 2: The substitution is apply-once; see Note [Apply once] with
-- Types.TvSubstEnv
--
-- INVARIANT 3: See Note [Extending the Subst]
{-
Note [Extending the Subst]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a core Subst, which binds Ids as well, we make a different choice for Ids
than we do for TyVars.
For TyVars, see Note [Extending the TvSubst] with Type.TvSubstEnv
For Ids, we have a different invariant
The IdSubstEnv is extended *only* when the Unique on an Id changes
Otherwise, we just extend the InScopeSet
In consequence:
* If the TvSubstEnv and IdSubstEnv are both empty, substExpr would be a
no-op, so substExprSC ("short cut") does nothing.
However, substExpr still goes ahead and substitutes. Reason: we may
want to replace existing Ids with new ones from the in-scope set, to
avoid space leaks.
* In substIdBndr, we extend the IdSubstEnv only when the unique changes
* If the CvSubstEnv, TvSubstEnv and IdSubstEnv are all empty,
substExpr does nothing (Note that the above rule for substIdBndr
maintains this property. If the incoming envts are both empty, then
substituting the type and IdInfo can't change anything.)
* In lookupIdSubst, we *must* look up the Id in the in-scope set, because
it may contain non-trivial changes. Example:
(/\a. \x:a. ...x...) Int
We extend the TvSubstEnv with [a |-> Int]; but x's unique does not change
so we only extend the in-scope set. Then we must look up in the in-scope
set when we find the occurrence of x.
* The requirement to look up the Id in the in-scope set means that we
  must NOT take the no-op short cut when the IdSubst is empty.
We must still look up every Id in the in-scope set.
* (However, we don't need to do so for expressions found in the IdSubst
itself, whose range is assumed to be correct wrt the in-scope set.)
Why do we make a different choice for the IdSubstEnv than the
TvSubstEnv and CvSubstEnv?
* For Ids, we change the IdInfo all the time (e.g. deleting the
unfolding), and adding it back later, so using the TyVar convention
would entail extending the substitution almost all the time
* The simplifier wants to look up in the in-scope set anyway, in case it
can see a better unfolding from an enclosing case expression
* For TyVars, only coercion variables can possibly change, and they are
easy to spot
-}
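{- An illustrative sketch (not part of the original note): using the API defined
   below, a caller that wants to substitute e2 for x inside e1 (once!) would set
   up the in-scope set to cover the free variables involved, satisfying
   invariant #in_scope_invariant#:

        let in_scope = mkInScopeSet (exprFreeVars e1 `unionVarSet` exprFreeVars e2)
            subst    = extendIdSubst (mkEmptySubst in_scope) x e2
        in  substExpr (text "example") subst e1
-}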
-- | An environment for substituting for 'Id's
type IdSubstEnv = IdEnv CoreExpr
----------------------------
isEmptySubst :: Subst -> Bool
isEmptySubst (Subst _ id_env tv_env cv_env)
= isEmptyVarEnv id_env && isEmptyVarEnv tv_env && isEmptyVarEnv cv_env
emptySubst :: Subst
emptySubst = Subst emptyInScopeSet emptyVarEnv emptyVarEnv emptyVarEnv
mkEmptySubst :: InScopeSet -> Subst
mkEmptySubst in_scope = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv
mkSubst :: InScopeSet -> TvSubstEnv -> CvSubstEnv -> IdSubstEnv -> Subst
mkSubst in_scope tvs cvs ids = Subst in_scope ids tvs cvs
-- | Find the in-scope set: see "CoreSubst#in_scope_invariant"
substInScope :: Subst -> InScopeSet
substInScope (Subst in_scope _ _ _) = in_scope
-- | Remove all substitutions for 'Id's and 'Var's that might have been built up
-- while preserving the in-scope set
zapSubstEnv :: Subst -> Subst
zapSubstEnv (Subst in_scope _ _ _) = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv
-- | Add a substitution for an 'Id' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendIdSubst :: Subst -> Id -> CoreExpr -> Subst
-- ToDo: add an ASSERT that fvs(subst-result) is already in the in-scope set
extendIdSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope (extendVarEnv ids v r) tvs cvs
-- | Adds multiple 'Id' substitutions to the 'Subst': see also 'extendIdSubst'
extendIdSubstList :: Subst -> [(Id, CoreExpr)] -> Subst
extendIdSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope (extendVarEnvList ids prs) tvs cvs
-- | Add a substitution for a 'TyVar' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendTvSubst :: Subst -> TyVar -> Type -> Subst
extendTvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids (extendVarEnv tvs v r) cvs
-- | Adds multiple 'TyVar' substitutions to the 'Subst': see also 'extendTvSubst'
extendTvSubstList :: Subst -> [(TyVar,Type)] -> Subst
extendTvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids (extendVarEnvList tvs prs) cvs
-- | Add a substitution from a 'CoVar' to a 'Coercion' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendCvSubst :: Subst -> CoVar -> Coercion -> Subst
extendCvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids tvs (extendVarEnv cvs v r)
-- | Adds multiple 'CoVar' -> 'Coercion' substitutions to the
-- 'Subst': see also 'extendCvSubst'
extendCvSubstList :: Subst -> [(CoVar,Coercion)] -> Subst
extendCvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids tvs (extendVarEnvList cvs prs)
-- | Add a substitution appropriate to the thing being substituted
-- (whether an expression, type, or coercion). See also
-- 'extendIdSubst', 'extendTvSubst', and 'extendCvSubst'.
extendSubst :: Subst -> Var -> CoreArg -> Subst
extendSubst subst var arg
= case arg of
Type ty -> ASSERT( isTyVar var ) extendTvSubst subst var ty
Coercion co -> ASSERT( isCoVar var ) extendCvSubst subst var co
_ -> ASSERT( isId var ) extendIdSubst subst var arg
extendSubstWithVar :: Subst -> Var -> Var -> Subst
extendSubstWithVar subst v1 v2
| isTyVar v1 = ASSERT( isTyVar v2 ) extendTvSubst subst v1 (mkTyVarTy v2)
| isCoVar v1 = ASSERT( isCoVar v2 ) extendCvSubst subst v1 (mkCoVarCo v2)
| otherwise = ASSERT( isId v2 ) extendIdSubst subst v1 (Var v2)
-- | Add a substitution as appropriate to each of the terms being
-- substituted (whether expressions, types, or coercions). See also
-- 'extendSubst'.
extendSubstList :: Subst -> [(Var,CoreArg)] -> Subst
extendSubstList subst [] = subst
extendSubstList subst ((var,rhs):prs) = extendSubstList (extendSubst subst var rhs) prs
-- | Find the substitution for an 'Id' in the 'Subst'
lookupIdSubst :: SDoc -> Subst -> Id -> CoreExpr
lookupIdSubst doc (Subst in_scope ids _ _) v
| not (isLocalId v) = Var v
| Just e <- lookupVarEnv ids v = e
| Just v' <- lookupInScope in_scope v = Var v'
-- Vital! See Note [Extending the Subst]
| otherwise = WARN( True, ptext (sLit "CoreSubst.lookupIdSubst") <+> doc <+> ppr v
$$ ppr in_scope)
Var v
-- | Find the substitution for a 'TyVar' in the 'Subst'
lookupTvSubst :: Subst -> TyVar -> Type
lookupTvSubst (Subst _ _ tvs _) v = ASSERT( isTyVar v) lookupVarEnv tvs v `orElse` Type.mkTyVarTy v
-- | Find the coercion substitution for a 'CoVar' in the 'Subst'
lookupCvSubst :: Subst -> CoVar -> Coercion
lookupCvSubst (Subst _ _ _ cvs) v = ASSERT( isCoVar v ) lookupVarEnv cvs v `orElse` mkCoVarCo v
delBndr :: Subst -> Var -> Subst
delBndr (Subst in_scope ids tvs cvs) v
| isCoVar v = Subst in_scope ids tvs (delVarEnv cvs v)
| isTyVar v = Subst in_scope ids (delVarEnv tvs v) cvs
| otherwise = Subst in_scope (delVarEnv ids v) tvs cvs
delBndrs :: Subst -> [Var] -> Subst
delBndrs (Subst in_scope ids tvs cvs) vs
= Subst in_scope (delVarEnvList ids vs) (delVarEnvList tvs vs) (delVarEnvList cvs vs)
-- Easiest thing is just delete all from all!
-- | Simultaneously substitute for a bunch of variables
-- No left-right shadowing
-- ie the substitution for (\x \y. e) a1 a2
-- so neither x nor y scope over a1 a2
mkOpenSubst :: InScopeSet -> [(Var,CoreArg)] -> Subst
mkOpenSubst in_scope pairs = Subst in_scope
(mkVarEnv [(id,e) | (id, e) <- pairs, isId id])
(mkVarEnv [(tv,ty) | (tv, Type ty) <- pairs])
(mkVarEnv [(v,co) | (v, Coercion co) <- pairs])
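-- Illustrative sketch (not in the original source): for the beta-reduction
-- described above, a caller could build the simultaneous substitution as
--      mkOpenSubst in_scope [(x, a1), (y, a2)]
-- and apply it exactly once with substExpr; because the substitution is
-- simultaneous, a1 and a2 themselves are not substituted into.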
------------------------------
isInScope :: Var -> Subst -> Bool
isInScope v (Subst in_scope _ _ _) = v `elemInScopeSet` in_scope
-- | Add the 'Var's to the in-scope set, but do not remove
-- any existing substitutions for them
addInScopeSet :: Subst -> VarSet -> Subst
addInScopeSet (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetSet` vs) ids tvs cvs
-- | Add the 'Var' to the in-scope set; as a side effect,
-- remove any existing substitutions for it
extendInScope :: Subst -> Var -> Subst
extendInScope (Subst in_scope ids tvs cvs) v
= Subst (in_scope `extendInScopeSet` v)
(ids `delVarEnv` v) (tvs `delVarEnv` v) (cvs `delVarEnv` v)
-- | Add the 'Var's to the in-scope set: see also 'extendInScope'
extendInScopeList :: Subst -> [Var] -> Subst
extendInScopeList (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetList` vs)
(ids `delVarEnvList` vs) (tvs `delVarEnvList` vs) (cvs `delVarEnvList` vs)
-- | Optimized version of 'extendInScopeList' that can be used if you are certain
-- all the things being added are 'Id's and hence none are 'TyVar's or 'CoVar's
extendInScopeIds :: Subst -> [Id] -> Subst
extendInScopeIds (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetList` vs)
(ids `delVarEnvList` vs) tvs cvs
setInScope :: Subst -> InScopeSet -> Subst
setInScope (Subst _ ids tvs cvs) in_scope = Subst in_scope ids tvs cvs
-- Pretty printing, for debugging only
instance Outputable Subst where
ppr (Subst in_scope ids tvs cvs)
= ptext (sLit "<InScope =") <+> braces (fsep (map ppr (varEnvElts (getInScopeVars in_scope))))
$$ ptext (sLit " IdSubst =") <+> ppr ids
$$ ptext (sLit " TvSubst =") <+> ppr tvs
$$ ptext (sLit " CvSubst =") <+> ppr cvs
<> char '>'
{-
************************************************************************
* *
Substituting expressions
* *
************************************************************************
-}
-- | Apply a substitution to an entire 'CoreExpr'. Remember, you may only
-- apply the substitution /once/: see "CoreSubst#apply_once"
--
-- Do *not* attempt to short-cut in the case of an empty substitution!
-- See Note [Extending the Subst]
substExprSC :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExprSC _doc subst orig_expr
| isEmptySubst subst = orig_expr
| otherwise = -- pprTrace "enter subst-expr" (doc $$ ppr orig_expr) $
subst_expr subst orig_expr
substExpr :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExpr _doc subst orig_expr = subst_expr subst orig_expr
subst_expr :: Subst -> CoreExpr -> CoreExpr
subst_expr subst expr
= go expr
where
go (Var v) = lookupIdSubst (text "subst_expr") subst v
go (Type ty) = Type (substTy subst ty)
go (Coercion co) = Coercion (substCo subst co)
go (Lit lit) = Lit lit
go (App fun arg) = App (go fun) (go arg)
go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
go (Cast e co) = Cast (go e) (substCo subst co)
-- Do not optimise even identity coercions
-- Reason: substitution applies to the LHS of RULES, and
-- if you "optimise" an identity coercion, you may
-- lose a binder. We optimise the LHS of rules at
-- construction time
go (Lam bndr body) = Lam bndr' (subst_expr subst' body)
where
(subst', bndr') = substBndr subst bndr
go (Let bind body) = Let bind' (subst_expr subst' body)
where
(subst', bind') = substBind subst bind
go (Case scrut bndr ty alts) = Case (go scrut) bndr' (substTy subst ty) (map (go_alt subst') alts)
where
(subst', bndr') = substBndr subst bndr
go_alt subst (con, bndrs, rhs) = (con, bndrs', subst_expr subst' rhs)
where
(subst', bndrs') = substBndrs subst bndrs
-- | Apply a substitution to an entire 'CoreBind', additionally returning an updated 'Subst'
-- that should be used by subsequent substitutions.
substBind, substBindSC :: Subst -> CoreBind -> (Subst, CoreBind)
substBindSC subst bind -- Short-cut if the substitution is empty
| not (isEmptySubst subst)
= substBind subst bind
| otherwise
= case bind of
NonRec bndr rhs -> (subst', NonRec bndr' rhs)
where
(subst', bndr') = substBndr subst bndr
Rec pairs -> (subst', Rec (bndrs' `zip` rhss'))
where
(bndrs, rhss) = unzip pairs
(subst', bndrs') = substRecBndrs subst bndrs
rhss' | isEmptySubst subst' = rhss
| otherwise = map (subst_expr subst') rhss
substBind subst (NonRec bndr rhs) = (subst', NonRec bndr' (subst_expr subst rhs))
where
(subst', bndr') = substBndr subst bndr
substBind subst (Rec pairs) = (subst', Rec (bndrs' `zip` rhss'))
where
(bndrs, rhss) = unzip pairs
(subst', bndrs') = substRecBndrs subst bndrs
rhss' = map (subst_expr subst') rhss
-- | De-shadowing the program is sometimes a useful pre-pass. It can be done simply
-- by running over the bindings with an empty substitution, because substitution
-- returns a result that has no-shadowing guaranteed.
--
-- (Actually, within a single /type/ there might still be shadowing, because
-- 'substTy' is a no-op for the empty substitution, but that's probably OK.)
--
-- [Aug 09] This function is not used in GHC at the moment, but seems so
-- short and simple that I'm going to leave it here
deShadowBinds :: CoreProgram -> CoreProgram
deShadowBinds binds = snd (mapAccumL substBind emptySubst binds)
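-- Illustrative example (not from the original source): given a top-level binding
--      f = \x -> let x = g x in x
-- de-shadowing clones the inner binder (it gets a fresh unique), giving e.g.
--      f = \x -> let x1 = g x in x1
-- Note that the NonRec right-hand side (g x) still refers to the outer x.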
{-
************************************************************************
* *
Substituting binders
* *
************************************************************************
Remember that substBndr and friends are used when doing expression
substitution only. Their only business is substitution, so they
preserve all IdInfo (suitably substituted). For example, we *want* to
preserve occ info in rules.
-}
-- | Substitutes a 'Var' for another one according to the 'Subst' given, returning
-- the result and an updated 'Subst' that should be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into appropriately.
substBndr :: Subst -> Var -> (Subst, Var)
substBndr subst bndr
| isTyVar bndr = substTyVarBndr subst bndr
| isCoVar bndr = substCoVarBndr subst bndr
| otherwise = substIdBndr (text "var-bndr") subst subst bndr
-- | Applies 'substBndr' to a number of 'Var's, accumulating a new 'Subst' left-to-right
substBndrs :: Subst -> [Var] -> (Subst, [Var])
substBndrs subst bndrs = mapAccumL substBndr subst bndrs
-- | Substitute in a mutually recursive group of 'Id's
substRecBndrs :: Subst -> [Id] -> (Subst, [Id])
substRecBndrs subst bndrs
= (new_subst, new_bndrs)
where -- Here's the reason we need to pass rec_subst to subst_id
(new_subst, new_bndrs) = mapAccumL (substIdBndr (text "rec-bndr") new_subst) subst bndrs
substIdBndr :: SDoc
-> Subst -- ^ Substitution to use for the IdInfo
-> Subst -> Id -- ^ Substitution and Id to transform
-> (Subst, Id) -- ^ Transformed pair
-- NB: unfolding may be zapped
substIdBndr _doc rec_subst subst@(Subst in_scope env tvs cvs) old_id
= -- pprTrace "substIdBndr" (doc $$ ppr old_id $$ ppr in_scope) $
(Subst (in_scope `extendInScopeSet` new_id) new_env tvs cvs, new_id)
where
id1 = uniqAway in_scope old_id -- id1 is cloned if necessary
id2 | no_type_change = id1
| otherwise = setIdType id1 (substTy subst old_ty)
old_ty = idType old_id
no_type_change = isEmptyVarEnv tvs ||
isEmptyVarSet (Type.tyVarsOfType old_ty)
-- new_id has the right IdInfo
-- The lazy-set is because we're in a loop here, with
-- rec_subst, when dealing with a mutually-recursive group
new_id = maybeModifyIdInfo mb_new_info id2
mb_new_info = substIdInfo rec_subst id2 (idInfo id2)
-- NB: unfolding info may be zapped
-- Extend the substitution if the unique has changed
-- See the notes with substTyVarBndr for the delVarEnv
new_env | no_change = delVarEnv env old_id
| otherwise = extendVarEnv env old_id (Var new_id)
no_change = id1 == old_id
-- See Note [Extending the Subst]
-- it's /not/ necessary to check mb_new_info and no_type_change
{-
Now a variant that unconditionally allocates a new unique.
It also unconditionally zaps the OccInfo.
-}
-- | Very similar to 'substBndr', but it always allocates a new 'Unique' for
-- each variable in its output. It substitutes the IdInfo though.
cloneIdBndr :: Subst -> UniqSupply -> Id -> (Subst, Id)
cloneIdBndr subst us old_id
= clone_id subst subst (old_id, uniqFromSupply us)
-- | Applies 'cloneIdBndr' to a number of 'Id's, accumulating a final
-- substitution from left to right
cloneIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneIdBndrs subst us ids
= mapAccumL (clone_id subst) subst (ids `zip` uniqsFromSupply us)
cloneBndrs :: Subst -> UniqSupply -> [Var] -> (Subst, [Var])
-- Works for all kinds of variables (typically case binders)
-- not just Ids
cloneBndrs subst us vs
= mapAccumL (\subst (v, u) -> cloneBndr subst u v) subst (vs `zip` uniqsFromSupply us)
cloneBndr :: Subst -> Unique -> Var -> (Subst, Var)
cloneBndr subst uniq v
| isTyVar v = cloneTyVarBndr subst v uniq
| otherwise = clone_id subst subst (v,uniq) -- Works for coercion variables too
-- | Clone a mutually recursive group of 'Id's
cloneRecIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneRecIdBndrs subst us ids
= (subst', ids')
where
(subst', ids') = mapAccumL (clone_id subst') subst
(ids `zip` uniqsFromSupply us)
-- Just like substIdBndr, except that it always makes a new unique
-- It is given the unique to use
clone_id :: Subst -- Substitution for the IdInfo
-> Subst -> (Id, Unique) -- Substitution and Id to transform
-> (Subst, Id) -- Transformed pair
clone_id rec_subst subst@(Subst in_scope idvs tvs cvs) (old_id, uniq)
= (Subst (in_scope `extendInScopeSet` new_id) new_idvs tvs new_cvs, new_id)
where
id1 = setVarUnique old_id uniq
id2 = substIdType subst id1
new_id = maybeModifyIdInfo (substIdInfo rec_subst id2 (idInfo old_id)) id2
(new_idvs, new_cvs) | isCoVar old_id = (idvs, extendVarEnv cvs old_id (mkCoVarCo new_id))
| otherwise = (extendVarEnv idvs old_id (Var new_id), cvs)
{-
************************************************************************
* *
Types and Coercions
* *
************************************************************************
For types and coercions we just call the corresponding functions in
Type and Coercion, but we have to repackage the substitution, from a
Subst to a TvSubst.
-}
substTyVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substTyVarBndr (Subst in_scope id_env tv_env cv_env) tv
= case Type.substTyVarBndr (TvSubst in_scope tv_env) tv of
(TvSubst in_scope' tv_env', tv')
-> (Subst in_scope' id_env tv_env' cv_env, tv')
cloneTyVarBndr :: Subst -> TyVar -> Unique -> (Subst, TyVar)
cloneTyVarBndr (Subst in_scope id_env tv_env cv_env) tv uniq
= case Type.cloneTyVarBndr (TvSubst in_scope tv_env) tv uniq of
(TvSubst in_scope' tv_env', tv')
-> (Subst in_scope' id_env tv_env' cv_env, tv')
substCoVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substCoVarBndr (Subst in_scope id_env tv_env cv_env) cv
= case Coercion.substCoVarBndr (CvSubst in_scope tv_env cv_env) cv of
(CvSubst in_scope' tv_env' cv_env', cv')
-> (Subst in_scope' id_env tv_env' cv_env', cv')
-- | See 'Type.substTy'
substTy :: Subst -> Type -> Type
substTy subst ty = Type.substTy (getTvSubst subst) ty
getTvSubst :: Subst -> TvSubst
getTvSubst (Subst in_scope _ tenv _) = TvSubst in_scope tenv
getCvSubst :: Subst -> CvSubst
getCvSubst (Subst in_scope _ tenv cenv) = CvSubst in_scope tenv cenv
-- | See 'Coercion.substCo'
substCo :: Subst -> Coercion -> Coercion
substCo subst co = Coercion.substCo (getCvSubst subst) co
{-
************************************************************************
* *
\section{IdInfo substitution}
* *
************************************************************************
-}
substIdType :: Subst -> Id -> Id
substIdType subst@(Subst _ _ tv_env cv_env) id
| (isEmptyVarEnv tv_env && isEmptyVarEnv cv_env) || isEmptyVarSet (Type.tyVarsOfType old_ty) = id
| otherwise = setIdType id (substTy subst old_ty)
-- The tyVarsOfType is cheaper than it looks
-- because we cache the free tyvars of the type
-- in a Note in the id's type itself
where
old_ty = idType id
------------------
-- | Substitute into some 'IdInfo' with regard to the supplied new 'Id'.
substIdInfo :: Subst -> Id -> IdInfo -> Maybe IdInfo
substIdInfo subst new_id info
| nothing_to_do = Nothing
| otherwise = Just (info `setSpecInfo` substSpec subst new_id old_rules
`setUnfoldingInfo` substUnfolding subst old_unf)
where
old_rules = specInfo info
old_unf = unfoldingInfo info
nothing_to_do = isEmptySpecInfo old_rules && isClosedUnfolding old_unf
------------------
-- | Substitutes for the 'Id's within an unfolding
substUnfolding, substUnfoldingSC :: Subst -> Unfolding -> Unfolding
-- Seq'ing on the returned Unfolding is enough to cause
-- all the substitutions to happen completely
substUnfoldingSC subst unf -- Short-cut version
| isEmptySubst subst = unf
| otherwise = substUnfolding subst unf
substUnfolding subst df@(DFunUnfolding { df_bndrs = bndrs, df_args = args })
= df { df_bndrs = bndrs', df_args = args' }
where
(subst',bndrs') = substBndrs subst bndrs
args' = map (substExpr (text "subst-unf:dfun") subst') args
substUnfolding subst unf@(CoreUnfolding { uf_tmpl = tmpl, uf_src = src })
-- Retain an InlineRule!
| not (isStableSource src) -- Zap an unstable unfolding, to save substitution work
= NoUnfolding
| otherwise -- But keep a stable one!
= seqExpr new_tmpl `seq`
unf { uf_tmpl = new_tmpl }
where
new_tmpl = substExpr (text "subst-unf") subst tmpl
substUnfolding _ unf = unf -- NoUnfolding, OtherCon
------------------
substIdOcc :: Subst -> Id -> Id
-- These Ids should not be substituted to non-Ids
substIdOcc subst v = case lookupIdSubst (text "substIdOcc") subst v of
Var v' -> v'
other -> pprPanic "substIdOcc" (vcat [ppr v <+> ppr other, ppr subst])
------------------
-- | Substitutes for the 'Id's within the 'SpecInfo' given the new function 'Id'
substSpec :: Subst -> Id -> SpecInfo -> SpecInfo
substSpec subst new_id (SpecInfo rules rhs_fvs)
= seqSpecInfo new_spec `seq` new_spec
where
subst_ru_fn = const (idName new_id)
new_spec = SpecInfo (map (substRule subst subst_ru_fn) rules)
(substVarSet subst rhs_fvs)
------------------
substRulesForImportedIds :: Subst -> [CoreRule] -> [CoreRule]
substRulesForImportedIds subst rules
= map (substRule subst not_needed) rules
where
not_needed name = pprPanic "substRulesForImportedIds" (ppr name)
------------------
substRule :: Subst -> (Name -> Name) -> CoreRule -> CoreRule
-- The subst_ru_fn argument is applied to substitute the ru_fn field
-- of the rule:
-- - Rules for *imported* Ids never change ru_fn
-- - Rules for *local* Ids are in the IdInfo for that Id,
-- and the ru_fn field is simply replaced by the new name
-- of the Id
substRule _ _ rule@(BuiltinRule {}) = rule
substRule subst subst_ru_fn rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs
, ru_local = is_local })
= rule { ru_bndrs = bndrs'
, ru_fn = if is_local
then subst_ru_fn fn_name
else fn_name
, ru_args = map (substExpr doc subst') args
, ru_rhs = substExpr (text "foo") subst' rhs }
-- Do NOT optimise the RHS (previously we did simplOptExpr here)
-- See Note [Substitute lazily]
where
doc = ptext (sLit "subst-rule") <+> ppr fn_name
(subst', bndrs') = substBndrs subst bndrs
------------------
substVects :: Subst -> [CoreVect] -> [CoreVect]
substVects subst = map (substVect subst)
------------------
substVect :: Subst -> CoreVect -> CoreVect
substVect subst (Vect v rhs) = Vect v (simpleOptExprWith subst rhs)
substVect _subst vd@(NoVect _) = vd
substVect _subst vd@(VectType _ _ _) = vd
substVect _subst vd@(VectClass _) = vd
substVect _subst vd@(VectInst _) = vd
------------------
substVarSet :: Subst -> VarSet -> VarSet
substVarSet subst fvs
= foldVarSet (unionVarSet . subst_fv subst) emptyVarSet fvs
where
subst_fv subst fv
| isId fv = exprFreeVars (lookupIdSubst (text "substVarSet") subst fv)
| otherwise = Type.tyVarsOfType (lookupTvSubst subst fv)
------------------
substTickish :: Subst -> Tickish Id -> Tickish Id
substTickish subst (Breakpoint n ids) = Breakpoint n (map do_one ids)
where do_one = getIdFromTrivialExpr . lookupIdSubst (text "subst_tickish") subst
substTickish _subst other = other
{- Note [Substitute lazily]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The functions that substitute over IdInfo must be pretty lazy, because
they are knot-tied by substRecBndrs.
One case in point was Trac #10627 in which a rule for a function 'f'
referred to 'f' (at a different type) on the RHS. But instead of just
substituting in the rhs of the rule, we were calling simpleOptExpr, which
looked at the idInfo for 'f'; result <<loop>>.
In any case we don't need to optimise the RHS of rules, or unfoldings,
because the simplifier will do that.
Note [substTickish]
~~~~~~~~~~~~~~~~~~~~~~
A Breakpoint contains a list of Ids. What happens if we ever want to
substitute an expression for one of these Ids?
First, we ensure that we only ever substitute trivial expressions for
these Ids, by marking them as NoOccInfo in the occurrence analyser.
Then, when substituting for the Id, we unwrap any type applications
and abstractions to get back to an Id, with getIdFromTrivialExpr.
Second, we have to ensure that we never try to substitute a literal
for an Id in a breakpoint. We ensure this by never storing an Id with
an unlifted type in a Breakpoint - see Coverage.mkTickish.
Breakpoints can't handle free variables with unlifted types anyway.
-}
{-
Note [Worker inlining]
~~~~~~~~~~~~~~~~~~~~~~
A worker can get substituted away entirely.
- it might be trivial
- it might simply be very small
We do not treat an InlWrapper as an 'occurrence' in the occurrence
analyser, so it's possible that the worker is not even in scope any more.
In all these cases we simply drop the special case, returning to
InlVanilla. The WARN is just so I can see if it happens a lot.
************************************************************************
* *
The Very Simple Optimiser
* *
************************************************************************
Note [Optimise coercion boxes aggressively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The simple expression optimiser needs to deal with Eq# boxes as follows:
1. If the result of optimising the RHS of a non-recursive binding is an
Eq# box, that box is substituted rather than turned into a let, just as
if it were trivial.
let eqv = Eq# co in e ==> e[Eq# co/eqv]
2. If the result of optimising a case scrutinee is a Eq# box and the case
deconstructs it in a trivial way, we evaluate the case then and there.
case Eq# co of Eq# cov -> e ==> e[co/cov]
We do this for two reasons:
1. Bindings/case scrutinisation of this form is often created by the
evidence-binding mechanism and we need them to be inlined to be able
desugar RULE LHSes that involve equalities (see e.g. T2291)
2. The test T4356 fails Lint because it creates a coercion between types
of kind (* -> * -> *) and (?? -> ? -> *), which differ. If we do this
inlining aggressively we can collapse away the intermediate coercion between
these two types and hence pass Lint again. (This is a sort of a hack.)
In fact, our implementation uses a slightly liberalised version of the second
rule so that the optimisations are a bit more generally applicable. Precisely:
2a. We reduce any situation where we can spot a case-of-known-constructor
As a result, the only time we should get residual coercion boxes in the code is
when the type checker generates something like:
\eqv -> let eqv' = Eq# (case eqv of Eq# cov -> ... cov ...)
However, the case of lambda-bound equality evidence is fairly rare, so these two
rules should suffice for solving the rule LHS problem for now.
Annoyingly, we cannot use this modified rule 1a instead of 1:
1a. If we come across a let-bound constructor application with trivial arguments,
add an appropriate unfolding to the let binder. We spot constructor applications
by using exprIsConApp_maybe, so this would actually let rule 2a reduce more.
The reason is that we REALLY NEED coercion boxes to be substituted away. With rule 1a
we wouldn't simplify this expression at all:
let eqv = Eq# co
in foo eqv (bar eqv)
The rule LHS desugarer can't deal with Let at all, so we need to push that box into
the use sites.
-}
simpleOptExpr :: CoreExpr -> CoreExpr
-- Do simple optimisation on an expression
-- The optimisation is very straightforward: just
-- inline non-recursive bindings that are used only once,
-- or where the RHS is trivial
--
-- We also inline bindings that bind a Eq# box: see
-- See Note [Optimise coercion boxes aggressively].
--
-- The result is NOT guaranteed occurrence-analysed, because
-- in (let x = y in ....) we substitute for x; so y's occ-info
-- may change radically
simpleOptExpr expr
= -- pprTrace "simpleOptExpr" (ppr init_subst $$ ppr expr)
simpleOptExprWith init_subst expr
where
init_subst = mkEmptySubst (mkInScopeSet (exprFreeVars expr))
-- It's potentially important to make a proper in-scope set
-- Consider let x = ..y.. in \y. ...x...
-- Then we should remember to clone y before substituting
-- for x. It's very unlikely to occur, because we probably
-- won't *be* substituting for x if it occurs inside a
-- lambda.
--
-- It's a bit painful to call exprFreeVars, because it makes
-- three passes instead of two (occ-anal, and go)
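-- Illustrative examples of the transformation (not from the original source):
--      simpleOptExpr (let x = y in Just x)        ==>  Just y   -- trivial RHS
--      simpleOptExpr (let f = \v -> v in f True)  ==>  True     -- single occurrence, then beta-reduced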
simpleOptExprWith :: Subst -> InExpr -> OutExpr
simpleOptExprWith subst expr = simple_opt_expr subst (occurAnalyseExpr expr)
----------------------
simpleOptPgm :: DynFlags -> Module
-> CoreProgram -> [CoreRule] -> [CoreVect]
-> IO (CoreProgram, [CoreRule], [CoreVect])
simpleOptPgm dflags this_mod binds rules vects
= do { dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
(pprCoreBindings occ_anald_binds $$ pprRules rules );
; return (reverse binds', substRulesForImportedIds subst' rules, substVects subst' vects) }
where
occ_anald_binds = occurAnalysePgm this_mod (\_ -> False) {- No rules active -}
rules vects emptyVarEnv binds
(subst', binds') = foldl do_one (emptySubst, []) occ_anald_binds
do_one (subst, binds') bind
= case simple_opt_bind subst bind of
(subst', Nothing) -> (subst', binds')
(subst', Just bind') -> (subst', bind':binds')
----------------------
type InVar = Var
type OutVar = Var
type InId = Id
type OutId = Id
type InExpr = CoreExpr
type OutExpr = CoreExpr
-- In these functions the substitution maps InVar -> OutExpr
----------------------
simple_opt_expr :: Subst -> InExpr -> OutExpr
simple_opt_expr subst expr
= go expr
where
in_scope_env = (substInScope subst, simpleUnfoldingFun)
go (Var v) = lookupIdSubst (text "simpleOptExpr") subst v
go (App e1 e2) = simple_app subst e1 [go e2]
go (Type ty) = Type (substTy subst ty)
go (Coercion co) = Coercion (optCoercion (getCvSubst subst) co)
go (Lit lit) = Lit lit
go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
go (Cast e co) | isReflCo co' = go e
| otherwise = Cast (go e) co'
where
co' = optCoercion (getCvSubst subst) co
go (Let bind body) = case simple_opt_bind subst bind of
(subst', Nothing) -> simple_opt_expr subst' body
(subst', Just bind) -> Let bind (simple_opt_expr subst' body)
go lam@(Lam {}) = go_lam [] subst lam
go (Case e b ty as)
-- See Note [Optimise coercion boxes aggressively]
| isDeadBinder b
, Just (con, _tys, es) <- exprIsConApp_maybe in_scope_env e'
, Just (altcon, bs, rhs) <- findAlt (DataAlt con) as
= case altcon of
DEFAULT -> go rhs
_ -> mkLets (catMaybes mb_binds) $ simple_opt_expr subst' rhs
where (subst', mb_binds) = mapAccumL simple_opt_out_bind subst
(zipEqual "simpleOptExpr" bs es)
| otherwise
= Case e' b' (substTy subst ty)
(map (go_alt subst') as)
where
e' = go e
(subst', b') = subst_opt_bndr subst b
----------------------
go_alt subst (con, bndrs, rhs)
= (con, bndrs', simple_opt_expr subst' rhs)
where
(subst', bndrs') = subst_opt_bndrs subst bndrs
----------------------
-- go_lam tries eta reduction
go_lam bs' subst (Lam b e)
= go_lam (b':bs') subst' e
where
(subst', b') = subst_opt_bndr subst b
go_lam bs' subst e
| Just etad_e <- tryEtaReduce bs e' = etad_e
| otherwise = mkLams bs e'
where
bs = reverse bs'
e' = simple_opt_expr subst e
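    -- (e.g., illustratively:  \x -> f x  eta-reduces to  f, provided x is not
    --  free in f and the other side conditions checked by tryEtaReduce hold)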
----------------------
-- simple_app collects arguments for beta reduction
simple_app :: Subst -> InExpr -> [OutExpr] -> CoreExpr
simple_app subst (App e1 e2) as
= simple_app subst e1 (simple_opt_expr subst e2 : as)
simple_app subst (Lam b e) (a:as)
= case maybe_substitute subst b a of
Just ext_subst -> simple_app ext_subst e as
Nothing -> Let (NonRec b2 a) (simple_app subst' e as)
where
(subst', b') = subst_opt_bndr subst b
b2 = add_info subst' b b'
simple_app subst (Var v) as
| isCompulsoryUnfolding (idUnfolding v)
, isAlwaysActive (idInlineActivation v)
-- See Note [Unfold compulsory unfoldings in LHSs]
= simple_app subst (unfoldingTemplate (idUnfolding v)) as
simple_app subst (Tick t e) as
-- Okay to do "(Tick t e) x ==> Tick t (e x)"?
| t `tickishScopesLike` SoftScope
= mkTick t $ simple_app subst e as
simple_app subst e as
= foldl App (simple_opt_expr subst e) as
----------------------
simple_opt_bind,simple_opt_bind' :: Subst -> CoreBind -> (Subst, Maybe CoreBind)
simple_opt_bind s b -- Can add trace stuff here
= simple_opt_bind' s b
simple_opt_bind' subst (Rec prs)
= (subst'', res_bind)
where
res_bind = Just (Rec (reverse rev_prs'))
(subst', bndrs') = subst_opt_bndrs subst (map fst prs)
(subst'', rev_prs') = foldl do_pr (subst', []) (prs `zip` bndrs')
do_pr (subst, prs) ((b,r), b')
= case maybe_substitute subst b r2 of
Just subst' -> (subst', prs)
Nothing -> (subst, (b2,r2):prs)
where
b2 = add_info subst b b'
r2 = simple_opt_expr subst r
simple_opt_bind' subst (NonRec b r)
= simple_opt_out_bind subst (b, simple_opt_expr subst r)
----------------------
simple_opt_out_bind :: Subst -> (InVar, OutExpr) -> (Subst, Maybe CoreBind)
simple_opt_out_bind subst (b, r')
| Just ext_subst <- maybe_substitute subst b r'
= (ext_subst, Nothing)
| otherwise
= (subst', Just (NonRec b2 r'))
where
(subst', b') = subst_opt_bndr subst b
b2 = add_info subst' b b'
----------------------
maybe_substitute :: Subst -> InVar -> OutExpr -> Maybe Subst
-- (maybe_substitute subst in_var out_rhs)
-- either extends subst with (in_var -> out_rhs)
-- or returns Nothing
maybe_substitute subst b r
| Type ty <- r -- let a::* = TYPE ty in <body>
= ASSERT( isTyVar b )
Just (extendTvSubst subst b ty)
| Coercion co <- r
= ASSERT( isCoVar b )
Just (extendCvSubst subst b co)
| isId b -- let x = e in <body>
, not (isCoVar b) -- See Note [Do not inline CoVars unconditionally]
-- in SimplUtils
, safe_to_inline (idOccInfo b)
, isAlwaysActive (idInlineActivation b) -- Note [Inline prag in simplOpt]
, not (isStableUnfolding (idUnfolding b))
, not (isExportedId b)
, not (isUnLiftedType (idType b)) || exprOkForSpeculation r
= Just (extendIdSubst subst b r)
| otherwise
= Nothing
where
-- Unconditionally safe to inline
safe_to_inline :: OccInfo -> Bool
safe_to_inline (IAmALoopBreaker {}) = False
safe_to_inline IAmDead = True
safe_to_inline (OneOcc in_lam one_br _) = (not in_lam && one_br) || trivial
safe_to_inline NoOccInfo = trivial
trivial | exprIsTrivial r = True
| (Var fun, args) <- collectArgs r
, Just dc <- isDataConWorkId_maybe fun
, dc `hasKey` eqBoxDataConKey || dc `hasKey` coercibleDataConKey
, all exprIsTrivial args = True -- See Note [Optimise coercion boxes aggressively]
| otherwise = False
----------------------
subst_opt_bndr :: Subst -> InVar -> (Subst, OutVar)
subst_opt_bndr subst bndr
| isTyVar bndr = substTyVarBndr subst bndr
| isCoVar bndr = substCoVarBndr subst bndr
| otherwise = subst_opt_id_bndr subst bndr
subst_opt_id_bndr :: Subst -> InId -> (Subst, OutId)
-- Nuke all fragile IdInfo, unfolding, and RULES;
-- it gets added back later by add_info
-- Rather like SimplEnv.substIdBndr
--
-- It's important to zap fragile OccInfo (which CoreSubst.substIdBndr
-- carefully does not do) because simplOptExpr invalidates it
subst_opt_id_bndr subst@(Subst in_scope id_subst tv_subst cv_subst) old_id
= (Subst new_in_scope new_id_subst tv_subst cv_subst, new_id)
where
id1 = uniqAway in_scope old_id
id2 = setIdType id1 (substTy subst (idType old_id))
new_id = zapFragileIdInfo id2 -- Zaps rules, worker-info, unfolding
-- and fragile OccInfo
new_in_scope = in_scope `extendInScopeSet` new_id
-- Extend the substitution if the unique has changed,
-- or there's some useful occurrence information
-- See the notes with substTyVarBndr for the delSubstEnv
new_id_subst | new_id /= old_id
= extendVarEnv id_subst old_id (Var new_id)
| otherwise
= delVarEnv id_subst old_id
----------------------
subst_opt_bndrs :: Subst -> [InVar] -> (Subst, [OutVar])
subst_opt_bndrs subst bndrs
= mapAccumL subst_opt_bndr subst bndrs
----------------------
add_info :: Subst -> InVar -> OutVar -> OutVar
add_info subst old_bndr new_bndr
| isTyVar old_bndr = new_bndr
| otherwise = maybeModifyIdInfo mb_new_info new_bndr
where mb_new_info = substIdInfo subst new_bndr (idInfo old_bndr)
simpleUnfoldingFun :: IdUnfoldingFun
simpleUnfoldingFun id
| isAlwaysActive (idInlineActivation id) = idUnfolding id
| otherwise = noUnfolding
{-
Note [Inline prag in simplOpt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If there's an INLINE/NOINLINE pragma that restricts the phase in
which the binder can be inlined, we don't inline here; after all,
we don't know what phase we're in. Here's an example
foo :: Int -> Int -> Int
{-# INLINE foo #-}
foo m n = inner m
where
{-# INLINE [1] inner #-}
inner m = m+n
bar :: Int -> Int
bar n = foo n 1
When inlining 'foo' in 'bar' we want the let-binding for 'inner'
to remain visible until Phase 1
Note [Unfold compulsory unfoldings in LHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user writes `RULES map coerce = coerce` as a rule, the rule
will only ever match if simpleOptExpr replaces coerce by its unfolding
on the LHS, because that is the core that the rule matching engine
will find. So do that for everything that has a compulsory
unfolding. Also see Note [Desugaring coerce as cast] in Desugar.
However, we don't want to inline 'seq', which happens to also have a
compulsory unfolding, so we only do this unfolding for things
that are always-active. See Note [User-defined RULES for seq] in MkId.
************************************************************************
* *
exprIsConApp_maybe
* *
************************************************************************
Note [exprIsConApp_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsConApp_maybe is a very important function. There are two principal
uses:
* case e of { .... }
* cls_op e, where cls_op is a class operation
In both cases you want to know if e is of form (C e1..en) where C is
a data constructor.
However e might not *look* as if
Note [exprIsConApp_maybe on literal strings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #9400.
Conceptually, a string literal "abc" is just ('a':'b':'c':[]), but in Core
they are represented as unpackCString# "abc"# by MkCore.mkStringExprFS, or
unpackCStringUtf8# when the literal contains multi-byte UTF8 characters.
For optimizations we want to be able to treat it as a list, so they can be
decomposed when used in a case-statement. exprIsConApp_maybe detects those
calls to unpackCString# and returns:
Just (':', [Char], ['a', unpackCString# "bc"]).
We need to be careful about UTF8 strings here. ""# contains a ByteString, so
we must parse it back into a FastString to split off the first character.
That way we can treat unpackCString# and unpackCStringUtf8# in the same way.
-}
data ConCont = CC [CoreExpr] Coercion
-- Substitution already applied
-- | Returns @Just (dc, [t1..tk], [x1..xn])@ if the argument expression is
-- a *saturated* constructor application of the form @dc t1..tk x1 .. xn@,
-- where t1..tk are the *universally-quantified* type args of 'dc'
exprIsConApp_maybe :: InScopeEnv -> CoreExpr -> Maybe (DataCon, [Type], [CoreExpr])
exprIsConApp_maybe (in_scope, id_unf) expr
= go (Left in_scope) expr (CC [] (mkReflCo Representational (exprType expr)))
where
go :: Either InScopeSet Subst
-> CoreExpr -> ConCont
-> Maybe (DataCon, [Type], [CoreExpr])
go subst (Tick t expr) cont
| not (tickishIsCode t) = go subst expr cont
go subst (Cast expr co1) (CC [] co2)
= go subst expr (CC [] (subst_co subst co1 `mkTransCo` co2))
go subst (App fun arg) (CC args co)
= go subst fun (CC (subst_arg subst arg : args) co)
go subst (Lam var body) (CC (arg:args) co)
| exprIsTrivial arg -- Don't duplicate stuff!
= go (extend subst var arg) body (CC args co)
go (Right sub) (Var v) cont
= go (Left (substInScope sub))
(lookupIdSubst (text "exprIsConApp" <+> ppr expr) sub v)
cont
go (Left in_scope) (Var fun) cont@(CC args co)
| Just con <- isDataConWorkId_maybe fun
, count isValArg args == idArity fun
= dealWithCoercion co con args
-- Look through dictionary functions; see Note [Unfolding DFuns]
| DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = dfun_args } <- unfolding
, bndrs `equalLength` args -- See Note [DFun arity check]
, let subst = mkOpenSubst in_scope (bndrs `zip` args)
= dealWithCoercion co con (map (substExpr (text "exprIsConApp1") subst) dfun_args)
-- Look through unfoldings, but only arity-zero one;
-- if arity > 0 we are effectively inlining a function call,
-- and that is the business of callSiteInline.
-- In practice, without this test, most of the "hits" were
-- CPR'd workers getting inlined back into their wrappers,
| idArity fun == 0
, Just rhs <- expandUnfolding_maybe unfolding
, let in_scope' = extendInScopeSetSet in_scope (exprFreeVars rhs)
= go (Left in_scope') rhs cont
| (fun `hasKey` unpackCStringIdKey)
|| (fun `hasKey` unpackCStringUtf8IdKey)
, [Lit (MachStr str)] <- args
= dealWithStringLiteral fun str co
where
unfolding = id_unf fun
go _ _ _ = Nothing
----------------------------
-- Operations on the (Either InScopeSet CoreSubst)
-- The Left case is wildly dominant
subst_co (Left {}) co = co
subst_co (Right s) co = CoreSubst.substCo s co
subst_arg (Left {}) e = e
subst_arg (Right s) e = substExpr (text "exprIsConApp2") s e
extend (Left in_scope) v e = Right (extendSubst (mkEmptySubst in_scope) v e)
extend (Right s) v e = Right (extendSubst s v e)
-- See Note [exprIsConApp_maybe on literal strings]
dealWithStringLiteral :: Var -> BS.ByteString -> Coercion
-> Maybe (DataCon, [Type], [CoreExpr])
-- This is not possible with user-supplied empty literals, MkCore.mkStringExprFS
-- turns those into [] automatically, but just in case something else in GHC
-- generates a string literal directly.
dealWithStringLiteral _ str co
| BS.null str
= dealWithCoercion co nilDataCon [Type charTy]
dealWithStringLiteral fun str co
= let strFS = mkFastStringByteString str
char = mkConApp charDataCon [mkCharLit (headFS strFS)]
charTail = fastStringToByteString (tailFS strFS)
-- In singleton strings, just add [] instead of unpackCstring# ""#.
rest = if BS.null charTail
then mkConApp nilDataCon [Type charTy]
else App (Var fun)
(Lit (MachStr charTail))
in dealWithCoercion co consDataCon [Type charTy, char, rest]
dealWithCoercion :: Coercion -> DataCon -> [CoreExpr]
-> Maybe (DataCon, [Type], [CoreExpr])
dealWithCoercion co dc dc_args
| isReflCo co
, let (univ_ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) dc_args
= Just (dc, stripTypeArgs univ_ty_args, rest_args)
| Pair _from_ty to_ty <- coercionKind co
, Just (to_tc, to_tc_arg_tys) <- splitTyConApp_maybe to_ty
, to_tc == dataConTyCon dc
-- These two tests can fail; we might see
-- (C x y) `cast` (g :: T a ~ S [a]),
-- where S is a type function. In fact, exprIsConApp
-- will probably not be called in such circumstances,
      -- but there's nothing wrong with it
= -- Here we do the KPush reduction rule as described in the FC paper
-- The transformation applies iff we have
-- (C e1 ... en) `cast` co
-- where co :: (T t1 .. tn) ~ to_ty
-- The left-hand one must be a T, because exprIsConApp returned True
-- but the right-hand one might not be. (Though it usually will.)
let
tc_arity = tyConArity to_tc
dc_univ_tyvars = dataConUnivTyVars dc
dc_ex_tyvars = dataConExTyVars dc
arg_tys = dataConRepArgTys dc
non_univ_args = dropList dc_univ_tyvars dc_args
(ex_args, val_args) = splitAtList dc_ex_tyvars non_univ_args
-- Make the "theta" from Fig 3 of the paper
gammas = decomposeCo tc_arity co
theta_subst = liftCoSubstWith Representational
(dc_univ_tyvars ++ dc_ex_tyvars)
-- existentials are at role N
(gammas ++ map (mkReflCo Nominal)
(stripTypeArgs ex_args))
-- Cast the value arguments (which include dictionaries)
new_val_args = zipWith cast_arg arg_tys val_args
cast_arg arg_ty arg = mkCast arg (theta_subst arg_ty)
dump_doc = vcat [ppr dc, ppr dc_univ_tyvars, ppr dc_ex_tyvars,
ppr arg_tys, ppr dc_args,
ppr ex_args, ppr val_args, ppr co, ppr _from_ty, ppr to_ty, ppr to_tc ]
in
ASSERT2( eqType _from_ty (mkTyConApp to_tc (stripTypeArgs $ takeList dc_univ_tyvars dc_args))
, dump_doc )
ASSERT2( all isTypeArg ex_args, dump_doc )
ASSERT2( equalLength val_args arg_tys, dump_doc )
Just (dc, to_tc_arg_tys, ex_args ++ new_val_args)
| otherwise
= Nothing
stripTypeArgs :: [CoreExpr] -> [Type]
stripTypeArgs args = ASSERT2( all isTypeArg args, ppr args )
[ty | Type ty <- args]
-- We really do want isTypeArg here, not isTyCoArg!
{-
Note [Unfolding DFuns]
~~~~~~~~~~~~~~~~~~~~~~
DFuns look like
df :: forall a b. (Eq a, Eq b) -> Eq (a,b)
df a b d_a d_b = MkEqD (a,b) ($c1 a b d_a d_b)
($c2 a b d_a d_b)
So to split it up we just need to apply the ops $c1, $c2 etc
to the very same args as the dfun. It takes a little more work
to compute the type arguments to the dictionary constructor.
Note [DFun arity check]
~~~~~~~~~~~~~~~~~~~~~~~
Here we check that the total number of supplied arguments (including
type args) matches what the dfun is expecting. This may be *less*
than the ordinary arity of the dfun: see Note [DFun unfoldings] in CoreSyn
-}
exprIsLiteral_maybe :: InScopeEnv -> CoreExpr -> Maybe Literal
-- Same deal as exprIsConApp_maybe, but much simpler
-- Nevertheless we do need to look through unfoldings for
-- Integer literals, which are vigorously hoisted to top level
-- and not subsequently inlined
exprIsLiteral_maybe env@(_, id_unf) e
= case e of
Lit l -> Just l
Tick _ e' -> exprIsLiteral_maybe env e' -- dubious?
Var v | Just rhs <- expandUnfolding_maybe (id_unf v)
-> exprIsLiteral_maybe env rhs
_ -> Nothing
{-
Note [exprIsLambda_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsLambda_maybe will, given an expression `e`, try to turn it into the form
`Lam v e'` (returned as `Just (v,e')`). Besides using lambdas, it looks through
casts (using the Push rule), and it unfolds function calls if the unfolding
has a greater arity than the number of arguments present.
Currently, it is used in Rules.match, and is required to make
"map coerce = coerce" match.
-}
exprIsLambda_maybe :: InScopeEnv -> CoreExpr
-> Maybe (Var, CoreExpr,[Tickish Id])
-- See Note [exprIsLambda_maybe]
-- The simple case: It is a lambda already
exprIsLambda_maybe _ (Lam x e)
= Just (x, e, [])
-- Still straightforward: Ticks that we can float out of the way
exprIsLambda_maybe (in_scope_set, id_unf) (Tick t e)
| tickishFloatable t
, Just (x, e, ts) <- exprIsLambda_maybe (in_scope_set, id_unf) e
= Just (x, e, t:ts)
-- Also possible: A casted lambda. Push the coercion inside
exprIsLambda_maybe (in_scope_set, id_unf) (Cast casted_e co)
| Just (x, e,ts) <- exprIsLambda_maybe (in_scope_set, id_unf) casted_e
-- Only do value lambdas.
-- this implies that x is not in scope in gamma (makes this code simpler)
, not (isTyVar x) && not (isCoVar x)
, ASSERT( not $ x `elemVarSet` tyCoVarsOfCo co) True
, Just (x',e') <- pushCoercionIntoLambda in_scope_set x e co
, let res = Just (x',e',ts)
= --pprTrace "exprIsLambda_maybe:Cast" (vcat [ppr casted_e,ppr co,ppr res)])
res
-- Another attempt: See if we find a partial unfolding
exprIsLambda_maybe (in_scope_set, id_unf) e
| (Var f, as, ts) <- collectArgsTicks tickishFloatable e
, idArity f > length (filter isValArg as)
-- Make sure there is hope to get a lambda
, Just rhs <- expandUnfolding_maybe (id_unf f)
-- Optimize, for beta-reduction
, let e' = simpleOptExprWith (mkEmptySubst in_scope_set) (rhs `mkApps` as)
-- Recurse, because of possible casts
, Just (x', e'', ts') <- exprIsLambda_maybe (in_scope_set, id_unf) e'
, let res = Just (x', e'', ts++ts')
= -- pprTrace "exprIsLambda_maybe:Unfold" (vcat [ppr e, ppr (x',e'')])
res
exprIsLambda_maybe _ _e
= -- pprTrace "exprIsLambda_maybe:Fail" (vcat [ppr _e])
Nothing
pushCoercionIntoLambda
:: InScopeSet -> Var -> CoreExpr -> Coercion -> Maybe (Var, CoreExpr)
pushCoercionIntoLambda in_scope x e co
-- This implements the Push rule from the paper on coercions
-- Compare with simplCast in Simplify
| ASSERT(not (isTyVar x) && not (isCoVar x)) True
, Pair s1s2 t1t2 <- coercionKind co
, Just (_s1,_s2) <- splitFunTy_maybe s1s2
, Just (t1,_t2) <- splitFunTy_maybe t1t2
= let [co1, co2] = decomposeCo 2 co
-- Should we optimize the coercions here?
-- Otherwise they might not match too well
x' = x `setIdType` t1
in_scope' = in_scope `extendInScopeSet` x'
subst = extendIdSubst (mkEmptySubst in_scope')
x
(mkCast (Var x') co1)
in Just (x', subst_expr subst e `mkCast` co2)
| otherwise
= pprTrace "exprIsLambda_maybe: Unexpected lambda in case" (ppr (Lam x e))
Nothing
| acowley/ghc | compiler/coreSyn/CoreSubst.hs | bsd-3-clause | 59,475 | 1 | 22 | 15,090 | 11,390 | 5,987 | 5,403 | 667 | 13 |
module Parser where
import Ast
import Text.Trifecta
import Text.Trifecta.Delta
| Agrosis/haxpr | src/Parser.hs | mit | 81 | 0 | 4 | 11 | 18 | 12 | 6 | 4 | 0 |
{-# htermination filterM :: Monad m => (a -> m Bool) -> [a] -> m [a] #-}
import Monad
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Monad_filterM_2.hs | mit | 86 | 0 | 3 | 19 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Scotty.Blaze (
blaze
, builder
) where
import Network.Wai
import Web.Scotty (ActionM, header)
import qualified Control.Monad.State as MS
import Text.Blaze.Html (Html)
import Blaze.ByteString.Builder (Builder)
import Text.Blaze.Html.Renderer.Utf8 (renderHtmlBuilder)
-- | Render some Blaze Html
--
blaze :: Html -> ActionM ()
blaze h = do
header "Content-Type" "text/html"
builder $ renderHtmlBuilder h
-- | Render a generic builder
--
builder :: Builder -> ActionM ()
builder = MS.modify . setContent
setContent :: Builder -> Response -> Response
setContent b (ResponseBuilder s h _) = ResponseBuilder s h b
setContent b (ResponseFile s h _ _) = ResponseBuilder s h b
setContent b (ResponseSource s h _) = ResponseBuilder s h b
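-- Example usage (an illustrative sketch, not part of this module; it assumes
-- imports of Web.Scotty (scotty, get) and Text.Blaze.Html5 as H):
--
--     main = scotty 3000 $
--         get "/" $ blaze $ H.docTypeHtml $ H.body $ H.h1 "Hello"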
| jb55/scotty-blaze | src/Web/Scotty/Blaze.hs | mit | 837 | 0 | 8 | 178 | 243 | 134 | 109 | 20 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings, TemplateHaskell, TypeSynonymInstances #-}
module SpaceWeather.Regressor.Linear where
import qualified Data.Aeson.TH as Aeson
import SpaceWeather.Prediction
data LinearOption = LinearOption deriving (Eq, Ord, Show, Read)
Aeson.deriveJSON Aeson.defaultOptions ''LinearOption
instance Predictor LinearOption where
performPrediction = undefined
| nushio3/UFCORIN | src/SpaceWeather/Regressor/Linear.hs | mit | 440 | 0 | 6 | 45 | 75 | 43 | 32 | 8 | 0 |
{-# htermination liftM2 :: Monad m => (a -> b -> c) -> (m a -> m b -> m c) #-}
import Monad
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Monad_liftM2_1.hs | mit | 92 | 0 | 3 | 24 | 5 | 3 | 2 | 1 | 0 |
-----------------------------------------------------------------------------
--
-- Module : SGC.Object
-- Copyright :
-- License : MIT
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE Rank2Types #-}
module SGC.Object where
import PhyQ
import Data.Typeable (Typeable)
import GHC.Exts (Constraint)
-----------------------------------------------------------------------------
class AnyObject obj where objId :: obj -> String
data SomeObject = forall obj . (Typeable obj, AnyObject obj) => SomeObject obj
data SomeObject' (c :: * -> Constraint) =
forall obj . (Typeable obj, AnyObject obj, c obj) =>
SomeObject' obj
fromSomeObject' :: (forall obj . (Typeable obj, AnyObject obj, c obj) => obj -> x)
-> SomeObject' c
-> x
fromSomeObject' f (SomeObject' obj) = f obj
-----------------------------------------------------------------------------
type Measure a v q = a -> Measurable q v
class HasMass v a where
objMass :: Measure a v Mass
class HasPosition sys vec a where
objPosition :: sys -> Measure a vec Position
objSpeed :: sys -> Measure a vec Speed
objDistance :: (HasPosition sys vec a, Ord vec, Num vec) =>
sys -> a -> a -> Measurable Position vec
objDistance sys x y = let px = objPosition sys x
py = objPosition sys y
in py $- px
-----------------------------------------------------------------------------
class (HasPosition sys vec a, HasMass v a) =>
MaterialPoint' sys vec v a | vec -> v
type MaterialPoint sys vec v = SomeObject' (MaterialPoint' sys vec v)
instance HasPosition sys vec (MaterialPoint sys vec v) where
objPosition sys = fromSomeObject' $ objPosition sys
objSpeed sys = fromSomeObject' $ objSpeed sys
instance HasMass v (MaterialPoint sys vec v) where
objMass = fromSomeObject' objMass
-----------------------------------------------------------------------------
-- class HasPosition sys vec a where
-- objPosition :: sys -> Measure a vec Position
-- objSpeed :: sys -> Measure a vec Speed
-----------------------------------------------------------------------------
| fehu/hsgc | SGC/Object_OLD.hs | mit | 2,381 | 0 | 9 | 479 | 533 | 291 | 242 | -1 | -1 |
newtype DiffList a = DiffList {getDiffList :: [a] -> [a]}
toDiffList :: [a] -> DiffList a
toDiffList xs = DiffList (xs++)
fromDiffList :: DiffList a -> [a]
fromDiffList (DiffList f) = f []
instance Monoid (DiffList a) where
mempty = DiffList (\xs -> [] ++ xs)
(DiffList f) `mappend` (DiffList g) = DiffList (\xs -> f (g xs)) | RAFIRAF/HASKELL | For a Few Monads More/difflist.hs | mit | 353 | 0 | 11 | 86 | 170 | 91 | 79 | 8 | 1 |
-- necessary for `ToParamSchema Core.EpochIndex`
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Cardano.Node.API.Swagger where
import Universum
import Control.Lens (at, (?~))
import Data.Swagger
import Servant
import Servant.Swagger
import Servant.Swagger.UI (SwaggerSchemaUI)
import Pos.Chain.Txp (TxIn, TxOut, TxOutAux)
import Pos.Chain.Update (SoftwareVersion)
import Pos.Util.Swagger (swaggerSchemaUIServer)
import Pos.Web (CConfirmedProposalState, serveDocImpl)
import Pos.Web.Types (TlsParams)
forkDocServer
:: HasSwagger a
=> Proxy a
-> SoftwareVersion
-> String
-> Word16
-> Maybe TlsParams
-> IO ()
forkDocServer prxy swVersion ip port' tlsParams =
serveDocImpl
(pure app)
ip
port'
tlsParams
Nothing
Nothing
where
app =
serve
(Proxy @("docs" :> "v1" :> SwaggerSchemaUI "index" "swagger.json"))
(swaggerSchemaUIServer (documentationApi swVersion prxy))
documentationApi
:: HasSwagger a
=> SoftwareVersion
-> Proxy a
-> Swagger
documentationApi curSoftwareVersion prxy = toSwagger prxy
& info.title .~ "Cardano Node API"
& info.version .~ fromString (show curSoftwareVersion)
& host ?~ "127.0.0.1:8083"
& info.license ?~ ("MIT" & url ?~ URL "https://raw.githubusercontent.com/input-output-hk/cardano-sl/develop/lib/LICENSE")
instance ToParamSchema TxIn where
toParamSchema _ = mempty
& type_ .~ SwaggerString
instance ToSchema TxIn where
declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions
instance ToSchema TxOut where
declareNamedSchema _ =
pure $ NamedSchema (Just "TxOut") $ mempty
& type_ .~ SwaggerObject
& required .~ ["coin", "address"]
& properties .~ (mempty
& at "coin" ?~ (Inline $ mempty
& type_ .~ SwaggerNumber
)
& at "address" ?~ (Inline $ mempty
& type_ .~ SwaggerString
)
)
instance ToSchema TxOutAux
instance ToSchema CConfirmedProposalState
| input-output-hk/pos-haskell-prototype | node/src/Cardano/Node/API/Swagger.hs | mit | 2,254 | 0 | 16 | 683 | 516 | 272 | 244 | -1 | -1 |
{{hs_copyright}}
module {{module_name}}App.Types
( Milliseconds (..)
) where
newtype Milliseconds = Milliseconds Int
| rcook/ptool-templates | elm-haskell/app_Types.hs | mit | 126 | 2 | 5 | 22 | 34 | 24 | 10 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-|
Module : Ogma.Api.Definition
Copyright : (c) Ogma Project, 2016
License : MIT
Stability : experimental
-}
module Ogma.Api.Definition where
import Data.Aeson
import Data.Aeson.Types
import Data.Aeson.TH
import Data.Proxy
import Data.Text (Text)
import Data.Time
import GHC.Generics
import GHC.Int
import Servant.API
import Data.Char (toLower)
data AccountNewPost = AccountNewPost { accountNewEmail :: Text
, accountNewLogin :: Text }
deriving (Generic)
deriveJSON (defaultOptions { fieldLabelModifier = map toLower . drop 10 }) ''AccountNewPost
data GetTokenPost = GetTokenPost { getTokenLogin :: Text }
deriving (Generic)
deriveJSON (defaultOptions { fieldLabelModifier = map toLower . drop 8 }) ''GetTokenPost
data GetTokenResponse = GetTokenResponse { getTokenAccess :: Text
, getTokenRefresh :: Text
, getTokenExpire :: UTCTime }
deriving (Generic)
deriveJSON (defaultOptions { fieldLabelModifier = map toLower . drop 8 }) ''GetTokenResponse
data DocumentPost = DocumentPost { postDocumentTitle :: Text
, postDocumentContent :: Text }
deriving (Generic)
deriveJSON (defaultOptions { fieldLabelModifier = map toLower . drop 12 }) ''DocumentPost
data GetDocument = GetDocument { getDocumentTitle :: Text
, getDocumentContent :: Text
, getDocumentModifiedOn :: UTCTime
, getDocumentCreatedOn :: UTCTime
, getDocumentPerm :: Text }
deriving (Generic)
deriveJSON (defaultOptions { fieldLabelModifier = map toLower . drop 11 }) ''GetDocument
type OgmaAPI = "account"
:> "new"
:> ReqBody '[JSON] AccountNewPost
:> PostCreated '[JSON] (Headers '[Header "resource-id" Int64] NoContent)
:<|> "get_token"
:> ReqBody '[JSON] GetTokenPost
:> Post '[JSON] GetTokenResponse
:<|> AuthProtect "ogma-identity" :>
("document"
:> "new"
:> ReqBody '[JSON] DocumentPost
:> PostCreated '[JSON] (Headers '[Header "resource-id" Int64] NoContent)
:<|> "document"
:> Capture "id" Int64
:> ReqBody '[JSON] DocumentPost
:> Put '[JSON] NoContent
:<|> "document"
:> Capture "id" Int64
:> Get '[JSON] GetDocument)
ogmaAPI :: Proxy OgmaAPI
ogmaAPI = Proxy
| lgeorget/ogma | api/src/Ogma/Api/Definition.hs | mit | 2,891 | 0 | 26 | 1,022 | 620 | 337 | 283 | 59 | 1 |
module MHMC.Error
(
--TODO: EVERYTHING
) where | killmous/MHMC | src/MHMC/Error.hs | mit | 47 | 0 | 3 | 7 | 10 | 7 | 3 | 2 | 0 |
module Graphics.CG.Primitives.Triangle(Triangle, makeTriangle, sortTrianglePoints) where
import Data.List (sort)
import Graphics.Gloss.Data.Point
type Triangle = (Point, Point, Point)
makeTriangle :: Point -> Point -> Point -> Triangle
makeTriangle a b c = (a, b, c)
sortTrianglePoints :: Triangle -> Triangle
sortTrianglePoints (a, b, c) = (\[a, b, c] -> (a, b, c)) $ sort [a, b, c]
| jagajaga/CG-Haskell | Graphics/CG/Primitives/Triangle.hs | mit | 425 | 0 | 8 | 96 | 157 | 96 | 61 | 8 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module TypeHierarchy where
import Data.List
import Data.Functor
import Data.Monoid
import Data.Binary
import Data.Serialize
import GHC.Generics as GGen
import Text.ParserCombinators.ReadPrec
import Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr as PE
import Text.ParserCombinators.Parsec.Language
import qualified Data.Map.Strict as Map
-- Base types with annotations
data AnnotT = AnnotT { baseType :: String, annotType :: Maybe String }
deriving (Eq,Show,Ord,GGen.Generic)
instance Binary AnnotT
renderAnnotT :: AnnotT -> String
renderAnnotT (AnnotT base Nothing) = base
renderAnnotT (AnnotT base (Just annot)) = base ++ "[" ++ annot ++ "]"
--moreGeneralThan :: AnnotT -> AnnotT -> Bool
--moreGeneralThan (AnnotT baseA Nothing) (AnnotT baseB t) =
-- baseA == baseB
--moreGeneralThan x y =
-- x == y
------------------------------------
-- Lambek types, without products --
------------------------------------
data LambekFun =
LFAtom AnnotT
| LFLeft LambekFun LambekFun
| LFRight LambekFun LambekFun
deriving (Eq,Show,Ord,GGen.Generic)
instance Binary LambekFun
-- Pretty printing
data ParenthesisNeeded = NoParen | AlwaysParen | ParenLeft | ParenRight
addParen str = "("++str++")"
renderLFparen (LFAtom base) _ = renderAnnotT base
renderLFparen (LFLeft b a) NoParen =
(renderLFparen a AlwaysParen) ++ "\\" ++ (renderLFparen b ParenRight)
renderLFparen t@(LFLeft _ _) ParenRight = renderLFparen t NoParen
renderLFparen t@(LFLeft _ _) _ = addParen $ renderLFparen t NoParen
renderLFparen (LFRight a b) NoParen =
(renderLFparen a ParenLeft) ++ "/" ++ (renderLFparen b AlwaysParen)
renderLFparen t@(LFRight _ _) ParenLeft = renderLFparen t NoParen
renderLFparen t@(LFRight _ _) _ = addParen $ renderLFparen t NoParen
renderLF x = renderLFparen x NoParen
typeLengthLF :: LambekFun -> Int
typeLengthLF (LFAtom _) = 1
typeLengthLF (LFLeft body arg) =
typeLengthLF body + (typeLengthLF arg)
typeLengthLF (LFRight body arg) =
typeLengthLF body + (typeLengthLF arg)
-- Lambek skeletons (without products)
data LambekSkel =
LSAtom AnnotT
| LSVar Int
| LSLeft LambekSkel LambekSkel
| LSRight LambekSkel LambekSkel
deriving (Eq,Show,Ord,GGen.Generic)
instance Binary LambekSkel
-- Pretty printing
renderLSparen (LSAtom base) _ = renderAnnotT base
renderLSparen (LSVar n) _ = show n
renderLSparen (LSLeft b a) NoParen =
(renderLSparen a AlwaysParen) ++ "\\" ++ (renderLSparen b ParenRight)
renderLSparen t@(LSLeft _ _) ParenRight = renderLSparen t NoParen
renderLSparen t@(LSLeft _ _) _ = addParen $ renderLSparen t NoParen
renderLSparen (LSRight a b) NoParen =
(renderLSparen a ParenLeft) ++ "/" ++ (renderLSparen b AlwaysParen)
renderLSparen t@(LSRight _ _) ParenLeft = renderLSparen t NoParen
renderLSparen t@(LSRight _ _) _ = addParen $ renderLSparen t NoParen
renderLS x = renderLSparen x NoParen
-- Utility: unify two assignments
unionMap m1 m2 =
Map.foldlWithKey
(\ m2 key value -> m2 >>= (\m2 -> case Map.lookup key m2 of
Nothing -> Just $ Map.insert key value m2
Just otherVal ->
if otherVal == value then
Just $ Map.insert key value m2
else Nothing))
(Just m2)
m1
-- Does this Lambek type match this Lambek skeleton? If not, Nothing; otherwise, Just the corresponding type assignment
matchSkeleton :: LambekSkel -> LambekFun -> Maybe (Map.Map Int LambekFun)
matchSkeleton x@(LSAtom a) y@(LFAtom b) =
if a == b then Just Map.empty else Nothing
matchSkeleton (LSVar x) t = Just $ Map.insert x t Map.empty
matchSkeleton (LSLeft a1 b1) (LFLeft a2 b2) = do
s1 <- matchSkeleton a1 a2
s2 <- matchSkeleton b1 b2
unionMap s1 s2
matchSkeleton (LSRight a1 b1) (LFRight a2 b2) = do
s1 <- matchSkeleton a1 a2
s2 <- matchSkeleton b1 b2
unionMap s1 s2
matchSkeleton _ _ = Nothing
-- More convenient syntax
isMatchedBy :: LambekFun -> LambekSkel -> Bool
isMatchedBy fun skel = matchSkeleton skel fun /= Nothing
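-- A minimal usage sketch (not from the original source): the skeleton "0/np"
-- matches the type "s/np", binding variable 0 to the atom "s".
exampleMatch :: Maybe (Map.Map Int LambekFun)
exampleMatch = matchSkeleton (LSRight (LSVar 0) (LSAtom np)) (LFRight (LFAtom s) (LFAtom np))
  where np = AnnotT "np" Nothing
        s = AnnotT "s" Nothing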
-- Given a list of skeletons and a list of types,
-- filter the types with the skeletons (each type is paired with
-- the first skeleton it matches)
dispatchTypes :: (a -> LambekFun) -> [LambekSkel] -> [a] -> ([[a]],[a])
dispatchTypes getType skels types =
reverseOutput $ foldl addTypeToAccu (initSlots,[]) types
where
initSlots = map (\x -> (x,[])) skels
addTypeToAccu (slots,noMatch) tp =
case findMatching slots tp of
Just newSlots -> (newSlots,noMatch)
Nothing -> (slots,tp:noMatch)
findMatching [] _ = Nothing
findMatching ((skel,slot):tail) tp =
if matchSkeleton skel (getType tp) /= Nothing then
Just $ (skel,tp:slot):tail
else do
rest <- findMatching tail tp
return $ (skel,slot):rest
reverseOutput (slots,noMatch) =
(map (reverse . snd) slots, reverse noMatch)
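-- Illustrative example (not from the original source): with a single skeleton
-- matching the atom "np", the "np" type lands in the first (and only) slot and
-- "s" is left unmatched, i.e. exampleDispatch == ([[LFAtom np]], [LFAtom s]).
exampleDispatch :: ([[LambekFun]], [LambekFun])
exampleDispatch = dispatchTypes id [LSAtom np] [LFAtom np, LFAtom s]
  where np = AnnotT "np" Nothing
        s = AnnotT "s" Nothing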
-- Lambek types, with products
data LambekType = LTAtom String | LTLeft LambekType LambekType | LTRight LambekType LambekType | LTProd LambekType LambekType
deriving (GGen.Generic)
instance Binary LambekType
-- instance Semigroup LambekType where
-- (<>) = LTProd
------- PARSING ----------
lexerL = P.makeTokenParser (emptyDef { reservedOpNames = ["/","\\","[","]"] })
whiteSpaceL = P.whiteSpace lexerL
myIdentifier = many (noneOf "/\\[],\n")
termLF :: Parser LambekFun
termLF = (atomLF LFAtom) <|> (P.parens lexerL parserLF)
atomLF atomFun = do
id <- (P.identifier lexerL)
bracketParser id <|> (return $ atomFun (AnnotT id Nothing))
where
bracketParser baseId = do
char '['
annot <- myIdentifier
char ']'
return $ atomFun (AnnotT baseId (Just annot))
termLFwhiteSpace = whiteSpaceL >> termLF
tableLF = [ [PE.Infix (whiteSpaceL >> char '/' >> return LFRight) AssocLeft ],
[PE.Infix (whiteSpaceL >> char '\\' >> return LFLeft) AssocRight ] ]
parserLF = buildExpressionParser tableLF termLFwhiteSpace
parserLFeof = do
whiteSpaceL
x <- parserLF
whiteSpaceL
eof
return x
parseLFFromString :: String -> Either String LambekFun
parseLFFromString input = case parse parserLFeof "" input of
Left error -> Left (show error)
Right something -> Right something
---- Parsing for skeletons ----
termLS :: Parser LambekSkel
termLS = natParser <|> (atomLF LSAtom) <|> (P.parens lexerL parserLS)
where
natParser = do
x <- P.natural lexerL
return $ LSVar (fromIntegral x)
termLSwhiteSpace = whiteSpaceL >> termLS
tableLS = [ [PE.Infix (whiteSpaceL >> char '/' >> return LSRight) AssocLeft ],
[PE.Infix (whiteSpaceL >> char '\\' >> return (\a b -> LSLeft b a)) AssocRight ] ]
parserLS = buildExpressionParser tableLS termLSwhiteSpace
parserLSeof = do
whiteSpaceL
x <- parserLS
whiteSpaceL
eof
return x
--------------------
-- Pregroup types --
--------------------
-- Simple type in a pregroup: a base type and an exponent. +1 means right, -1 means left
data PrgSType = PrgS AnnotT Int
deriving (Show, Eq, GGen.Generic)
instance Binary PrgSType
-- Simple left adjoint
leftAdjS (PrgS base exp) = PrgS base (exp-1)
-- Simple right adjoint
rightAdjS (PrgS base exp) = PrgS base (exp+1)
-- Pretty printing
renderST (PrgS base 0) = renderAnnotT base
renderST (PrgS base n) =
let exponent = (if n > 0 then replicate n 'r' else replicate (-n) 'l')
in
(renderAnnotT base) ++ "(" ++ exponent ++ ")"
-- (Complex) type in a pregroup: list of simple types
type PrgType = [PrgSType]
-- Left adjoint
leftAdj :: PrgType -> PrgType
leftAdj = reverse . map leftAdjS
-- Right adjoint
rightAdj :: PrgType -> PrgType
rightAdj = reverse . map rightAdjS
-- Pretty printing
renderPrg :: PrgType -> String
renderPrg = foldl (\accu e -> renderST e ++ " " ++ accu) ""
-- Canonical morphism from Lambek to pregroup
lambekToPregroup :: LambekFun -> PrgType
lambekToPregroup (LFAtom typ) = [PrgS typ 0]
lambekToPregroup (LFLeft body arg) =
(lambekToPregroup body) ++ (leftAdj . lambekToPregroup $ arg)
lambekToPregroup (LFRight body arg) =
(rightAdj . lambekToPregroup $ arg) ++ (lambekToPregroup body)
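-- Small worked example (not part of the original module): the Lambek type np\s
-- of an intransitive verb becomes the pregroup type with a left adjoint on np,
-- i.e. examplePregroup == [PrgS s 0, PrgS np (-1)].
examplePregroup :: PrgType
examplePregroup = lambekToPregroup (LFLeft (LFAtom s) (LFAtom np))
  where np = AnnotT "np" Nothing
        s = AnnotT "s" Nothing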
-----------------
-- Group types --
-----------------
-- Simple type in a group: a base type and its exponent (True for +1, False for -1)
data GrpSType = GrpS AnnotT Bool
deriving (GGen.Generic)
instance Binary GrpSType
-- Inverse
invGrpS (GrpS base exp) = GrpS base (not exp)
-- Pretty printing
renderGS (GrpS base True) = renderAnnotT base
renderGS (GrpS base False) = (renderAnnotT base) ++ "(-1)"
-- (Complex) type in a group: product of simple types
type GrpType = [GrpSType]
-- Inverse
invGrp = reverse . (map invGrpS)
-- Pretty printing
renderGrp = foldl (\ accu elem -> accu ++ " " ++ (renderGS elem)) ""
-- Canonical morphism from pregroup to group
-- Simple version
pregroupToGroupS :: PrgSType -> GrpSType
pregroupToGroupS (PrgS base exp) = GrpS base (exp `rem` 2 == 0)
-- Complex version
pregroupToGroup :: PrgType -> GrpType
pregroupToGroup =
map pregroupToGroupS
-- Helper
lambekToGroup :: LambekFun -> GrpType
lambekToGroup =
pregroupToGroup . lambekToPregroup
| wetneb/yanker | src/TypeHierarchy.hs | gpl-3.0 | 9,164 | 0 | 17 | 1,771 | 2,838 | 1,479 | 1,359 | 188 | 4 |
{-#LANGUAGE DeriveGeneric #-}
module Handler.Instructor where
import Import
import Util.Data
import Util.Database
import Yesod.Form.Bootstrap3
import Carnap.GHCJS.SharedTypes(ProblemSource(..))
import Yesod.Form.Jquery
import Handler.User (scoreByIdAndClassTotal, scoreByIdAndClassPerProblem)
import Text.Blaze.Html (toMarkup)
import Text.Read (readMaybe)
import Data.Time
import Data.Time.Zones
import Data.Time.Zones.DB
import Data.Time.Zones.All
import Data.Aeson (decode,encode)
import qualified Data.IntMap (keys, insert,fromList,toList,delete)
import qualified Data.Text as T
import qualified Data.List as L
import System.FilePath
import System.Directory (getDirectoryContents,removeFile, doesFileExist, createDirectoryIfMissing)
putInstructorR :: Text -> Handler Value
putInstructorR ident = do
((assignmentrslt,_),_) <- runFormPost (identifyForm "updateAssignment" $ updateAssignmentForm)
((courserslt,_),_) <- runFormPost (identifyForm "updateCourse" $ updateCourseForm)
((documentrslt,_),_) <- runFormPost (identifyForm "updateDocument" $ updateDocumentForm)
case (assignmentrslt,courserslt,documentrslt) of
(FormSuccess (idstring, mdue, mduetime,mfrom,mfromtime,muntil,muntiltime, mdesc),_,_) -> do
case readMaybe idstring of
Nothing -> returnJson ("Could not read assignment key"::Text)
Just k -> do
mval <- runDB (get k)
case mval of
Nothing -> returnJson ("Could not find assignment!"::Text)
Just v ->
do let cid = assignmentMetadataCourse v
runDB $ do course <- get cid >>= maybe (liftIO $ fail "could not get course") pure
let (Just tz) = tzByName . courseTimeZone $ course
let mtimeUpdate Nothing Nothing field = update k [ field =. Nothing ]
mtimeUpdate mdate mtime field = maybeDo mdate (\date->
do let localtime = case mtime of
(Just time) -> LocalTime date time
_ -> LocalTime date (TimeOfDay 23 59 59)
update k [ field =. (Just $ localTimeToUTCTZ tz localtime) ])
mtimeUpdate mdue mduetime AssignmentMetadataDuedate
mtimeUpdate mfrom mfromtime AssignmentMetadataVisibleFrom
mtimeUpdate muntil muntiltime AssignmentMetadataVisibleTill
update k [ AssignmentMetadataDescription =. (unTextarea <$> mdesc) ]
returnJson ("updated!"::Text)
(_,FormSuccess (idstring,mdesc,mstart,mend,mpoints),_) -> do
case readMaybe idstring of
Just k -> do runDB $ do update k [ CourseDescription =. (unTextarea <$> mdesc) ]
maybeDo mstart (\start -> update k
[ CourseStartDate =. UTCTime start 0 ])
maybeDo mend (\end-> update k
[ CourseEndDate =. UTCTime end 0 ])
maybeDo mpoints (\points-> update k [ CourseTotalPoints =. points ])
returnJson ("updated!"::Text)
Nothing -> returnJson ("could not find course!"::Text)
(_,_,FormSuccess (idstring, mscope, mdesc,mfile,mtags)) -> do
case readMaybe idstring of
Just k -> do
mdoc <- runDB (get k)
case mdoc of
Nothing -> returnJson ("could not find document!"::Text)
Just doc -> do
maybeIdent <- getIdent $ documentCreator doc
--XXX: shouldn't be possible for a document to exist without a creator
case maybeIdent of
Just ident -> do
runDB $ do update k [ DocumentDescription =. (unTextarea <$> mdesc) ]
maybeDo mscope (\scope -> update k [ DocumentScope =. scope ])
maybeDo mtags (\tags -> do
oldTags <- selectList [TagBearer ==. k] []
mapM (delete . entityKey) oldTags
forM tags (\tag -> insert $ Tag k tag)
return ())
maybeDo mfile (saveTo ("documents" </> unpack ident) (unpack $ documentFilename doc))
returnJson ("updated!"::Text)
Nothing -> returnJson ("document did not have a creator. This is a bug."::Text)
Nothing -> returnJson ("could not read document key"::Text)
(FormMissing,FormMissing,FormMissing) -> returnJson ("no form" :: Text)
(form1,form2,form3) -> returnJson ("errors: " <> errorsOf form1 <> errorsOf form2 <> errorsOf form3)
where errorsOf (FormFailure s) = concat s <> ", "
errorsOf _ = ""
deleteInstructorR :: Text -> Handler Value
deleteInstructorR ident = do
msg <- requireJsonBody :: Handler InstructorDelete
case msg of
DeleteAssignment id ->
do datadir <- appDataRoot <$> (appSettings <$> getYesod)
deleted <- runDB $ deleteCascade id
returnJson ("Assignment deleted" :: Text)
DeleteProblems coursename setnum ->
do checkCourseOwnership coursename
mclass <- runDB $ getBy $ UniqueCourse coursename
case mclass of
Just (Entity classkey theclass)->
do case readAssignmentTable <$> courseTextbookProblems theclass of
Just assign -> do runDB $ update classkey
[CourseTextbookProblems =. (Just $ BookAssignmentTable $ Data.IntMap.delete setnum assign)]
returnJson ("Deleted Assignment"::Text)
Nothing -> returnJson ("Assignment table Missing, can't delete."::Text)
Nothing -> returnJson ("Something went wrong with retriving the course."::Text)
DeleteCourse coursename ->
do checkCourseOwnership coursename
mclass <- runDB $ getBy $ UniqueCourse coursename
case mclass of
Just (Entity classkey theclass)->
do runDB $ do studentsOf <- selectList [UserDataEnrolledIn ==. Just classkey] []
mapM (\s -> update (entityKey s) [UserDataEnrolledIn =. Nothing]) studentsOf
deleteCascade classkey
returnJson ("Class Deleted"::Text)
Nothing -> returnJson ("No class to delete, for some reason"::Text)
DeleteDocument fn ->
do datadir <- appDataRoot <$> (appSettings <$> getYesod)
musr <- runDB $ getBy $ UniqueUser ident
case musr of
Nothing -> returnJson ("Could not get user id."::Text)
Just usr -> do
deleted <- runDB $ do mk <- getBy $ UniqueDocument fn (entityKey usr)
case mk of
Just (Entity k v) ->
do deleteCascade k
liftIO $ do fe <- doesFileExist (datadir </> "documents" </> unpack ident </> unpack fn)
if fe then removeFile (datadir </> "documents" </> unpack ident </> unpack fn)
else return ()
return True
Nothing -> return False
if deleted
then returnJson (fn ++ " deleted")
else returnJson ("unable to retrieve metadata for " ++ fn)
DropStudent sident ->
do sid <- fromIdent sident
dropped <- runDB $ do msd <- getBy (UniqueUserData sid)
case msd of
Nothing -> return False
Just (Entity k _) ->
do update k [UserDataEnrolledIn =. Nothing]
return True
if dropped then returnJson (sident ++ " dropped")
else returnJson ("couldn't drop " ++ sident)
DeleteCoInstructor ciid -> do
runDB (deleteCascade ciid) >> returnJson ("Deleted" :: Text)
postInstructorR :: Text -> Handler Html
postInstructorR ident = do
time <- liftIO getCurrentTime
classes <- classesByInstructorIdent ident
let activeClasses = filter (\c -> courseEndDate (entityVal c) > time) classes
docs <- documentsByInstructorIdent ident
instructors <- runDB $ selectList [UserDataInstructorId !=. Nothing] []
((assignmentrslt,_),_) <- runFormPost (identifyForm "uploadAssignment" $ uploadAssignmentForm activeClasses docs)
((documentrslt,_),_) <- runFormPost (identifyForm "uploadDocument" $ uploadDocumentForm)
((newclassrslt,_),_) <- runFormPost (identifyForm "createCourse" createCourseForm)
((frombookrslt,_),_) <- runFormPost (identifyForm "setBookAssignment" $ setBookAssignmentForm activeClasses)
((instructorrslt,_),_) <- runFormPost (identifyForm "addCoinstructor" $ addCoInstructorForm instructors ("" :: String))
case assignmentrslt of
FormSuccess (doc, Entity classkey theclass, mdue,mduetime,mfrom,mfromtime,mtill,mtilltime, massignmentdesc, mpass, mhidden,mlimit, subtime) ->
do Entity uid user <- requireAuth
iid <- instructorIdByIdent (userIdent user)
>>= maybe (setMessage "failed to retrieve instructor" >> notFound) pure
mciid <- if courseInstructor theclass == iid
then return Nothing
else runDB $ getBy (UniqueCoInstructor iid classkey)
let (Just tz) = tzByName . courseTimeZone $ theclass
localize (mdate,mtime) = case (mdate,mtime) of
(Just date, Just time) -> Just $ LocalTime date time
(Just date,_) -> Just $ LocalTime date (TimeOfDay 23 59 59)
_ -> Nothing
localdue = localize (mdue,mduetime)
localfrom = localize (mfrom,mfromtime)
localtill = localize (mtill,mtilltime)
info = unTextarea <$> massignmentdesc
theassigner = mciid
thename = documentFilename (entityVal doc)
asgned <- runDB $ selectList [AssignmentMetadataCourse ==. classkey] []
dupes <- runDB $ filter (\x -> documentFilename (entityVal x) == thename)
<$> selectList [DocumentId <-. map (assignmentMetadataDocument . entityVal) asgned] []
case mpass of
_ | not (null dupes) -> setMessage "Names for assignments must be unique within a course, and it looks like you already have an assignment with this name"
Nothing | mhidden == Just True || mlimit /= Nothing -> setMessage "Hidden and time-limited assignments must be password protected"
_ -> do success <- tryInsert $ AssignmentMetadata
{ assignmentMetadataDocument = entityKey doc
, assignmentMetadataDescription = info
, assignmentMetadataAssigner = entityKey <$> theassigner
, assignmentMetadataDuedate = localTimeToUTCTZ tz <$> localdue
, assignmentMetadataVisibleFrom = localTimeToUTCTZ tz <$> localfrom
, assignmentMetadataVisibleTill = localTimeToUTCTZ tz <$> localtill
, assignmentMetadataDate = subtime
, assignmentMetadataCourse = classkey
, assignmentMetadataAvailability =
case (mpass,mhidden,mlimit) of
(Nothing,_,_) -> Nothing
(Just txt, Just True, Nothing) -> Just (HiddenViaPassword txt)
(Just txt, Just True, Just min) -> Just (HiddenViaPasswordExpiring txt min)
(Just txt, _, Just min) -> Just (ViaPasswordExpiring txt min)
(Just txt, _, _) -> Just (ViaPassword txt)
}
case success of Just _ -> return ()
Nothing -> setMessage "This file has already been assigned for this course"
FormFailure s -> setMessage $ "Something went wrong: " ++ toMarkup (show s)
FormMissing -> return ()
case documentrslt of
FormSuccess (file, sharescope, docdesc, subtime, mtags) ->
do musr <- runDB $ getBy $ UniqueUser ident
let fn = fileName file
info = unTextarea <$> docdesc
Just uid = musr -- FIXME: catch Nothing here
success <- tryInsert $ Document
{ documentFilename = fn
, documentDate = subtime
, documentCreator = entityKey uid
, documentDescription = info
, documentScope = sharescope
}
case success of
Just k -> do saveTo ("documents" </> unpack ident) (unpack fn) file
runDB $ maybeDo mtags (\tags -> do
forM tags (\tag -> insert $ Tag k tag)
return ())
Nothing -> setMessage "You already have a shared document with this name."
FormFailure s -> setMessage $ "Something went wrong: " ++ toMarkup (show s)
FormMissing -> return ()
case newclassrslt of
FormSuccess (title, coursedesc, startdate, enddate, tzlabel) -> do
miid <- instructorIdByIdent ident
case miid of
Just iid ->
do let localize x = localTimeToUTCTZ (tzByLabel tzlabel) (LocalTime x midnight)
success <- tryInsert $ Course
{ courseTitle = title
, courseDescription = unTextarea <$> coursedesc
, courseInstructor = iid
, courseTextbookProblems = Nothing
, courseStartDate = localize startdate
, courseEndDate = localize enddate
, courseTotalPoints = 0
, courseTimeZone = toTZName tzlabel
}
case success of Just _ -> setMessage "Course Created"
Nothing -> setMessage "Could not save. Course titles must be unique. Consider adding your instutition or the current semester as a suffix."
Nothing -> setMessage "you're not an instructor!"
FormFailure s -> setMessage $ "Something went wrong: " ++ toMarkup (show s)
FormMissing -> return ()
case frombookrslt of
FormSuccess (Entity classkey theclass, theassignment, duedate, mduetime) -> runDB $ do
let Just tz = tzByName . courseTimeZone $ theclass
localdue = case mduetime of
Just time -> LocalTime duedate time
_ -> LocalTime duedate (TimeOfDay 23 59 59)
due = localTimeToUTCTZ tz localdue
case readAssignmentTable <$> courseTextbookProblems theclass of
Just assign -> update classkey [CourseTextbookProblems =. (Just $ BookAssignmentTable $ Data.IntMap.insert theassignment due assign)]
Nothing -> update classkey [CourseTextbookProblems =. (Just $ BookAssignmentTable $ Data.IntMap.fromList [(theassignment, due)])]
FormFailure s -> setMessage $ "Something went wrong: " ++ toMarkup (show s)
FormMissing -> return ()
case instructorrslt of
(FormSuccess (cidstring , Just iid)) ->
case readMaybe cidstring of
Just cid -> do success <- tryInsert $ CoInstructor iid cid
case success of Just _ -> setMessage "Added Co-Instructor!"
Nothing -> setMessage "Co-Instructor seems to already be added"
Nothing -> setMessage "Couldn't read cid string"
FormSuccess (_, Nothing) -> setMessage "iid missing"
FormFailure s -> setMessage $ "Something went wrong: " ++ toMarkup (show s)
FormMissing -> return ()
redirect $ InstructorR ident
postInstructorQueryR :: Text -> Handler Value
postInstructorQueryR ident = do
msg <- requireJsonBody :: Handler InstructorQuery
case msg of
QueryGrade uid cid -> do
score <- scoreByIdAndClassTotal cid uid
returnJson score
QueryScores uid cid -> do
score <- scoreByIdAndClassPerProblem cid uid
returnJson score
getInstructorR :: Text -> Handler Html
getInstructorR ident = do
musr <- runDB $ getBy $ UniqueUser ident
case musr of
Nothing -> defaultLayout nopage
(Just (Entity uid _)) -> do
UserData firstname lastname enrolledin _ _ <- checkUserData uid
classes <- classesByInstructorIdent ident
time <- liftIO getCurrentTime
let activeClasses = filter (\c -> courseEndDate (entityVal c) > time) classes
let inactiveClasses = filter (\c -> courseEndDate (entityVal c) < time) classes
docs <- documentsByInstructorIdent ident
instructors <- runDB $ selectList [UserDataInstructorId !=. Nothing] []
let labels = map labelOf $ take (length activeClasses) [1 ..]
classWidgets <- mapM (classWidget ident instructors) activeClasses
assignmentMetadata <- concat <$> mapM listAssignmentMetadata activeClasses --Get the metadata
assignmentDocs <- mapM (runDB . get) (map (\(Entity _ v, _) -> assignmentMetadataDocument v) assignmentMetadata)
documents <- runDB $ selectList [DocumentCreator ==. uid] []
problemSetLookup <- mapM (\c -> (,)
<$> pure (entityKey c)
<*> (maybe mempty readAssignmentTable
<$> (getProblemSets . entityKey $ c))
) classes
let assignmentLookup = zipWith (\(Entity k v,_) (Just d) ->
( k
, documentFilename d
, assignmentMetadataDate v
, assignmentMetadataCourse v
)) assignmentMetadata assignmentDocs
tagMap <- forM documents $ \doc -> do
tags <- runDB $ selectList [TagBearer ==. entityKey doc] []
return (entityKey doc, map (tagName . entityVal) tags)
let tagsOf d = lookup d tagMap
tagString d = case lookup d tagMap of Just tags -> intercalate "," tags; _ -> ""
(createAssignmentWidget,enctypeCreateAssignment) <- generateFormPost (identifyForm "uploadAssignment" $ uploadAssignmentForm activeClasses docs)
(uploadDocumentWidget,enctypeShareDocument) <- generateFormPost (identifyForm "uploadDocument" $ uploadDocumentForm)
(setBookAssignmentWidget,enctypeSetBookAssignment) <- generateFormPost (identifyForm "setBookAssignment" $ setBookAssignmentForm activeClasses)
(updateAssignmentWidget,enctypeUpdateAssignment) <- generateFormPost (identifyForm "updateAssignment" $ updateAssignmentForm)
(updateDocumentWidget,enctypeUpdateDocument) <- generateFormPost (identifyForm "updateDocument" $ updateDocumentForm)
(createCourseWidget,enctypeCreateCourse) <- generateFormPost (identifyForm "createCourse" createCourseForm)
(updateCourseWidget,enctypeUpdateCourse) <- generateFormPost (identifyForm "updateCourse" $ updateCourseForm)
defaultLayout $ do
addScript $ StaticR js_bootstrap_bundle_min_js
addScript $ StaticR js_tagsinput_js
addScript $ StaticR js_bootstrap_min_js
addStylesheet $ StaticR css_tagsinput_css
setTitle $ "Instructor Page for " ++ toMarkup firstname ++ " " ++ toMarkup lastname
$(widgetFile "instructor")
where labelOf = T.append "course-" . pack . show
mprobsOf course = readAssignmentTable <$> courseTextbookProblems course
nopage = [whamlet|
<div.container>
<p> Instructor not found.
|]
---------------------
-- Message Types --
---------------------
data InstructorDelete = DeleteAssignment AssignmentMetadataId
| DeleteProblems Text Int
| DeleteCourse Text
| DeleteDocument Text
| DropStudent Text
| DeleteCoInstructor CoInstructorId
deriving Generic
instance ToJSON InstructorDelete
instance FromJSON InstructorDelete
data InstructorQuery = QueryGrade UserId CourseId
| QueryScores UserId CourseId
deriving Generic
instance ToJSON InstructorQuery
instance FromJSON InstructorQuery
------------------
-- Components --
------------------
uploadAssignmentForm classes docs extra = do
(fileRes, fileView) <- mreq (selectFieldList docnames) (bfs ("Document" :: Text)) Nothing
(classRes, classView) <- mreq (selectFieldList classnames) (bfs ("Class" :: Text)) Nothing
(dueRes,dueView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Due Date"::Text)) Nothing
(duetimeRes, duetimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Due Time"::Text)) Nothing
(fromRes,fromView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Visible From Date"::Text)) Nothing
(fromtimeRes, fromtimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Visible From Time"::Text)) Nothing
(tillRes, tillView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Visible Until Date"::Text)) Nothing
(tilltimeRes,tilltimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Visible Until Time"::Text)) Nothing
(descRes,descView) <- mopt textareaField (bfs ("Assignment Description"::Text)) Nothing
(passRes,passView) <- mopt textField (bfs ("Password"::Text)) Nothing
(hiddRes,hiddView) <- mopt checkBoxField (bfs ("Hidden"::Text)) Nothing
(limitRes,limitView) <- mopt intField (bfs ("Limit"::Text)) Nothing
currentTime <- lift (liftIO getCurrentTime)
let theRes = (,,,,,,,,,,,,) <$> fileRes <*> classRes
<*> dueRes <*> duetimeRes
<*> fromRes <*> fromtimeRes
<*> tillRes <*> tilltimeRes
<*> descRes <*> passRes
<*> hiddRes <*> limitRes
<*> pure currentTime
let widget = do
[whamlet|
#{extra}
<h6>File to Assign
<div.row>
<div.form-group.col-md-12>
^{fvInput fileView}
<h6>Assign to
<div.row>
<div.form-group.col-md-12>
^{fvInput classView}
<h6> Due Date
<div.row>
<div.form-group.col-md-6>
^{fvInput dueView}
<div.form-group.col-md-6>
^{fvInput duetimeView}
<h6> Visible From
<div.row>
<div.form-group.col-md-6>
^{fvInput fromView}
<div.form-group.col-md-6>
^{fvInput fromtimeView}
<h6> Visible To
<div.row>
<div.form-group.col-md-6>
^{fvInput tillView}
<div.form-group.col-md-6>
^{fvInput tilltimeView}
<h6> Description
<div.row>
<div.form-group.col-md-12>
^{fvInput descView}
<h5> Access Control Settings
<div.row>
<div.col-md-6>
<h6> Password
<div.col-md-2>
<h6> Hide
<div.col-md-4>
<h6> Minutes Available
<div.row>
<div.form-group.col-md-6>
^{fvInput passView}
<div.form-group.col-md-2>
<span style="position:relative;top:7px">
Hidden:
<div style="display:inline-block;width:20px;position:relative;top:10px">
^{fvInput hiddView}
<div.form-group.col-md-4>
^{fvInput limitView}
<p style="color:gray"> Note: all access control options require that you set a password
|]
return (theRes,widget)
where classnames = map (\theclass -> (courseTitle . entityVal $ theclass, theclass)) classes
docnames = map (\thedoc -> (documentFilename . entityVal $ thedoc, thedoc)) docs
updateAssignmentForm extra = do
(assignmentRes,assignmentView) <- mreq assignmentId "" Nothing
(dueRes,dueView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Due Date"::Text)) Nothing
(duetimeRes, duetimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Due Time"::Text)) Nothing
(fromRes,fromView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Visible From Date"::Text)) Nothing
(fromtimeRes, fromtimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Visible From Time"::Text)) Nothing
(tillRes, tillView) <- mopt (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Visible Until Date"::Text)) Nothing
(tilltimeRes,tilltimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Visible Until Time"::Text)) Nothing
(descRes,descView) <- mopt textareaField (bfs ("Assignment Description"::Text)) Nothing
let theRes = (,,,,,,,) <$> assignmentRes
<*> dueRes <*> duetimeRes
<*> fromRes <*> fromtimeRes
<*> tillRes <*> tilltimeRes
<*> descRes
let widget = do
[whamlet|
#{extra}
^{fvInput assignmentView}
<h6> Due Date
<div.row>
<div.form-group.col-md-6>
^{fvInput dueView}
<div.form-group.col-md-6>
^{fvInput duetimeView}
<h6> Visible From
<div.row>
<div.form-group.col-md-6>
^{fvInput fromView}
<div.form-group.col-md-6>
^{fvInput fromtimeView}
<h6> Visible To
<div.row>
<div.form-group.col-md-6>
^{fvInput tillView}
<div.form-group.col-md-6>
^{fvInput tilltimeView}
<h6> Description
<div.row>
<div.form-group.col-md-12>
^{fvInput descView}
|]
return (theRes,widget)
where assignmentId :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m String
assignmentId = hiddenField
updateAssignmentModal form enc = [whamlet|
<div class="modal fade" id="updateAssignmentData" tabindex="-1" role="dialog" aria-labelledby="updateAssignmentDataLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="updateAssignmentDataLabel">Update Assignment Data</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
                                        <span aria-hidden="true">&times;</span>
<div class="modal-body">
<form#updateAssignment enctype=#{enc}>
^{form}
<div.form-group>
<input.btn.btn-primary type=submit value="update">
|]
uploadDocumentForm = renderBootstrap3 BootstrapBasicForm $ (,,,,)
<$> fileAFormReq (bfs ("Document" :: Text))
<*> areq (selectFieldList scopes) (bfs ("Share With " :: Text)) Nothing
<*> aopt textareaField (bfs ("Description"::Text)) Nothing
<*> lift (liftIO getCurrentTime)
<*> aopt tagField "Tags" Nothing
where scopes :: [(Text,SharingScope)]
scopes = [("Everyone (Visible to everyone)", Public)
,("Instructors (Visible to all instructors)", InstructorsOnly)
,("Link Only (Available, but visible to no one)", LinkOnly)
,("Private (Unavailable)", Private)
]
updateDocumentForm = renderBootstrap3 BootstrapBasicForm $ (,,,,)
<$> areq docId "" Nothing
<*> aopt (selectFieldList scopes) (bfs ("Share With " :: Text)) Nothing
<*> aopt textareaField (bfs ("Description"::Text)) Nothing
<*> fileAFormOpt (bfs ("Replacement File" :: Text))
<*> aopt tagField "Tags" Nothing
where docId :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m String
docId = hiddenField
scopes :: [(Text,SharingScope)]
scopes = [("Everyone (Visible to everyone)", Public)
,("Instructors (Visible to all instructors)", InstructorsOnly)
,("Link Only (Available, but visible to no one)", LinkOnly)
,("Private (Unavailable)", Private)
]
tagField :: Field Handler [Text]
tagField = Field
{ fieldParse = \raw _ -> case raw of [a] -> return $ Right $ Just (T.splitOn "," a);
_ -> return $ Right Nothing
, fieldView = \idAttr nameAttr _ _ _ ->
[whamlet|
<input id=#{idAttr} name=#{nameAttr} data-role="tagsinput">
|]
, fieldEnctype = UrlEncoded
}
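-- Illustrative note: the tags input submits one comma-separated string, so a
-- raw value of ["logic,proofs"] parses to Just ["logic","proofs"], while any
-- other shape of input parses to Nothing (treated as "no tags").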
updateDocumentModal form enc = [whamlet|
<div class="modal fade" id="updateDocumentData" tabindex="-1" role="dialog" aria-labelledby="updateDocumentLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="updateDocumentLabel">Update Shared Document</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
                                        <span aria-hidden="true">&times;</span>
<div class="modal-body">
<form#updateDocument enctype=#{enc}>
^{form}
<div.form-group>
<input.btn.btn-primary type=submit value="update">
|]
setBookAssignmentForm classes extra = do
(classRes, classView) <- mreq (selectFieldList classnames) (bfs ("Class" :: Text)) Nothing
(probRes, probView) <- mreq (selectFieldList chapters) (bfs ("Problem Set" :: Text)) Nothing
(dueRes, dueView) <- mreq (jqueryDayField def) (withPlaceholder "Date" $ bfs ("Due Date"::Text)) Nothing
(duetimeRes, duetimeView) <- mopt timeFieldTypeTime (withPlaceholder "Time" $ bfs ("Due Time"::Text)) Nothing
let theRes = (,,,) <$> classRes <*> probRes <*> dueRes <*> duetimeRes
let widget = do
[whamlet|
#{extra}
<h6>Assign to
<div.row>
<div.form-group.col-md-12>
^{fvInput classView}
<h6> Problem Set
<div.row>
<div.form-group.col-md-12>
^{fvInput probView}
<h6> Due Date
<div.row>
<div.form-group.col-md-6>
^{fvInput dueView}
<div.form-group.col-md-6>
^{fvInput duetimeView}
|]
return (theRes, widget)
where chapters = map (\x -> ("Problem Set " ++ pack (show x),x)) [1..17] :: [(Text,Int)]
classnames = map (\theclass -> (courseTitle . entityVal $ theclass, theclass)) classes
createCourseForm = renderBootstrap3 BootstrapBasicForm $ (,,,,)
<$> areq textField (bfs ("Title" :: Text)) Nothing
<*> aopt textareaField (bfs ("Course Description"::Text)) Nothing
<*> areq (jqueryDayField def) (bfs ("Start Date"::Text)) Nothing
<*> areq (jqueryDayField def) (bfs ("End Date"::Text)) Nothing
<*> areq (selectFieldList zones) (bfs ("TimeZone"::Text)) Nothing
where zones = map (\(x,y,_) -> (decodeUtf8 x,y)) (rights tzDescriptions)
updateCourseModal form enc = [whamlet|
<div class="modal fade" id="updateCourseData" tabindex="-1" role="dialog" aria-labelledby="updateCourseDataLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="updateCourseDataLabel">Update Course Data</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
                                        <span aria-hidden="true">&times;</span>
<div class="modal-body">
<form#updateCourse enctype=#{enc}>
^{form}
<div.form-group>
<input.btn.btn-primary type=submit value="update">
|]
updateCourseForm = renderBootstrap3 BootstrapBasicForm $ (,,,,)
<$> areq courseId "" Nothing
<*> aopt textareaField (bfs ("Course Description"::Text)) Nothing
<*> aopt (jqueryDayField def) (bfs ("Start Date"::Text)) Nothing
<*> aopt (jqueryDayField def) (bfs ("End Date"::Text)) Nothing
<*> aopt intField (bfs ("Total Points for Course"::Text)) Nothing
where courseId :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m String
courseId = hiddenField
addCoInstructorForm instructors cid extra = do
(courseRes,courseView) <- mreq courseId "" Nothing
(instRes, instView) <- mreq (selectFieldList $ map toItem instructors) (bfs ("Instructor" :: Text)) Nothing
let theRes = (,) <$> courseRes <*> instRes
widget = do
[whamlet|
#{extra}
^{fvInput courseView}
<div.input-group>
^{fvInput instView}
<div.input-group>
<input.btn.btn-primary onclick="submitAddInstructor(this,'#{cid}')" value="Add Co-Instructor">
|]
return (theRes,widget)
where courseId :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m String
courseId = hiddenField
toItem (Entity _ i) = (userDataLastName i ++ ", " ++ userDataFirstName i, userDataInstructorId i)
saveTo thedir fn file = do
datadir <- appDataRoot <$> (appSettings <$> getYesod)
let path = datadir </> thedir
liftIO $
do createDirectoryIfMissing True path
e <- doesFileExist (path </> fn)
if e then removeFile (path </> fn) else return ()
fileMove file (path </> fn)
classWidget :: Text -> [Entity UserData] -> Entity Course -> HandlerT App IO Widget
classWidget ident instructors classent = do
let cid = entityKey classent
course = entityVal classent
mprobs = readAssignmentTable <$> courseTextbookProblems course :: Maybe (IntMap UTCTime)
coInstructors <- runDB $ selectList [CoInstructorCourse ==. cid] []
coInstructorUD <- mapM udByInstructorId (map (coInstructorIdent . entityVal) coInstructors)
theInstructorUD <- entityVal <$> udByInstructorId (courseInstructor course)
allUserData <- map entityVal <$> (runDB $ selectList [UserDataEnrolledIn ==. Just cid] [])
(addCoInstructorWidget,enctypeAddCoInstructor) <- generateFormPost (identifyForm "addCoinstructor" $ addCoInstructorForm instructors (show cid))
asmd <- runDB $ selectList [AssignmentMetadataCourse ==. cid] []
asDocs <- mapM (runDB . get) (map (assignmentMetadataDocument . entityVal) asmd)
let allUids = map userDataUserId allUserData
musers <- mapM (\x -> runDB (get x)) allUids
let users = catMaybes musers
let numberOfUsers = length allUids
usersAndData = zip users allUserData
sortedUsersAndData = let lnOf (_,UserData _ ln _ _ _) = ln
in sortBy (\x y -> compare (lnOf x) (lnOf y)) usersAndData
course <- runDB $ get cid
>>= maybe (setMessage "failed to get course" >> notFound) pure
return [whamlet|
<h2>Assignments
<div.scrollbox>
<table.table.table-striped>
<thead>
<th> Assignment
<th> Due Date
<tbody>
$maybe probs <- mprobs
$forall (set,due) <- Data.IntMap.toList probs
<tr>
<td>Problem Set #{show set}
<td>#{dateDisplay due course}
$forall (Entity k a, Just d) <- zip asmd asDocs
<tr>
<td>
<a href=@{CourseAssignmentR (courseTitle course) (documentFilename d)}>
#{documentFilename d}
$maybe due <- assignmentMetadataDuedate a
<td>#{dateDisplay due course}
$nothing
<td>No Due Date
<h2>Students
<div.scrollbox
data-studentnumber="#{show numberOfUsers}"
data-cid="#{jsonSerialize cid}">
<table.table.table-striped >
<thead>
<th> Registered Student
<th> Student Name
<th> Total Score
<th> Action
<tbody>
$forall (u,UserData fn ln _ _ uid) <- sortedUsersAndData
<tr#student-#{userIdent u}>
<td>
<a href=@{UserR (userIdent u)}>#{userIdent u}
<td>
#{ln}, #{fn}
<td.async
data-query="#{jsonSerialize $ QueryScores uid cid}"
data-email="#{userIdent u}"
data-fn="#{fn}"
data-ln="#{ln}"
data-uid="#{jsonSerialize uid}" >
                                    <span.loading>&mdash;
<td>
<button.btn.btn-sm.btn-secondary type="button" title="Drop #{fn} #{ln} from class"
onclick="tryDropStudent('#{jsonSerialize $ DropStudent $ userIdent u}')">
<i.fa.fa-trash-o>
<button.btn.btn-sm.btn-secondary type="button" title="Email #{fn} #{ln}"
onclick="location.href='mailto:#{userIdent u}'">
<i.fa.fa-envelope-o>
<h2>Course Data
<dl.row>
<dt.col-sm-3>Primary Instructor
<dd.col-sm-9>#{userDataLastName theInstructorUD}, #{userDataFirstName theInstructorUD}
<dt.col-sm-3>Course Title
<dd.col-sm-9>#{courseTitle course}
$maybe desc <- courseDescription course
<dd.col-sm-9.offset-sm-3>#{desc}
<dt.col-sm-3>Points Available
<dd.col-sm-9>#{courseTotalPoints course}
<dt.col-sm-3>Number of Students
<dd.col-sm-9>#{numberOfUsers} (Loaded:
<span id="loaded-#{jsonSerialize cid}"> 0#
)
<dt.col-sm-3>Start Date
<dd.col-sm-9>#{dateDisplay (courseStartDate course) course}
<dt.col-sm-3>End Date
<dd.col-sm-9>#{dateDisplay (courseEndDate course) course}
<dt.col-sm-3>Time Zone
<dd.col-sm-9>#{decodeUtf8 $ courseTimeZone course}
<dt.col-sm-3>Enrollment Link
<dd.col-sm-9>
<a href="@{EnrollR (courseTitle course)}">@{EnrollR (courseTitle course)}
$if null coInstructors
$else
<dt.col-sm-3>Co-Instructors
<dd.col-sm-9>
$forall (Entity _ coud, Entity ciid _) <- zip coInstructorUD coInstructors
<div#Co-Instructor-#{userDataLastName coud}-#{userDataFirstName coud}>
<i.fa.fa-trash-o
style="cursor:pointer"
title="Remove #{userDataFirstName coud} #{userDataLastName coud} as Co-Instructor"
onclick="tryDeleteCoInstructor('#{jsonSerialize $ DeleteCoInstructor ciid}','#{userDataLastName coud}', '#{userDataFirstName coud}')">
<span>#{userDataFirstName coud},
<span> #{userDataLastName coud}
<div.row>
<div.col-xl-6.col-lg-12 style="padding:5px">
<form.form-inline method=post enctype=#{enctypeAddCoInstructor}>
^{addCoInstructorWidget}
<div.col-xl-6.col-lg-12 style="padding:5px">
<div.float-xl-right>
<button.btn.btn-secondary style="width:160px" type="button"
onclick="modalEditCourse('#{show cid}','#{maybe "" sanatizeForJS (unpack <$> courseDescription course)}','#{dateDisplay (courseStartDate course) course}','#{dateDisplay (courseEndDate course) course}',#{courseTotalPoints course})">
Edit Information
<button.btn.btn-secondary style="width:160px" type="button"
onclick="exportGrades('#{jsonSerialize cid}')";">
Export Grades
<button.btn.btn-danger style="width:160px" type="button"
onclick="tryDeleteCourse('#{jsonSerialize $ DeleteCourse (courseTitle course)}')">
Delete Course
|]
dateDisplay utc course = case tzByName $ courseTimeZone course of
Just tz -> formatTime defaultTimeLocale "%F %R %Z" $ utcToZonedTime (timeZoneForUTCTime tz utc) utc
Nothing -> formatTime defaultTimeLocale "%F %R UTC" $ utc
maybeDo mv f = case mv of Just v -> f v; _ -> return ()
sanatizeForJS ('\n':xs) = '\\' : 'n' : sanatizeForJS xs
sanatizeForJS ('\\':xs) = '\\' : '\\' : sanatizeForJS xs
sanatizeForJS ('\'':xs) = '\\' : '\'' : sanatizeForJS xs
sanatizeForJS ('"':xs) = '\\' : '"' : sanatizeForJS xs
sanatizeForJS ('\r':xs) = sanatizeForJS xs
sanatizeForJS (x:xs) = x : sanatizeForJS xs
sanatizeForJS [] = []
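-- Illustrative note: this escapes characters that would otherwise break the
-- single-quoted JavaScript strings built in the onclick attributes above,
-- e.g. sanatizeForJS "a\nb'c" == "a\\nb\\'c", and carriage returns are dropped.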
-- TODO compare directory contents with database results
listAssignmentMetadata theclass = do asmd <- runDB $ selectList [AssignmentMetadataCourse ==. entityKey theclass] []
return $ map (\a -> (a,theclass)) asmd
| gleachkr/Carnap | Carnap-Server/Handler/Instructor.hs | gpl-3.0 | 48,694 | 0 | 41 | 20,263 | 8,739 | 4,416 | 4,323 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE PostfixOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
-- import Data.Array
--
import Data.Enumerated.Partition
import Data.Enumerated.Sequence
import Data.Fin1
import Data.FromTuple
import Data.PeanoNat
import Data.Permute
--
main = do
putStrLn "permutation test"
r <- runEitherT $ do
p1 <- hoistEither (fromTuple (1,2,4,3,5)) -- (mkPerm arr1)
p2 <- hoistEither (fromTuple (1,3,2,4,5)) -- (mkPerm arr2)
let p = p1 · p2
g = 2
(gen :: Generator 2 5) <- hoistEither (mkGen ( p <| p1 <| empty ))
liftIO $ do
print (firstUnfixed p1)
print (firstUnfixed p)
print (chooseUnfixed gen)
print (splitFixed gen)
-- NSeq test
lst1 :: NSeq 2 Int <- hoistEither (fromTuple (1,2))
lst2 :: NSeq 3 Int <- hoistEither (fromTuple (3,4,5))
liftIO $ do
print lst1
print lst2
print (singleton 0 `Cons` lst1 `Cons` lst2 `Cons` Nil)
-- print (Nil :: Partition '[] Int)
print (singleton 0 `PCons` lst1 `PCons` lst2 `PCons` PNil :: Partition [ '(FromNat 1, NSeq 1 Int)
, '(FromNat 2, NSeq 2 Int)
, '(FromNat 3, NSeq 3 Int) ] )
return ()
either print (const (return ())) r
| wavewave/qft | old/exe/qft5.hs | gpl-3.0 | 1,539 | 0 | 20 | 517 | 494 | 257 | 237 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Books.Bookshelves.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of public bookshelves for the specified user.
--
-- /See:/ <https://developers.google.com/books/docs/v1/getting_started Books API Reference> for @books.bookshelves.list@.
module Network.Google.Resource.Books.Bookshelves.List
(
-- * REST Resource
BookshelvesListResource
-- * Creating a Request
, bookshelvesList
, BookshelvesList
-- * Request Lenses
, blUserId
, blSource
) where
import Network.Google.Books.Types
import Network.Google.Prelude
-- | A resource alias for @books.bookshelves.list@ method which the
-- 'BookshelvesList' request conforms to.
type BookshelvesListResource =
"books" :>
"v1" :>
"users" :>
Capture "userId" Text :>
"bookshelves" :>
QueryParam "source" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Bookshelves
-- | Retrieves a list of public bookshelves for the specified user.
--
-- /See:/ 'bookshelvesList' smart constructor.
data BookshelvesList = BookshelvesList'
{ _blUserId :: !Text
, _blSource :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BookshelvesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blUserId'
--
-- * 'blSource'
bookshelvesList
:: Text -- ^ 'blUserId'
-> BookshelvesList
bookshelvesList pBlUserId_ =
BookshelvesList'
{ _blUserId = pBlUserId_
, _blSource = Nothing
}
-- | ID of user for whom to retrieve bookshelves.
blUserId :: Lens' BookshelvesList Text
blUserId = lens _blUserId (\ s a -> s{_blUserId = a})
-- | String to identify the originator of this request.
blSource :: Lens' BookshelvesList (Maybe Text)
blSource = lens _blSource (\ s a -> s{_blSource = a})
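-- Example usage (not part of the generated code; '&' and '?~' are lens
-- operators, and the argument values below are made up):
--
-- > bookshelvesList "someUserId" & blSource ?~ "my-application"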
instance GoogleRequest BookshelvesList where
type Rs BookshelvesList = Bookshelves
type Scopes BookshelvesList =
'["https://www.googleapis.com/auth/books"]
requestClient BookshelvesList'{..}
= go _blUserId _blSource (Just AltJSON) booksService
where go
= buildClient
(Proxy :: Proxy BookshelvesListResource)
mempty
| rueshyna/gogol | gogol-books/gen/Network/Google/Resource/Books/Bookshelves/List.hs | mpl-2.0 | 3,053 | 0 | 14 | 714 | 386 | 231 | 155 | 58 | 1 |