code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE Haskell2010 #-}
-- | Math (display) for 'normalDensity'
--
-- \[
-- \int_{-\infty}^{\infty} e^{-x^2/2} = \sqrt{2\pi}
-- \]
--
-- \(\int_{-\infty}^{\infty} e^{-x^2/2} = \sqrt{2\pi}\)
module Math where
-- | Math (inline) for 'normalDensity'
-- \(\int_{-\infty}^{\infty} e^{-x^2/2} = \sqrt{2\pi}\)
-- \[\int_{-\infty}^{\infty} e^{-x^2/2} = \sqrt{2\pi}\]
f = 5
| haskell/haddock | html-test/src/Math.hs | bsd-2-clause | 372 | 0 | 4 | 50 | 20 | 17 | 3 | 3 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
#endif
{-# OPTIONS_GHC -fno-warn-dodgy-exports #-}
module Text.RE.PCRE
(
-- * Tutorial
-- $tutorial
-- * The Overloaded Match Operators
(*=~)
, (?=~)
, (=~)
, (=~~)
-- * The Toolkit
-- $toolkit
, module Text.RE
-- * The 'RE' Type
-- $re
, module Text.RE.PCRE.RE
-- * The Operator Instances
-- $instances
, module Text.RE.PCRE.ByteString
, module Text.RE.PCRE.ByteString.Lazy
, module Text.RE.PCRE.Sequence
, module Text.RE.PCRE.String
) where
import qualified Text.Regex.Base as B
import Text.RE
import Text.RE.Internal.AddCaptureNames
import Text.RE.PCRE.RE
import qualified Text.Regex.PCRE as PCRE
import Text.RE.PCRE.ByteString()
import Text.RE.PCRE.ByteString.Lazy()
import Text.RE.PCRE.Sequence()
import Text.RE.PCRE.String()
-- | find all matches in text
(*=~) :: IsRegex RE s
=> s
-> RE
-> Matches s
(*=~) bs rex = addCaptureNamesToMatches (reCaptureNames rex) $ matchMany rex bs
-- | find first match in text
(?=~) :: IsRegex RE s
=> s
-> RE
-> Match s
(?=~) bs rex = addCaptureNamesToMatch (reCaptureNames rex) $ matchOnce rex bs
-- | the regex-base polymorphic match operator
(=~) :: ( B.RegexContext PCRE.Regex s a
, B.RegexMaker PCRE.Regex PCRE.CompOption PCRE.ExecOption s
)
=> s
-> RE
-> a
(=~) bs rex = B.match (reRegex rex) bs
-- | the regex-base monadic, polymorphic match operator
(=~~) :: ( Monad m
, B.RegexContext PCRE.Regex s a
, B.RegexMaker PCRE.Regex PCRE.CompOption PCRE.ExecOption s
)
=> s
-> RE
-> m a
(=~~) bs rex = B.matchM (reRegex rex) bs
-- $tutorial
-- We have a regex tutorial at <http://tutorial.regex.uk>. These API
-- docs are mainly for reference.
-- $toolkit
--
-- Beyond the above match operators and the regular expression type
-- below, "Text.RE" contains the toolkit for replacing captures,
-- specifying options, etc.
-- $re
--
-- "Text.RE.TDFA.RE" contains the toolkit specific to the 'RE' type,
-- the type generated by the gegex compiler.
-- $instances
--
-- These modules merely provide the instances for the above regex
-- match operators at the various text types.
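-- A minimal usage sketch (not part of the original module), assuming the 're'
-- quasi-quoter from "Text.RE.PCRE.RE" and the 'countMatches' \/ 'matchedText'
-- helpers re-exported from "Text.RE":
--
-- > countFoos :: String -> Int
-- > countFoos txt = countMatches $ txt *=~ [re|foo|]
-- >
-- > firstFoo :: String -> Maybe String
-- > firstFoo txt = matchedText $ txt ?=~ [re|foo|]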
| cdornan/idiot | Text/RE/PCRE.hs | bsd-3-clause | 2,481 | 0 | 8 | 656 | 466 | 292 | 174 | 47 | 1 |
{-|
Module : Idris.IdeMode
Description : Idris' IDE Mode
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
{-# LANGUAGE FlexibleInstances, IncoherentInstances, PatternGuards #-}
module Idris.IdeMode(parseMessage, convSExp, WhatDocs(..), IdeModeCommand(..), sexpToCommand, toSExp, SExp(..), SExpable, Opt(..), ideModeEpoch, getLen, getNChar, sExpToString) where
import Idris.Core.Binary ()
import Idris.Core.TT
import Control.Applicative hiding (Const)
import qualified Data.Binary as Binary
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Lazy as Lazy
import qualified Data.ByteString.UTF8 as UTF8
import Data.List
import Data.Maybe (isJust)
import qualified Data.Text as T
import Numeric
import System.IO
import Text.Printf
import Text.Trifecta hiding (Err)
import Text.Trifecta.Delta
getNChar :: Handle -> Int -> String -> IO (String)
getNChar _ 0 s = return (reverse s)
getNChar h n s = do c <- hGetChar h
getNChar h (n - 1) (c : s)
getLen :: Handle -> IO (Either Err Int)
getLen h = do s <- getNChar h 6 ""
case readHex s of
((num, ""):_) -> return $ Right num
_ -> return $ Left . Msg $ "Couldn't read length " ++ s
data SExp = SexpList [SExp]
| StringAtom String
| BoolAtom Bool
| IntegerAtom Integer
| SymbolAtom String
deriving ( Eq, Show )
sExpToString :: SExp -> String
sExpToString (StringAtom s) = "\"" ++ escape s ++ "\""
sExpToString (BoolAtom True) = ":True"
sExpToString (BoolAtom False) = ":False"
sExpToString (IntegerAtom i) = printf "%d" i
sExpToString (SymbolAtom s) = ":" ++ s
sExpToString (SexpList l) = "(" ++ intercalate " " (map sExpToString l) ++ ")"
class SExpable a where
toSExp :: a -> SExp
instance SExpable SExp where
toSExp a = a
instance SExpable Bool where
toSExp True = BoolAtom True
toSExp False = BoolAtom False
instance SExpable String where
toSExp s = StringAtom s
instance SExpable Integer where
toSExp n = IntegerAtom n
instance SExpable Int where
toSExp n = IntegerAtom (toInteger n)
instance SExpable Name where
toSExp s = StringAtom (show s)
instance (SExpable a) => SExpable (Maybe a) where
toSExp Nothing = SexpList [SymbolAtom "Nothing"]
toSExp (Just a) = SexpList [SymbolAtom "Just", toSExp a]
instance (SExpable a) => SExpable [a] where
toSExp l = SexpList (map toSExp l)
instance (SExpable a, SExpable b) => SExpable (a, b) where
toSExp (l, r) = SexpList [toSExp l, toSExp r]
instance (SExpable a, SExpable b, SExpable c) => SExpable (a, b, c) where
toSExp (l, m, n) = SexpList [toSExp l, toSExp m, toSExp n]
instance (SExpable a, SExpable b, SExpable c, SExpable d) => SExpable (a, b, c, d) where
toSExp (l, m, n, o) = SexpList [toSExp l, toSExp m, toSExp n, toSExp o]
instance (SExpable a, SExpable b, SExpable c, SExpable d, SExpable e) =>
SExpable (a, b, c, d, e) where
toSExp (l, m, n, o, p) = SexpList [toSExp l, toSExp m, toSExp n, toSExp o, toSExp p]
instance SExpable NameOutput where
toSExp TypeOutput = SymbolAtom "type"
toSExp FunOutput = SymbolAtom "function"
toSExp DataOutput = SymbolAtom "data"
toSExp MetavarOutput = SymbolAtom "metavar"
toSExp PostulateOutput = SymbolAtom "postulate"
maybeProps :: SExpable a => [(String, Maybe a)] -> [(SExp, SExp)]
maybeProps [] = []
maybeProps ((n, Just p):ps) = (SymbolAtom n, toSExp p) : maybeProps ps
maybeProps ((n, Nothing):ps) = maybeProps ps
constTy :: Const -> String
constTy (I _) = "Int"
constTy (BI _) = "Integer"
constTy (Fl _) = "Double"
constTy (Ch _) = "Char"
constTy (Str _) = "String"
constTy (B8 _) = "Bits8"
constTy (B16 _) = "Bits16"
constTy (B32 _) = "Bits32"
constTy (B64 _) = "Bits64"
constTy _ = "Type"
namespaceOf :: Name -> Maybe String
namespaceOf (NS _ ns) = Just (intercalate "." $ reverse (map T.unpack ns))
namespaceOf _ = Nothing
instance SExpable OutputAnnotation where
toSExp (AnnName n ty d t) = toSExp $ [(SymbolAtom "name", StringAtom (show n)),
(SymbolAtom "implicit", BoolAtom False),
(SymbolAtom "key", StringAtom (encodeName n))] ++
maybeProps [("decor", ty)] ++
maybeProps [("doc-overview", d), ("type", t)] ++
maybeProps [("namespace", namespaceOf n)]
toSExp (AnnBoundName n imp) = toSExp [(SymbolAtom "name", StringAtom (show n)),
(SymbolAtom "decor", SymbolAtom "bound"),
(SymbolAtom "implicit", BoolAtom imp)]
toSExp (AnnConst c) = toSExp [(SymbolAtom "decor",
SymbolAtom (if constIsType c then "type" else "data")),
(SymbolAtom "type", StringAtom (constTy c)),
(SymbolAtom "doc-overview", StringAtom (constDocs c)),
(SymbolAtom "name", StringAtom (show c))]
toSExp (AnnData ty doc) = toSExp [(SymbolAtom "decor", SymbolAtom "data"),
(SymbolAtom "type", StringAtom ty),
(SymbolAtom "doc-overview", StringAtom doc)]
toSExp (AnnType name doc) = toSExp $ [(SymbolAtom "decor", SymbolAtom "type"),
(SymbolAtom "type", StringAtom "Type"),
(SymbolAtom "doc-overview", StringAtom doc)] ++
if not (null name) then [(SymbolAtom "name", StringAtom name)] else []
toSExp AnnKeyword = toSExp [(SymbolAtom "decor", SymbolAtom "keyword")]
toSExp (AnnFC fc) = toSExp [(SymbolAtom "source-loc", toSExp fc)]
toSExp (AnnTextFmt fmt) = toSExp [(SymbolAtom "text-formatting", SymbolAtom format)]
where format = case fmt of
BoldText -> "bold"
ItalicText -> "italic"
UnderlineText -> "underline"
toSExp (AnnLink url) = toSExp [(SymbolAtom "link-href", StringAtom url)]
toSExp (AnnTerm bnd tm)
| termSmallerThan 1000 tm = toSExp [(SymbolAtom "tt-term", StringAtom (encodeTerm bnd tm))]
| otherwise = SexpList []
toSExp (AnnSearchResult ordr) = toSExp [(SymbolAtom "doc-overview",
StringAtom ("Result type is " ++ descr))]
where descr = case ordr of
EQ -> "isomorphic"
LT -> "more general than searched type"
GT -> "more specific than searched type"
toSExp (AnnErr e) = toSExp [(SymbolAtom "error", StringAtom (encodeErr e))]
toSExp (AnnNamespace ns file) =
toSExp $ [(SymbolAtom "namespace", StringAtom (intercalate "." (map T.unpack ns)))] ++
[(SymbolAtom "decor", SymbolAtom $ if isJust file then "module" else "namespace")] ++
maybeProps [("source-file", file)]
toSExp AnnQuasiquote = toSExp [(SymbolAtom "quasiquotation", True)]
toSExp AnnAntiquote = toSExp [(SymbolAtom "antiquotation", True)]
encodeName :: Name -> String
encodeName n = UTF8.toString . Base64.encode . Lazy.toStrict . Binary.encode $ n
encodeTerm :: [(Name, Bool)] -> Term -> String
encodeTerm bnd tm = UTF8.toString . Base64.encode . Lazy.toStrict . Binary.encode $
(bnd, tm)
decodeTerm :: String -> ([(Name, Bool)], Term)
decodeTerm = Binary.decode . Lazy.fromStrict . Base64.decodeLenient . UTF8.fromString
encodeErr :: Err -> String
encodeErr e = UTF8.toString . Base64.encode . Lazy.toStrict . Binary.encode $ e
decodeErr :: String -> Err
decodeErr = Binary.decode . Lazy.fromStrict . Base64.decodeLenient . UTF8.fromString
instance SExpable FC where
toSExp (FC f (sl, sc) (el, ec)) =
toSExp ((SymbolAtom "filename", StringAtom f),
(SymbolAtom "start", IntegerAtom (toInteger sl), IntegerAtom (toInteger sc)),
(SymbolAtom "end", IntegerAtom (toInteger el), IntegerAtom (toInteger ec)))
toSExp NoFC = toSExp ([] :: [String])
toSExp (FileFC f) = toSExp [(SymbolAtom "filename", StringAtom f)]
escape :: String -> String
escape = concatMap escapeChar
where
escapeChar '\\' = "\\\\"
escapeChar '"' = "\\\""
escapeChar c = [c]
pSExp = do xs <- between (char '(') (char ')') (pSExp `sepBy` (char ' '))
return (SexpList xs)
<|> atom
atom = do string "nil"; return (SexpList [])
<|> do char ':'; x <- atomC; return x
<|> do char '"'; xs <- many quotedChar; char '"'; return (StringAtom xs)
<|> do ints <- some digit
case readDec ints of
((num, ""):_) -> return (IntegerAtom (toInteger num))
_ -> return (StringAtom ints)
atomC = do string "True"; return (BoolAtom True)
<|> do string "False"; return (BoolAtom False)
<|> do xs <- many (noneOf " \n\t\r\"()"); return (SymbolAtom xs)
quotedChar = try (string "\\\\" >> return '\\')
<|> try (string "\\\"" >> return '"')
<|> noneOf "\""
parseSExp :: String -> Result SExp
parseSExp = parseString pSExp (Directed (UTF8.fromString "(unknown)") 0 0 0 0)
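-- A hypothetical illustration (not part of the original module) of the wire
-- syntax accepted by the parser above:
--
-- > parseSExp "(:interpret \"2+2\")"
-- >   -- Success (SexpList [SymbolAtom "interpret", StringAtom "2+2"])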
data Opt = ShowImpl | ErrContext deriving Show
data WhatDocs = Overview | Full
data IdeModeCommand = REPLCompletions String
| Interpret String
| TypeOf String
| CaseSplit Int String
| AddClause Int String
| AddProofClause Int String
| AddMissing Int String
| MakeWithBlock Int String
| MakeCaseBlock Int String
| ProofSearch Bool Int String [String] (Maybe Int) -- ^^ Recursive?, line, name, hints, depth
| MakeLemma Int String
| LoadFile String (Maybe Int)
| DocsFor String WhatDocs
| Apropos String
| GetOpts
| SetOpt Opt Bool
| Metavariables Int -- ^^ the Int is the column count for pretty-printing
| WhoCalls String
| CallsWho String
| BrowseNS String
| TermNormalise [(Name, Bool)] Term
| TermShowImplicits [(Name, Bool)] Term
| TermNoImplicits [(Name, Bool)] Term
| TermElab [(Name, Bool)] Term
| PrintDef String
| ErrString Err
| ErrPPrint Err
| GetIdrisVersion
sexpToCommand :: SExp -> Maybe IdeModeCommand
sexpToCommand (SexpList ([x])) = sexpToCommand x
sexpToCommand (SexpList [SymbolAtom "interpret", StringAtom cmd]) = Just (Interpret cmd)
sexpToCommand (SexpList [SymbolAtom "repl-completions", StringAtom prefix]) = Just (REPLCompletions prefix)
sexpToCommand (SexpList [SymbolAtom "load-file", StringAtom filename, IntegerAtom line]) = Just (LoadFile filename (Just (fromInteger line)))
sexpToCommand (SexpList [SymbolAtom "load-file", StringAtom filename]) = Just (LoadFile filename Nothing)
sexpToCommand (SexpList [SymbolAtom "type-of", StringAtom name]) = Just (TypeOf name)
sexpToCommand (SexpList [SymbolAtom "case-split", IntegerAtom line, StringAtom name]) = Just (CaseSplit (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "add-clause", IntegerAtom line, StringAtom name]) = Just (AddClause (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "add-proof-clause", IntegerAtom line, StringAtom name]) = Just (AddProofClause (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "add-missing", IntegerAtom line, StringAtom name]) = Just (AddMissing (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "make-with", IntegerAtom line, StringAtom name]) = Just (MakeWithBlock (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "make-case", IntegerAtom line, StringAtom name]) = Just (MakeCaseBlock (fromInteger line) name)
-- The Boolean in ProofSearch means "search recursively"
-- If it's False, that means "refine", i.e. apply the name and fill in any
-- arguments which can be done by unification.
sexpToCommand (SexpList (SymbolAtom "proof-search" : IntegerAtom line : StringAtom name : rest))
| [] <- rest = Just (ProofSearch True (fromInteger line) name [] Nothing)
| [SexpList hintexp] <- rest
, Just hints <- getHints hintexp = Just (ProofSearch True (fromInteger line) name hints Nothing)
| [SexpList hintexp, IntegerAtom depth] <- rest
, Just hints <- getHints hintexp = Just (ProofSearch True (fromInteger line) name hints (Just (fromInteger depth)))
where getHints = mapM (\h -> case h of
StringAtom s -> Just s
_ -> Nothing)
sexpToCommand (SexpList [SymbolAtom "make-lemma", IntegerAtom line, StringAtom name]) = Just (MakeLemma (fromInteger line) name)
sexpToCommand (SexpList [SymbolAtom "refine", IntegerAtom line, StringAtom name, StringAtom hint]) = Just (ProofSearch False (fromInteger line) name [hint] Nothing)
sexpToCommand (SexpList [SymbolAtom "docs-for", StringAtom name]) = Just (DocsFor name Full)
sexpToCommand (SexpList [SymbolAtom "docs-for", StringAtom name, SymbolAtom s])
| Just w <- lookup s opts = Just (DocsFor name w)
where opts = [("overview", Overview), ("full", Full)]
sexpToCommand (SexpList [SymbolAtom "apropos", StringAtom search]) = Just (Apropos search)
sexpToCommand (SymbolAtom "get-options") = Just GetOpts
sexpToCommand (SexpList [SymbolAtom "set-option", SymbolAtom s, BoolAtom b])
| Just opt <- lookup s opts = Just (SetOpt opt b)
where opts = [("show-implicits", ShowImpl), ("error-context", ErrContext)] --TODO support more options. Issue #1611 in the Issue tracker. https://github.com/idris-lang/Idris-dev/issues/1611
sexpToCommand (SexpList [SymbolAtom "metavariables", IntegerAtom cols]) = Just (Metavariables (fromIntegral cols))
sexpToCommand (SexpList [SymbolAtom "who-calls", StringAtom name]) = Just (WhoCalls name)
sexpToCommand (SexpList [SymbolAtom "calls-who", StringAtom name]) = Just (CallsWho name)
sexpToCommand (SexpList [SymbolAtom "browse-namespace", StringAtom ns]) = Just (BrowseNS ns)
sexpToCommand (SexpList [SymbolAtom "normalise-term", StringAtom encoded]) = let (bnd, tm) = decodeTerm encoded in
Just (TermNormalise bnd tm)
sexpToCommand (SexpList [SymbolAtom "show-term-implicits", StringAtom encoded]) = let (bnd, tm) = decodeTerm encoded in
Just (TermShowImplicits bnd tm)
sexpToCommand (SexpList [SymbolAtom "hide-term-implicits", StringAtom encoded]) = let (bnd, tm) = decodeTerm encoded in
Just (TermNoImplicits bnd tm)
sexpToCommand (SexpList [SymbolAtom "elaborate-term", StringAtom encoded]) = let (bnd, tm) = decodeTerm encoded in
Just (TermElab bnd tm)
sexpToCommand (SexpList [SymbolAtom "print-definition", StringAtom name]) = Just (PrintDef name)
sexpToCommand (SexpList [SymbolAtom "error-string", StringAtom encoded]) = Just . ErrString . decodeErr $ encoded
sexpToCommand (SexpList [SymbolAtom "error-pprint", StringAtom encoded]) = Just . ErrPPrint . decodeErr $ encoded
sexpToCommand (SymbolAtom "version") = Just GetIdrisVersion
sexpToCommand _ = Nothing
parseMessage :: String -> Either Err (SExp, Integer)
parseMessage x = case receiveString x of
Right (SexpList [cmd, (IntegerAtom id)]) -> Right (cmd, id)
Right x -> Left . Msg $ "Invalid message " ++ show x
Left err -> Left err
receiveString :: String -> Either Err SExp
receiveString x =
case parseSExp x of
Failure _ -> Left . Msg $ "parse failure"
Success r -> Right r
convSExp :: SExpable a => String -> a -> Integer -> String
convSExp pre s id =
let sex = SexpList [SymbolAtom pre, toSExp s, IntegerAtom id] in
let str = sExpToString sex in
(getHexLength str) ++ str
getHexLength :: String -> String
getHexLength s = printf "%06x" (1 + (length s))
-- | The version of the IDE mode command set. Increment this when you
-- change it so clients can adapt.
ideModeEpoch :: Int
ideModeEpoch = 1
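-- A hypothetical illustration (not part of the original module): every message
-- is framed by the six-digit hexadecimal length prefix produced by
-- 'getHexLength' and consumed by 'getLen', e.g.
--
-- > convSExp "return" (42 :: Int) 3  ==  "00000f(:return 42 3)"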
| uuhan/Idris-dev | src/Idris/IdeMode.hs | bsd-3-clause | 17,331 | 0 | 16 | 5,164 | 5,563 | 2,850 | 2,713 | 279 | 3 |
-----------------------------------------------------------------------------
--
-- Module : GenerateForm
-- Copyright :
-- License : BSD3
--
-- Maintainer : agocorona@gmail.com
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, ExistentialQuantification #-}
module GenerateForm (
genForm
) where
import MFlow.Wai.Blaze.Html.All
import MFlow.Forms.Internals
import Control.Monad.State
import Data.Typeable
import Data.Monoid
import Prelude hiding (div)
import Text.Blaze.Html5.Attributes as At hiding (step,span)
import Data.List(nub)
main=do
userRegister "edituser" "edituser"
runNavigation "nav" . step $ genForm
-- page with header
hpage w = page $ tFieldEd "editor" "genFormHeader.html" "header" **> w
genForm= do
id <- getSessionId
let title= "generateForm/"++id ++ "/form.html"
initFormTemplate title
desc <- hpage $ createForm title
r <- hpage $ b "This is the form created, asking for input"
++> hr
++> generateForm title desc
<++ br
<** pageFlow "button" (submitButton "submit")
hpage $ h3 "results of the form:" ++> p << show r ++> noWidget
return()
type Template= String
data WType = Intv | Stringv | TextArea | OptionBox [String]
           | WCheckBoxes [String] | Form Template [WType] deriving (Typeable, Read, Show)
initFormTemplate title= do
liftIO $ writetField title $
p "( delete this line. Press the save button to save the edits)"
setSessionData ([] :: [WType])
setSessionData $ Seq 0
data Result = forall a.(Typeable a, Show a) => Result a deriving (Typeable)
instance Show Result where
show (Result x)= show x
genElem Intv= Result <$> getInt Nothing
genElem Stringv= Result <$> getString Nothing
genElem TextArea= Result <$> getMultilineText ""
genElem (OptionBox xs) =
Result <$> getSelect (setSelectedOption ""(p "select a option") <|>
firstOf[setOption op (fromStr op) | op <- xs])
genElem (WCheckBoxes xs) =
Result <$> getCheckBoxes(mconcat[setCheckBox False x <++ (fromStr x) | x <- xs])
genElem (Form temp desc)= Result <$> generateForm temp desc
generateForm title xs=
input ! At.type_ "hidden" ! name "p0" ! value "()"
++> template title
(pageFlow "" $ allOf $ map genElem xs )
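-- A hypothetical illustration (not part of the original demo): a widget
-- description such as the one below would be rendered by 'generateForm' as an
-- Int field, a text field and a group of check boxes, returning one 'Result'
-- per widget. The template path here is made up for the example.
--
-- > exampleDesc :: [WType]
-- > exampleDesc = [Intv, Stringv, WCheckBoxes ["red", "green", "blue"]]
-- >
-- > exampleWidget = generateForm "generateForm/demo/form.html" exampleDesc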
createForm title= do
divmenu <<< ( wlogin
**>
do br ++> wlink ("save" :: String) << b "Save the form and continue"
<++ br <> "(when finished)"
getSessionData `onNothing` return []
<** do
wdesc <- chooseWidget <++ hr
desc <- getSessionData `onNothing` return []
setSessionData $ mappend desc [wdesc]
content <- liftIO $ readtField mempty title
fieldview <- generateView wdesc
liftIO . writetField title $ content <> br <> fieldview
)
<** divbody <<< wform (edTemplate "edituser" title (return ()) )
divbody= div ! At.style "float:right;width:65%"
divmenu= div ! At.style "background-color:#EEEEEE;float:left;margin-left:10px;margin-right:10px;overflow:auto;"
newtype Seq= Seq Int deriving (Typeable)
generateView desc= View $ do
Seq n <- getSessionData `onNothing` return (Seq 0)
s <- get
let n'= if n== 0 then 1 else n
put s{mfSequence= n'}
FormElm render _ <- runView $ genElem desc
n'' <- gets mfSequence
setSessionData $ Seq n''
return $ FormElm mempty $ Just ( br <> br <> render :: Html)
chooseWidget=
(p $ a ! At.href "/" $ "home/reset") ++>
(p <<< do wlink ("text":: String) "text field"
ul <<<(li <<< wlink Intv "returning Int"
<|> li <<< wlink Stringv "returning string"))
<|> p <<< do wlink TextArea "text area"
<|> p <<< do
wlink ("check" :: String) "checkBoxes"
ul <<< getOptions "comb"
<|> p <<< do
wlink ("options" :: String) "options"
ul <<< getOptions "opt"
getOptions pf =
do
r <- wform $ submitButton "create" <|> submitButton "clear"
case r of
"create" -> do
ops <- getSessionData
case ops of
Nothing -> stop
Just elem -> return elem
"clear" -> do
delSessionData (undefined :: WType)
stop
<** do
op <- wform $ getString Nothing <! [("size","8")
,("placeholder","option")]
<** submitButton "add" <++ br
mops <- getSessionData
ops' <- case (mops,pf) of
(Nothing, "comb") -> do setSessionData $ WCheckBoxes [op] ; return [op]
(Nothing, "opt") -> do setSessionData $ OptionBox [op] ; return [op]
(Just (OptionBox _), "comb") -> do setSessionData $ WCheckBoxes [op] ; return [op]
(Just (WCheckBoxes _),"opt") -> do setSessionData $ OptionBox [op] ; return [op]
(Just (WCheckBoxes ops),"comb") -> do
let ops'= nub $ op:ops
setSessionData . WCheckBoxes $ ops'
return ops'
(Just (OptionBox ops),"opt") -> do
let ops'= nub $ op:ops
setSessionData . OptionBox $ ops'
return ops'
wraw $ mconcat [p << op | op <- ops']
--delParam par= modify $ \s -> s{mfEnv=filter ( (par /=) . fst) $ mfEnv s}
| agocorona/MFlow | Demos/GenerateForm.hs | bsd-3-clause | 5,448 | 0 | 21 | 1,518 | 1,700 | 845 | 855 | 120 | 8 |
{-# LANGUAGE ViewPatterns, PatternGuards, FlexibleContexts #-}
{-
Find and match:
<TEST>
yes = 1:2:[] -- [1,2]
yes = ['h','e','l','l','o'] -- "hello"
yes (1:2:[]) = 1 -- [1,2]
yes ['h','e'] = 1 -- "he"
-- [a]++b -> a : b, but only if not in a chain of ++'s
yes = [x] ++ xs -- x : xs
no = "x" ++ xs
no = [x] ++ xs ++ ys
no = xs ++ [x] ++ ys
yes = [if a then b else c] ++ xs -- (if a then b else c) : xs
yes = [1] : [2] : [3] : [4] : [5] : [] -- [[1], [2], [3], [4], [5]]
yes = if x == e then l2 ++ xs else [x] ++ check_elem xs -- x : check_elem xs
data Yes = Yes (Maybe [Char]) -- Maybe String
yes = y :: [Char] -> a -- String -> a
instance C [Char]
foo = [a b] ++ xs -- a b : xs
</TEST>
-}
module Hint.List(listHint) where
import Control.Applicative
import Hint.Type
listHint :: DeclHint
listHint _ _ = listDecl
listDecl :: Decl_ -> [Idea]
listDecl x = concatMap (listExp False) (childrenBi x) ++ stringType x ++ concatMap listPat (childrenBi x)
-- boolean = are you in a ++ chain
listExp :: Bool -> Exp_ -> [Idea]
listExp b (fromParen -> x) =
if null res then concatMap (listExp $ isAppend x) $ children x else [head res]
where
res = [warn name x x2 | (name,f) <- checks, Just x2 <- [f b x]]
listPat :: Pat_ -> [Idea]
listPat x = if null res then concatMap listPat $ children x else [head res]
where res = [warn name x x2 | (name,f) <- pchecks, Just x2 <- [f x]]
isAppend (view -> App2 op _ _) = op ~= "++"
isAppend _ = False
checks = let (*) = (,) in
["Use string literal" * useString
,"Use list literal" * useList
,"Use :" * useCons
]
pchecks = let (*) = (,) in
["Use string literal pattern" * usePString
,"Use list literal pattern" * usePList
]
usePString (PList _ xs) | xs /= [], Just s <- mapM fromPChar xs = Just $ PLit an (Signless an) $ String an s (show s)
usePString _ = Nothing
usePList = fmap (PList an) . f True
where
f first x | x ~= "[]" = if first then Nothing else Just []
f first (view -> PApp_ ":" [a,b]) = (a:) <$> f False b
f first _ = Nothing
useString b (List _ xs) | xs /= [], Just s <- mapM fromChar xs = Just $ Lit an $ String an s (show s)
useString b _ = Nothing
useList b = fmap (List an) . f True
where
f first x | x ~= "[]" = if first then Nothing else Just []
f first (view -> App2 c a b) | c ~= ":" = (a:) <$> f False b
f first _ = Nothing
useCons False (view -> App2 op x y) | op ~= "++", Just x2 <- f x, not $ isAppend y =
Just $ InfixApp an x2 (QConOp an $ list_cons_name an) y
where
f (List _ [x]) = Just $ if isApp x then x else paren x
f _ = Nothing
useCons _ _ = Nothing
typeListChar = TyList an (TyCon an (toNamed "Char"))
typeString = TyCon an (toNamed "String")
stringType :: Decl_ -> [Idea]
stringType x = case x of
InstDecl _ _ _ x -> f x
_ -> f x
where
f x = concatMap g $ childrenBi x
g :: Type_ -> [Idea]
g (fromTyParen -> x) = [warn "Use String" x (transform f x) | any (=~= typeListChar) $ universe x]
where f x = if x =~= typeListChar then typeString else x
| fpco/hlint | src/Hint/List.hs | bsd-3-clause | 3,163 | 0 | 11 | 898 | 1,149 | 578 | 571 | 51 | 4 |
{-# LANGUAGE FlexibleContexts #-}
module Jade.Part ( isTerm
, isSubModule
, removeTerms
, bundle
, getValsWithIdent
, getBundleWithIdent
, getBundleWithLit
, width
, hasVal
, getLitVals
, containsIdentifier
, getNames
, loc
, toWire
, hasPoint
) where
import Jade.Common
import Control.Monad
import Data.Maybe
import qualified Jade.Port as Port
import qualified Jade.Jumper as Jumper
import qualified Jade.SubModule as SubModule
import qualified Jade.Module as Module
import qualified Jade.Wire as Wire
import qualified Jade.Signal as Signal
import qualified Jade.Decode.Bundle as Bundle
import qualified Jade.Term as Term
bundle :: Part -> ValBundle
bundle part =
case part of
PortC (Port _ (Just s)) -> Signal.getBundle s
WireC (Wire _ (Just s)) -> Signal.getBundle s
WireC (Wire _ Nothing) -> mempty
TermC (Terminal _ s) -> s
x -> error $ "Part.sig: Not implemented for: " ++ show x
getBundleWithIdent part ident = if Bundle.hasName (bundle part) ident
then Just $ bundle part
else Nothing
getBundleWithLit part = if Bundle.hasLit (bundle part)
then Just $ bundle part
else Nothing
getLitVals part = Bundle.getLitVals (bundle part)
containsIdentifier :: Part -> String -> Bool
containsIdentifier part ident = Bundle.containsIdentifier (bundle part) ident
-- hasAnySigName :: Part -> Bool
-- hasAnySigName part = Bundle.hasAnyValName (bundle part)
hasVal part val = Bundle.hasVal (bundle part) val
-- isJumper (JumperC _) = True
-- isJumper _ = False
-- isWire (WireC _) = True
-- isWire _ = False
toWire (WireC w) = Just w
toWire _ = Nothing
isSubModule (SubModuleC _) = True
isSubModule _ = False
isTerm (TermC _) = True
isTerm _ = False
getValsWithIdent :: Part -> String -> [Val]
getValsWithIdent part ident = Bundle.getValsWithIdent (bundle part) ident
removeTerms parts = filter (not . isTerm) parts
getNames part = Bundle.getNames (bundle part)
loc part =
case part of
PortC (Port (Coord3 x y _) _) -> return (x, y)
WireC _ -> die "Part.loc doesn't support Wire"
TermC (Terminal (Coord3 x y _) _) -> return (x, y)
x -> die $ "Part.loc: doesn't support: " ++ show x
points :: Part -> J [Point]
points part = "Part.points" <? do
case part of
PortC x -> return $ Port.points x
SubModuleC x -> SubModule.points x
WireC x -> return $ Wire.points x
JumperC x -> return $ Jumper.points x
TermC x -> return $ Term.points x
UnusedPart -> return []
hasPoint :: Part -> Point -> J Bool
hasPoint part point = "Part.hasPoint" <? do
enb ("PART!", part)
asdf <- points part
enb (point, asdf)
(point `elem`) <$> (points part)
width :: Part -> J Int
width part = do
nb $ show part
case part of
PortC (Port _ (Just s)) -> return $ Signal.width s
PortC (Port _ Nothing) -> return 1
WireC (Wire _ (Just s)) -> do
nb $ "For this wire: " ++ show part
nb $ "Found width: " ++ (show $ Signal.width s)
return $ Signal.width s
WireC (Wire _ Nothing) -> return 1
TermC (Terminal _ s) -> return $ Bundle.width s
x -> die $ "Part.width: Not implemented for: " ++ show x
| drhodes/jade2hdl | src/Jade/Part.hs | bsd-3-clause | 3,462 | 0 | 16 | 1,018 | 1,116 | 568 | 548 | 89 | 6 |
{-# LANGUAGE GADTs, PolyKinds, TypeOperators, CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-warnings-deprecations #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Util
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg (rae@cs.brynmawr.edu)
-- Stability : experimental
--
-- Utility exports (and re-exports) for glambda. This module is meant to
-- be internal -- do not import it if you are not part of the glambda
-- package!
--
----------------------------------------------------------------------------
module Language.Glambda.Util (
render, toSimpleDoc, maybeParens, ($$),
Prec, topPrec,
stripWhitespace, nthDefault,
(:~:)(..), ignore
) where
import Text.Parsec
import Text.PrettyPrint.ANSI.Leijen as Pretty
import Data.Char
import Data.List
#if __GLASGOW_HASKELL__ < 709
import Data.Functor
#endif
#if __GLASGOW_HASKELL__ >= 707
import Data.Type.Equality
#else
data a :~: b where
Refl :: a :~: a
#endif
-- | Like 'Data.Functor.void'
ignore :: Functor f => f a -> f ()
ignore = (() <$)
instance Pretty ParseError where
pretty = text . show
-- | More perspicuous synonym for operator precedence
type Prec = Rational
-- | Precedence for top-level printing
topPrec :: Prec
topPrec = 0
-- | Convert a 'Doc' to a 'String'
render :: Doc -> String
render = flip displayS "" . toSimpleDoc
-- | Convert a 'Doc' to a 'SimpleDoc' for further rendering
toSimpleDoc :: Doc -> SimpleDoc
toSimpleDoc = renderPretty 1.0 78
-- | Enclose a 'Doc' in parens if the flag is 'True'
maybeParens :: Bool -> Doc -> Doc
maybeParens True = parens
maybeParens False = id
-- | Synonym for 'Pretty.<$>'
($$) :: Doc -> Doc -> Doc
($$) = (Pretty.<$>)
-- | (Inefficiently) strips whitespace from a string
stripWhitespace :: String -> String
stripWhitespace = dropWhile isSpace . dropWhileEnd isSpace
-- | Pluck out the nth item from a list, or use a default if the list
-- is too short
nthDefault :: a -> Int -> [a] -> a
nthDefault _ 0 (x:_) = x
nthDefault def n (_:xs) = nthDefault def (n-1) xs
nthDefault def _ [] = def
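-- A small illustration (not part of the original module):
--
-- > nthDefault 0 2 [10,20,30]  ==  30
-- > nthDefault 0 5 [10,20]     ==  0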
| goldfirere/glambda | src/Language/Glambda/Util.hs | bsd-3-clause | 2,186 | 0 | 8 | 387 | 387 | 234 | 153 | 36 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_push_descriptor - device extension
--
-- == VK_KHR_push_descriptor
--
-- [__Name String__]
-- @VK_KHR_push_descriptor@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 81
--
-- [__Revision__]
-- 2
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_push_descriptor] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_KHR_push_descriptor extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2017-09-12
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- - Michael Worcester, Imagination Technologies
--
-- == Description
--
-- This extension allows descriptors to be written into the command buffer,
-- while the implementation is responsible for managing their memory. Push
-- descriptors may enable easier porting from older APIs and in some cases
-- can be more efficient than writing descriptors into descriptor sets.
--
-- == New Commands
--
-- - 'cmdPushDescriptorSetKHR'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_descriptor_update_template VK_KHR_descriptor_update_template>
-- is supported:
--
-- - 'cmdPushDescriptorSetWithTemplateKHR'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1 Version 1.1>
-- is supported:
--
-- - 'cmdPushDescriptorSetWithTemplateKHR'
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDevicePushDescriptorPropertiesKHR'
--
-- == New Enum Constants
--
-- - 'KHR_PUSH_DESCRIPTOR_EXTENSION_NAME'
--
-- - 'KHR_PUSH_DESCRIPTOR_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.DescriptorSetLayoutCreateFlagBits.DescriptorSetLayoutCreateFlagBits':
--
-- - 'Vulkan.Core10.Enums.DescriptorSetLayoutCreateFlagBits.DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_descriptor_update_template VK_KHR_descriptor_update_template>
-- is supported:
--
-- - Extending
-- 'Vulkan.Core11.Enums.DescriptorUpdateTemplateType.DescriptorUpdateTemplateType':
--
-- - 'Vulkan.Core11.Enums.DescriptorUpdateTemplateType.DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1 Version 1.1>
-- is supported:
--
-- - Extending
-- 'Vulkan.Core11.Enums.DescriptorUpdateTemplateType.DescriptorUpdateTemplateType':
--
-- - 'Vulkan.Core11.Enums.DescriptorUpdateTemplateType.DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR'
--
-- == Version History
--
-- - Revision 1, 2016-10-15 (Jeff Bolz)
--
-- - Internal revisions
--
-- - Revision 2, 2017-09-12 (Tobias Hector)
--
-- - Added interactions with Vulkan 1.1
--
-- == See Also
--
-- 'PhysicalDevicePushDescriptorPropertiesKHR', 'cmdPushDescriptorSetKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_push_descriptor Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_push_descriptor ( cmdPushDescriptorSetKHR
, cmdPushDescriptorSetWithTemplateKHR
, PhysicalDevicePushDescriptorPropertiesKHR(..)
, KHR_PUSH_DESCRIPTOR_SPEC_VERSION
, pattern KHR_PUSH_DESCRIPTOR_SPEC_VERSION
, KHR_PUSH_DESCRIPTOR_EXTENSION_NAME
, pattern KHR_PUSH_DESCRIPTOR_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.CStruct.Extends (pokeSomeCStruct)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Core11.Handles (DescriptorUpdateTemplate)
import Vulkan.Core11.Handles (DescriptorUpdateTemplate(..))
import Vulkan.Dynamic (DeviceCmds(pVkCmdPushDescriptorSetKHR))
import Vulkan.Dynamic (DeviceCmds(pVkCmdPushDescriptorSetWithTemplateKHR))
import Vulkan.Core10.Enums.PipelineBindPoint (PipelineBindPoint)
import Vulkan.Core10.Enums.PipelineBindPoint (PipelineBindPoint(..))
import Vulkan.Core10.Handles (PipelineLayout)
import Vulkan.Core10.Handles (PipelineLayout(..))
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.DescriptorSet (WriteDescriptorSet)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdPushDescriptorSetKHR
:: FunPtr (Ptr CommandBuffer_T -> PipelineBindPoint -> PipelineLayout -> Word32 -> Word32 -> Ptr (SomeStruct WriteDescriptorSet) -> IO ()) -> Ptr CommandBuffer_T -> PipelineBindPoint -> PipelineLayout -> Word32 -> Word32 -> Ptr (SomeStruct WriteDescriptorSet) -> IO ()
-- | vkCmdPushDescriptorSetKHR - Pushes descriptor updates into a command
-- buffer
--
-- = Description
--
-- /Push descriptors/ are a small bank of descriptors whose storage is
-- internally managed by the command buffer rather than being written into
-- a descriptor set and later bound to a command buffer. Push descriptors
-- allow for incremental updates of descriptors without managing the
-- lifetime of descriptor sets.
--
-- When a command buffer begins recording, all push descriptors are
-- undefined. Push descriptors /can/ be updated incrementally and cause
-- shaders to use the updated descriptors for subsequent
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipeline-bindpoint-commands bound pipeline commands>
-- with the pipeline type set by @pipelineBindPoint@ until the descriptor
-- is overwritten, or else until the set is disturbed as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#descriptorsets-compatibility Pipeline Layout Compatibility>.
-- When the set is disturbed or push descriptors with a different
-- descriptor set layout are set, all push descriptors are undefined.
--
-- Push descriptors that are
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shaders-staticuse statically used>
-- by a pipeline /must/ not be undefined at the time that a drawing or
-- dispatching command is recorded to execute using that pipeline. This
-- includes immutable sampler descriptors, which /must/ be pushed before
-- they are accessed by a pipeline (the immutable samplers are pushed,
-- rather than the samplers in @pDescriptorWrites@). Push descriptors that
-- are not statically used /can/ remain undefined.
--
-- Push descriptors do not use dynamic offsets. Instead, the corresponding
-- non-dynamic descriptor types /can/ be used and the @offset@ member of
-- 'Vulkan.Core10.DescriptorSet.DescriptorBufferInfo' /can/ be changed each
-- time the descriptor is written.
--
-- Each element of @pDescriptorWrites@ is interpreted as in
-- 'Vulkan.Core10.DescriptorSet.WriteDescriptorSet', except the @dstSet@
-- member is ignored.
--
-- To push an immutable sampler, use a
-- 'Vulkan.Core10.DescriptorSet.WriteDescriptorSet' with @dstBinding@ and
-- @dstArrayElement@ selecting the immutable sampler’s binding. If the
-- descriptor type is
-- 'Vulkan.Core10.Enums.DescriptorType.DESCRIPTOR_TYPE_SAMPLER', the
-- @pImageInfo@ parameter is ignored and the immutable sampler is taken
-- from the push descriptor set layout in the pipeline layout. If the
-- descriptor type is
-- 'Vulkan.Core10.Enums.DescriptorType.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER',
-- the @sampler@ member of the @pImageInfo@ parameter is ignored and the
-- immutable sampler is taken from the push descriptor set layout in the
-- pipeline layout.
--
-- == Valid Usage
--
-- - #VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363#
-- @pipelineBindPoint@ /must/ be supported by the @commandBuffer@’s
-- parent 'Vulkan.Core10.Handles.CommandPool'’s queue family
--
-- - #VUID-vkCmdPushDescriptorSetKHR-set-00364# @set@ /must/ be less than
-- 'Vulkan.Core10.PipelineLayout.PipelineLayoutCreateInfo'::@setLayoutCount@
-- provided when @layout@ was created
--
-- - #VUID-vkCmdPushDescriptorSetKHR-set-00365# @set@ /must/ be the
-- unique set number in the pipeline layout that uses a descriptor set
-- layout that was created with
-- 'Vulkan.Core10.Enums.DescriptorSetLayoutCreateFlagBits.DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR'
--
-- - #VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-06494# For each
-- element i where @pDescriptorWrites@[i].@descriptorType@ is
-- 'Vulkan.Core10.Enums.DescriptorType.DESCRIPTOR_TYPE_SAMPLED_IMAGE',
-- 'Vulkan.Core10.Enums.DescriptorType.DESCRIPTOR_TYPE_STORAGE_IMAGE',
-- or
-- 'Vulkan.Core10.Enums.DescriptorType.DESCRIPTOR_TYPE_INPUT_ATTACHMENT',
-- @pDescriptorWrites@[i].@pImageInfo@ /must/ be a valid pointer to an
-- array of @pDescriptorWrites@[i].@descriptorCount@ valid
-- 'Vulkan.Core10.DescriptorSet.DescriptorImageInfo' structures
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter#
-- @pipelineBindPoint@ /must/ be a valid
-- 'Vulkan.Core10.Enums.PipelineBindPoint.PipelineBindPoint' value
--
-- - #VUID-vkCmdPushDescriptorSetKHR-layout-parameter# @layout@ /must/ be
-- a valid 'Vulkan.Core10.Handles.PipelineLayout' handle
--
-- - #VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter#
-- @pDescriptorWrites@ /must/ be a valid pointer to an array of
-- @descriptorWriteCount@ valid
-- 'Vulkan.Core10.DescriptorSet.WriteDescriptorSet' structures
--
-- - #VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool# The
-- 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support graphics, or compute operations
--
-- - #VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength#
-- @descriptorWriteCount@ /must/ be greater than @0@
--
-- - #VUID-vkCmdPushDescriptorSetKHR-commonparent# Both of
-- @commandBuffer@, and @layout@ /must/ have been created, allocated,
-- or retrieved from the same 'Vulkan.Core10.Handles.Device'
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Both | Graphics |
-- | Secondary | | Compute |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_push_descriptor VK_KHR_push_descriptor>,
-- 'Vulkan.Core10.Handles.CommandBuffer',
-- 'Vulkan.Core10.Enums.PipelineBindPoint.PipelineBindPoint',
-- 'Vulkan.Core10.Handles.PipelineLayout',
-- 'Vulkan.Core10.DescriptorSet.WriteDescriptorSet'
cmdPushDescriptorSetKHR :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer that the descriptors will be
-- recorded in.
CommandBuffer
-> -- | @pipelineBindPoint@ is a
-- 'Vulkan.Core10.Enums.PipelineBindPoint.PipelineBindPoint' indicating the
-- type of the pipeline that will use the descriptors. There is a separate
-- set of push descriptor bindings for each pipeline type, so binding one
-- does not disturb the others.
PipelineBindPoint
-> -- | @layout@ is a 'Vulkan.Core10.Handles.PipelineLayout' object used to
-- program the bindings.
PipelineLayout
-> -- | @set@ is the set number of the descriptor set in the pipeline layout
-- that will be updated.
("set" ::: Word32)
-> -- | @pDescriptorWrites@ is a pointer to an array of
-- 'Vulkan.Core10.DescriptorSet.WriteDescriptorSet' structures describing
-- the descriptors to be updated.
("descriptorWrites" ::: Vector (SomeStruct WriteDescriptorSet))
-> io ()
cmdPushDescriptorSetKHR commandBuffer pipelineBindPoint layout set descriptorWrites = liftIO . evalContT $ do
let vkCmdPushDescriptorSetKHRPtr = pVkCmdPushDescriptorSetKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
lift $ unless (vkCmdPushDescriptorSetKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdPushDescriptorSetKHR is null" Nothing Nothing
let vkCmdPushDescriptorSetKHR' = mkVkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHRPtr
pPDescriptorWrites <- ContT $ allocaBytes @(WriteDescriptorSet _) ((Data.Vector.length (descriptorWrites)) * 64)
Data.Vector.imapM_ (\i e -> ContT $ pokeSomeCStruct (forgetExtensions (pPDescriptorWrites `plusPtr` (64 * (i)) :: Ptr (WriteDescriptorSet _))) (e) . ($ ())) (descriptorWrites)
lift $ traceAroundEvent "vkCmdPushDescriptorSetKHR" (vkCmdPushDescriptorSetKHR' (commandBufferHandle (commandBuffer)) (pipelineBindPoint) (layout) (set) ((fromIntegral (Data.Vector.length $ (descriptorWrites)) :: Word32)) (forgetExtensions (pPDescriptorWrites)))
pure $ ()
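-- A hypothetical usage sketch (not part of the generated binding): pushing a
-- single uniform-buffer descriptor at binding 0 of set 0. 'cmdBuf',
-- 'pipelineLayout' and 'bufInfo' are assumed to be in scope, and the vector
-- literal relies on the OverloadedLists extension.
--
-- > cmdPushDescriptorSetKHR cmdBuf PIPELINE_BIND_POINT_GRAPHICS pipelineLayout 0
-- >   [ SomeStruct (zero
-- >       { dstBinding      = 0
-- >       , descriptorCount = 1
-- >       , descriptorType  = DESCRIPTOR_TYPE_UNIFORM_BUFFER
-- >       , bufferInfo      = [bufInfo]
-- >       } :: WriteDescriptorSet '[])
-- >   ]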
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdPushDescriptorSetWithTemplateKHR
:: FunPtr (Ptr CommandBuffer_T -> DescriptorUpdateTemplate -> PipelineLayout -> Word32 -> Ptr () -> IO ()) -> Ptr CommandBuffer_T -> DescriptorUpdateTemplate -> PipelineLayout -> Word32 -> Ptr () -> IO ()
-- | vkCmdPushDescriptorSetWithTemplateKHR - Pushes descriptor updates into a
-- command buffer using a descriptor update template
--
-- == Valid Usage
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366# The
-- @pipelineBindPoint@ specified during the creation of the descriptor
-- update template /must/ be supported by the @commandBuffer@’s parent
-- 'Vulkan.Core10.Handles.CommandPool'’s queue family
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686# @pData@
-- /must/ be a valid pointer to a memory containing one or more valid
-- instances of 'Vulkan.Core10.DescriptorSet.DescriptorImageInfo',
-- 'Vulkan.Core10.DescriptorSet.DescriptorBufferInfo', or
-- 'Vulkan.Core10.Handles.BufferView' in a layout defined by
-- @descriptorUpdateTemplate@ when it was created with
-- 'Vulkan.Core11.Promoted_From_VK_KHR_descriptor_update_template.createDescriptorUpdateTemplate'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter#
-- @descriptorUpdateTemplate@ /must/ be a valid
-- 'Vulkan.Core11.Handles.DescriptorUpdateTemplate' handle
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter#
-- @layout@ /must/ be a valid 'Vulkan.Core10.Handles.PipelineLayout'
-- handle
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-cmdpool#
-- The 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support graphics, or compute operations
--
-- - #VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent# Each of
-- @commandBuffer@, @descriptorUpdateTemplate@, and @layout@ /must/
-- have been created, allocated, or retrieved from the same
-- 'Vulkan.Core10.Handles.Device'
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Both | Graphics |
-- | Secondary | | Compute |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- __API example__
--
-- > struct AppDataStructure
-- > {
-- > VkDescriptorImageInfo imageInfo; // a single image info
-- > // ... some more application related data
-- > };
-- >
-- > const VkDescriptorUpdateTemplateEntry descriptorUpdateTemplateEntries[] =
-- > {
-- > // binding to a single image descriptor
-- > {
-- > 0, // binding
-- > 0, // dstArrayElement
-- > 1, // descriptorCount
-- > VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, // descriptorType
-- > offsetof(AppDataStructure, imageInfo), // offset
-- > 0 // stride is not required if descriptorCount is 1
-- > }
-- > };
-- >
-- > // create a descriptor update template for push descriptor set updates
-- > const VkDescriptorUpdateTemplateCreateInfo createInfo =
-- > {
-- > VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, // sType
-- > NULL, // pNext
-- > 0, // flags
-- > 1, // descriptorUpdateEntryCount
-- > descriptorUpdateTemplateEntries, // pDescriptorUpdateEntries
-- > VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, // templateType
-- > 0, // descriptorSetLayout, ignored by given templateType
-- > VK_PIPELINE_BIND_POINT_GRAPHICS, // pipelineBindPoint
-- > myPipelineLayout, // pipelineLayout
-- > 0, // set
-- > };
-- >
-- > VkDescriptorUpdateTemplate myDescriptorUpdateTemplate;
-- > myResult = vkCreateDescriptorUpdateTemplate(
-- > myDevice,
-- > &createInfo,
-- > NULL,
-- > &myDescriptorUpdateTemplate);
-- >
-- > AppDataStructure appData;
-- > // fill appData here or cache it in your engine
-- > vkCmdPushDescriptorSetWithTemplateKHR(myCmdBuffer, myDescriptorUpdateTemplate, myPipelineLayout, 0,&appData);
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_descriptor_update_template VK_KHR_descriptor_update_template>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_push_descriptor VK_KHR_push_descriptor>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_1 VK_VERSION_1_1>,
-- 'Vulkan.Core10.Handles.CommandBuffer',
-- 'Vulkan.Core11.Handles.DescriptorUpdateTemplate',
-- 'Vulkan.Core10.Handles.PipelineLayout'
cmdPushDescriptorSetWithTemplateKHR :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer that the descriptors will be
-- recorded in.
CommandBuffer
-> -- | @descriptorUpdateTemplate@ is a descriptor update template defining how
-- to interpret the descriptor information in @pData@.
DescriptorUpdateTemplate
-> -- | @layout@ is a 'Vulkan.Core10.Handles.PipelineLayout' object used to
-- program the bindings. It /must/ be compatible with the layout used to
-- create the @descriptorUpdateTemplate@ handle.
PipelineLayout
-> -- | @set@ is the set number of the descriptor set in the pipeline layout
-- that will be updated. This /must/ be the same number used to create the
-- @descriptorUpdateTemplate@ handle.
("set" ::: Word32)
-> -- | @pData@ is a pointer to memory containing descriptors for the templated
-- update.
("data" ::: Ptr ())
-> io ()
cmdPushDescriptorSetWithTemplateKHR commandBuffer descriptorUpdateTemplate layout set data' = liftIO $ do
let vkCmdPushDescriptorSetWithTemplateKHRPtr = pVkCmdPushDescriptorSetWithTemplateKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
unless (vkCmdPushDescriptorSetWithTemplateKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdPushDescriptorSetWithTemplateKHR is null" Nothing Nothing
let vkCmdPushDescriptorSetWithTemplateKHR' = mkVkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHRPtr
traceAroundEvent "vkCmdPushDescriptorSetWithTemplateKHR" (vkCmdPushDescriptorSetWithTemplateKHR' (commandBufferHandle (commandBuffer)) (descriptorUpdateTemplate) (layout) (set) (data'))
pure $ ()
-- | VkPhysicalDevicePushDescriptorPropertiesKHR - Structure describing push
-- descriptor limits that can be supported by an implementation
--
-- = Description
--
-- If the 'PhysicalDevicePushDescriptorPropertiesKHR' structure is included
-- in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_push_descriptor VK_KHR_push_descriptor>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorPropertiesKHR
{ -- | #limits-maxPushDescriptors# @maxPushDescriptors@ is the maximum number
-- of descriptors that /can/ be used in a descriptor set created with
-- 'Vulkan.Core10.Enums.DescriptorSetLayoutCreateFlagBits.DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR'
-- set.
maxPushDescriptors :: Word32 }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDevicePushDescriptorPropertiesKHR)
#endif
deriving instance Show PhysicalDevicePushDescriptorPropertiesKHR
instance ToCStruct PhysicalDevicePushDescriptorPropertiesKHR where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDevicePushDescriptorPropertiesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (maxPushDescriptors)
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (zero)
f
instance FromCStruct PhysicalDevicePushDescriptorPropertiesKHR where
peekCStruct p = do
maxPushDescriptors <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
pure $ PhysicalDevicePushDescriptorPropertiesKHR
maxPushDescriptors
instance Storable PhysicalDevicePushDescriptorPropertiesKHR where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDevicePushDescriptorPropertiesKHR where
zero = PhysicalDevicePushDescriptorPropertiesKHR
zero
type KHR_PUSH_DESCRIPTOR_SPEC_VERSION = 2
-- No documentation found for TopLevel "VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION"
pattern KHR_PUSH_DESCRIPTOR_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_PUSH_DESCRIPTOR_SPEC_VERSION = 2
type KHR_PUSH_DESCRIPTOR_EXTENSION_NAME = "VK_KHR_push_descriptor"
-- No documentation found for TopLevel "VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME"
pattern KHR_PUSH_DESCRIPTOR_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_PUSH_DESCRIPTOR_EXTENSION_NAME = "VK_KHR_push_descriptor"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_push_descriptor.hs | bsd-3-clause | 31,852 | 0 | 20 | 6,849 | 2,424 | 1,543 | 881 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module OIS.OISObject(
object_delete,
object_type,
object_buffered,
object_getCreator,
object_setBuffered,
object_capture,
object_getID,
object_queryInterface,
object__initialize
)
where
import OIS.Types
import Control.Monad
import Foreign
import Foreign.C.String
import Foreign.C.Types
foreign import ccall "OISObject.h OIS_Object_delete" c_object_delete :: Object -> IO ()
object_delete :: Object -> IO ()
object_delete p1 = c_object_delete p1
foreign import ccall "OISObject.h OIS_Object_type" c_object_type :: Object -> IO CInt
object_type :: Object -> IO Type
object_type p1 = liftM cintToType $ c_object_type p1
foreign import ccall "OISObject.h OIS_Object_buffered" c_object_buffered :: Object -> IO CBool
object_buffered :: Object -> IO Bool
object_buffered p1 = liftM toBool $ c_object_buffered p1
foreign import ccall "OISObject.h OIS_Object_getCreator" c_object_getCreator :: Object -> IO InputManager
object_getCreator :: Object -> IO InputManager
object_getCreator p1 = c_object_getCreator p1
foreign import ccall "OISObject.h OIS_Object_setBuffered" c_object_setBuffered :: Object -> CBool -> IO ()
object_setBuffered :: Object -> Bool -> IO ()
object_setBuffered p1 p2 = c_object_setBuffered p1 (fromBool p2)
foreign import ccall "OISObject.h OIS_Object_capture" c_object_capture :: Object -> IO ()
object_capture :: Object -> IO ()
object_capture p1 = c_object_capture p1
foreign import ccall "OISObject.h OIS_Object_getID" c_object_getID :: Object -> IO CInt
object_getID :: Object -> IO Int
object_getID p1 = liftM fromIntegral $ c_object_getID p1
foreign import ccall "OISObject.h OIS_Object_queryInterface" c_object_queryInterface :: Object -> CInt -> IO Interface
object_queryInterface :: Object -> IType -> IO Interface
object_queryInterface p1 p2 = c_object_queryInterface p1 (iTypeToCInt p2)
foreign import ccall "OISObject.h OIS_Object__initialize" c_object__initialize :: Object -> IO ()
object__initialize :: Object -> IO ()
object__initialize p1 = c_object__initialize p1
| ghorn/hois | OIS/OISObject.hs | bsd-3-clause | 2,080 | 0 | 9 | 287 | 512 | 264 | 248 | 43 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Types where
import Control.Error (ExceptT)
import Control.Lens ((^.))
import Control.Lens.TH (makeLenses)
import Control.Monad.Reader (ask)
import Control.Monad.Trans (lift, MonadIO(..))
import Control.Monad.Trans.Reader (ReaderT)
import Data.Aeson (FromJSON(..), ToJSON(..),
object, (.=), (.:))
import Data.Aeson.Types (Value(..), typeMismatch)
import Data.Aeson.TH (deriveJSON, deriveFromJSON
,defaultOptions)
import Data.CaseInsensitive
import qualified Data.Configurator as C
import qualified Data.Configurator.Types as C
import Data.Profunctor.Product (p3, p4)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Text (Text)
import Data.Singletons (Sing)
import Data.Singletons.TH (genSingletons)
import Database.PostgreSQL.Simple (Connection, ConnectInfo(..),
connect)
import Database.PostgreSQL.Simple.Internal (newNullConnection)
import Opaleye (Column, Table(Table),
required, optional,
PGInt4, PGText,
PGCitext)
import Opaleye.Internal.RunQuery (QueryRunnerColumnDefault(..))
import qualified Opaleye.PGTypes as P
import Servant (ServantErr)
import System.IO (FilePath)
import Web.HttpApiData (FromHttpApiData)
import Orphans ()
--------------------------------------------------------------------------------
-- | Environment
--------------------------------------------------------------------------------
data Environment =
Development
| Test
deriving (Eq, Show, Read)
mkEnv :: C.Config -> IO Environment
mkEnv cfg = read <$> C.require cfg "Environment"
--------------------------------------------------------------------------------
-- | Misc
--------------------------------------------------------------------------------
type f ~> g = forall a. f a -> g a
type Handler = ExceptT ServantErr IO
--------------------------------------------------------------------------------
-- | CRUD types
--------------------------------------------------------------------------------
data CrudType = CrudUser
| CrudMedia
genSingletons [''CrudType]
type family ReadData (c :: CrudType) :: *
type family BaseData (c :: CrudType) :: *
type family NewData (c :: CrudType) :: *
class PGEntity e where
type WriteRow e :: *
toPG :: e -> WriteRow e
class Example (k :: CrudType) where
exampleId :: Sing k -> ReadData k
exampleBase :: Sing k -> BaseData k
exampleNew :: Sing k -> NewData k
--------------------------------------------------------------------------------
-- | User
--------------------------------------------------------------------------------
data NewUser = NewUser
{ _newUserUsername :: CI Text
, _newUserEmail :: CI Text
} deriving (Eq, Show)
makeLenses ''NewUser
instance FromJSON NewUser where
parseJSON (Object v) =
NewUser <$> v .: "userUsername"
<*> v .: "userEmail"
parseJSON v = typeMismatch "NewUser" v
newtype UserId = UserId { _getUserId :: Int }
deriving (Eq, Show, FromJSON, ToJSON, FromHttpApiData, QueryRunnerColumnDefault PGInt4)
makeLenses ''UserId
data User' a b c = User'
{ _userId :: a
, _userUsername :: b
, _userEmail :: c
}
makeLenses ''User'
type User = User' UserId (CI Text) (CI Text)
-- $(deriveJSON defaultOptions ''User' UserId (CI Text) (CI Text))
instance FromJSON User where
parseJSON (Object v) =
User' <$> v .: "userId"
<*> v .: "userUsername"
<*> v .: "userEmail"
parseJSON v = typeMismatch "User" v
instance ToJSON User where
toJSON usr = object [ "userId" .= (usr ^. userId)
, "userUsername" .= (usr ^. userUsername)
, "userEmail" .= (usr ^. userEmail)
]
type NewUserRow = User' (Maybe (Column PGInt4)) (Column PGCitext) (Column PGCitext)
type UserRow = User' (Column PGInt4) (Column PGCitext) (Column PGCitext)
$(makeAdaptorAndInstance "pUser" ''User')
instance PGEntity NewUser where
type WriteRow NewUser = NewUserRow
toPG nUsr = let name = P.pgCiStrictText $ nUsr ^. newUserUsername
email = P.pgCiStrictText $ nUsr ^. newUserEmail
in User' Nothing name email
instance PGEntity User where
type WriteRow User = NewUserRow
toPG usr = let uId = Just $ P.pgInt4 $ usr ^. userId ^. getUserId
name = P.pgCiStrictText $ usr ^. userUsername
email = P.pgCiStrictText $ usr ^. userEmail
in User' uId name email
instance Example 'CrudUser where
exampleId _ = UserId 1
exampleBase _ = User' (UserId 1) (mk "ali") (mk "ali@gmail.com")
exampleNew _ = NewUser (mk "ali") (mk "ali@gmail.com")
userTable :: Table NewUserRow UserRow
userTable = Table "users" (pUser User' { _userId = optional "id"
, _userUsername = required "username"
, _userEmail = required "email" })
type instance ReadData 'CrudUser = UserId
type instance BaseData 'CrudUser = User
type instance NewData 'CrudUser = NewUser
----------------------------------------------------------------------------------
---- | Media
----------------------------------------------------------------------------------
--data NewMedia = NewMedia
-- { _newMediaOwner :: UserId
-- , _newMediaCaption :: CI Text
-- , _newMediaRef :: Text
-- }
--
--makeLenses ''NewMedia
---- $(deriveFromJSON defaultOptions ''NewMedia)
--
--newtype MediaId = MediaId {_getMediaId :: Int}
-- deriving (Eq, Show, FromJSON, ToJSON, FromHttpApiData, QueryRunnerColumnDefault PGInt4)
--
--makeLenses ''MediaId
--
--data Media' a b c d = Media'
-- { _mediaId :: a
-- , _mediaOwner :: b
-- , _mediaCaption :: c
-- , _mediaRef :: d
-- }
--
--makeLenses ''Media'
---- $(deriveJSON defaultOptions ''Media')
--
--type Media = Media' MediaId UserId (CI Text) Text
--type NewMediaColumn = Media' (Maybe (Column PGInt4)) (Column PGInt4) (Column PGCitext) (Column PGText)
--type MediaColumn = Media' (Column PGInt4) (Column PGInt4) (Column PGCitext) (Column PGText)
--
-- $(makeAdaptorAndInstance "pMedia" ''Media')
--
--mediaTable :: Table NewMediaColumn MediaColumn
--mediaTable = Table "media" (pMedia Media' { _mediaId = optional "id"
-- , _mediaOwner = required "owner_id"
-- , _mediaCaption = required "caption"
-- , _mediaRef = required "ref" })
--
--type instance ReadData 'CrudMedia = MediaId
--
--type instance BaseData 'CrudMedia = Media
--
--type instance NewData 'CrudMedia = NewMedia
--instance PGEntity MediaCrud where
-- toPG nMed = let owner = P.pgInt4 $ nMed ^. newMediaOwner ^. getUserId
-- cap = P.pgCiStrictText $ nMed ^. newMediaCaption
-- ref = P.pgStrictText $ nMed ^. newMediaRef
-- in Media' Nothing owner cap ref
--
--instance PGEntity Media MediaColumn where
-- toPG med = let mId = P.pgInt4 $ med ^. mediaId ^. getMediaId
-- owner = P.pgInt4 $ med ^. mediaOwner ^. getUserId
-- cap = P.pgCiStrictText $ med ^. mediaCaption
-- ref = P.pgStrictText $ med ^. mediaRef
-- in Media' mId owner cap ref
--------------------------------------------------------------------------------
-- | App Config
--------------------------------------------------------------------------------
data AppEnv = AppEnv
{ _pgConnection :: Connection
, _appEnv :: Environment
}
makeLenses ''AppEnv
mkPG :: C.Config -> IO Connection
mkPG cfg = do
host <- C.lookup cfg "host"
port <- C.lookup cfg "port"
user <- C.lookup cfg "user"
pwd <- C.lookup cfg "password"
db <- C.lookup cfg "db"
let info = ConnectInfo <$> host <*> port <*> user <*> pwd <*> db
case info of
Nothing -> newNullConnection
    Just connInfo -> connect connInfo
mkAppEnv :: FilePath -> IO AppEnv
mkAppEnv fp = do
conf <- C.load [ C.Required fp ]
appE <- mkEnv $ C.subconfig "env" conf
pgConn <- mkPG $ C.subconfig "pg" conf
return $ AppEnv pgConn appE
| martyall/kafaka-test | src/Types.hs | bsd-3-clause | 9,181 | 0 | 14 | 2,415 | 1,683 | 954 | 729 | 143 | 2 |
{-# LANGUAGE
FlexibleInstances
, NoMonomorphismRestriction
, TypeFamilies
, UndecidableInstances
#-}
module Data.CRDT.Classes where
import Prelude hiding (null)
import Algebra.Enumerable (Enumerable(..))
import Control.Arrow ((***))
import qualified Data.Set as S
import qualified Data.IntSet as IS
import qualified Data.Map as M
import qualified Data.IntMap as IM
-- type SetLike a = (Function a, Codomain a ~ Bool)
member, notMember :: (Function a, Codomain a ~ Bool) => Domain a -> a -> Bool
member = flip value
notMember x = not . member x
add = (`update` True)
delete = (`update` zero)
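-- For example, with the 'S.Set' instances defined below:
--
-- > member 1 (S.fromList [1,2]) == True
-- > add 3 (S.fromList [1,2]) == S.fromList [1,2,3]
-- > delete 2 (S.fromList [1,2]) == S.fromList [1]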
compose :: (Function f, Function (g b), Domain f ~ b, Functor g)
=> f -> g b -> g (Codomain f)
compose = fmap . value
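-- For example, 'compose' can use a set as a predicate over the values of a map:
--
-- > compose (S.fromList [1,2]) (M.fromList [("a",1),("b",3)])
-- >   == M.fromList [("a",True),("b",False)]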
class Function a where
type Domain a :: *
type Codomain a :: *
value :: a -> Domain a -> Codomain a
instance Function (k -> v) where
type Domain (k -> v) = k
type Codomain (k -> v) = v
value = ($)
instance Ord a => Function (S.Set a) where
type Domain (S.Set a) = a
type Codomain (S.Set a) = Bool
value = flip S.member
instance Function IS.IntSet where
type Domain IS.IntSet = Int
type Codomain IS.IntSet = Bool
value = flip IS.member
instance Ord k => Function (M.Map k a) where
type Domain (M.Map k a) = k
type Codomain (M.Map k a) = Maybe a
value = flip M.lookup
instance Function (IM.IntMap a) where
type Domain (IM.IntMap a) = Int
type Codomain (IM.IntMap a) = Maybe a
value = flip IM.lookup
instance (Function a, Function b) => Function (a, b) where
type Domain (a, b) = ( Domain a, Domain b)
type Codomain (a, b) = (Codomain a, Codomain b)
value (f, g) = value f *** value g
class Function a => Update a where
update :: Domain a -> Codomain a -> a -> a
update x y = alter (const y) x
alter :: (Codomain a -> Codomain a) -> Domain a -> a -> a
alter f x s = update x (f $ value s x) s
instance Eq k => Update (k -> v) where
update k v f x
| k == x = v
| otherwise = f x
alter g k f x
| k == x = g $ f x
| otherwise = f x
instance Ord a => Update (S.Set a) where
update x True = S.insert x
update x False = S.delete x
instance Update IS.IntSet where
update x True = IS.insert x
update x False = IS.delete x
instance Ord k => Update (M.Map k a) where
update k (Just x) = M.insert k x
update k Nothing = M.delete k
alter = M.alter
instance Update (IM.IntMap a) where
update k (Just x) = IM.insert k x
update k Nothing = IM.delete k
alter = IM.alter
instance (Update a, Update b) => Update (a, b) where
update (x, x') (y, y') = update x y *** update x' y'
class Zero a where
zero :: a
null :: a -> Bool
clear :: a -> a
clear = const zero
instance Zero Bool where
zero = False
null = (==False)
instance (Enumerable k, Zero v) => Zero (k -> v) where
  zero = const zero
  null f = all (null . f) universe
instance Zero (S.Set a) where
zero = S.empty
null = S.null
instance Zero IS.IntSet where
zero = IS.empty
null = IS.null
instance Zero (M.Map k a) where
zero = M.empty
null = M.null
instance Zero (IM.IntMap a) where
zero = IM.empty
null = IM.null
instance (Zero a, Zero b) => Zero (a, b) where
zero = (zero, zero)
null (a, b) = null a && null b
class Size a where
size :: Integral i => a -> i
instance (Enumerable k, Zero v) => Size (k -> v) where
size f = fromIntegral . length . filter (not . null . f) $ universe
instance Size (S.Set a) where
size = fromIntegral . S.size
instance Size IS.IntSet where
size = fromIntegral . IS.size
instance Size (M.Map k a) where
size = fromIntegral . M.size
instance Size (IM.IntMap a) where
size = fromIntegral . IM.size
{-
instance (Size a, Size b) => Zero (a, b) where
zero = (zero, zero)
null (a, b) = null a && null b
-}
{-
class (Function a, Function b, Domain a ~ Codomain b)
=> Composable a b where
type CompositionType a b
or
class ( Function r, Function a, Function b
, Domain r ~ Domain b, Codomain b ~ Domain a, Codomain a ~ Codomain r
) => Composable a b r where
compose :: a -> b -> CompositionType a b
instance (b ~ Codomain a) => Composable (b -> c) a (Domain a -> c) where
compose f g = f $ value g
instance Composable (b -> c) a (Domain a -> c) where
-} | mgsloan/crdt | src/Data/CRDT/Classes.hs | bsd-3-clause | 4,287 | 0 | 11 | 1,086 | 1,694 | 887 | 807 | 115 | 1 |
-------------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.ClickableWorkspaces
-- Description : Make workspace tags clickable in XMobar (for switching focus).
-- Copyright : (c) Geoff deRosenroll <geoffderosenroll@gmail.com>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Geoff deRosenroll <geoffderosenroll@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Provides @clickablePP@, which when applied to the 'PP' pretty-printer used
-- by "XMonad.Hooks.StatusBar" will make the workspace tags clickable in
-- XMobar (for switching focus).
--
-----------------------------------------------------------------------------
module XMonad.Util.ClickableWorkspaces (
-- * Usage
-- $usage
clickablePP,
clickableWrap,
) where
import XMonad.Prelude ((<&>), (>=>))
import XMonad
import XMonad.Hooks.StatusBar.PP (xmobarAction, PP(..))
import XMonad.Util.WorkspaceCompare (getSortByIndex)
import qualified XMonad.StackSet as W
import Data.List (elemIndex)
-- $usage
-- If you're using the "XMonad.Hooks.StatusBar" interface, apply 'clickablePP'
-- to the 'PP' passed to 'XMonad.Hooks.StatusBar.statusBarProp':
--
-- > mySB <- statusBarProp "xmobar" (clickablePP xmobarPP)
--
-- Or if you're using the old "XMonad.Hooks.DynamicLog" interface:
--
-- > logHook = clickablePP xmobarPP { ... } >>= dynamicLogWithPP
--
-- Requirements:
--
-- * @xdotool@ on system (in path)
-- * "XMonad.Hooks.EwmhDesktops" for @xdotool@ support (see Hackage docs for setup)
-- * use of UnsafeStdinReader\/UnsafeXMonadLog in xmobarrc (rather than StdinReader\/XMonadLog)
--
-- Note that UnsafeStdinReader is potentially dangerous if your workspace
-- names are dynamically generated from untrusted input (like window titles).
-- You may need to add @xmobarRaw@ to 'ppRename' before applying
-- 'clickablePP' in such a case.
-- | Wrap string with an xmobar action that uses @xdotool@ to switch to
-- workspace @i@.
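--
-- For example, @clickableWrap 0@ wraps a workspace tag so that left-clicking
-- it in xmobar runs @xdotool set_desktop 0@.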
clickableWrap :: Int -> String -> String
clickableWrap i = xmobarAction ("xdotool set_desktop " ++ show i) "1"
-- | 'XMonad.Util.WorkspaceCompare.getWsIndex' extended to handle workspaces
-- not in the static 'workspaces' config, such as those created by
-- "XMonad.Action.DynamicWorkspaces".
--
-- Uses 'getSortByIndex', as that's what "XMonad.Hooks.EwmhDesktops" uses to
-- export the information to tools like @xdotool@. (Note that EwmhDesktops can
-- be configured with a custom sort function, and we don't handle that here
-- yet.)
getWsIndex :: X (WorkspaceId -> Maybe Int)
getWsIndex = do
wSort <- getSortByIndex
spaces <- gets (map W.tag . wSort . W.workspaces . windowset)
return $ flip elemIndex spaces
-- | Return a function that wraps workspace names in an xmobar action that
-- switches to that workspace.
--
-- This assumes that 'XMonad.Hooks.EwmhDesktops.ewmhDesktopsEventHook'
-- isn't configured to change the workspace order. We might need to add an
-- additional parameter if anyone needs that.
getClickable :: X (String -> WindowSpace -> String)
getClickable = getWsIndex <&> \idx s w -> maybe id clickableWrap (idx (W.tag w)) s
-- | Apply clickable wrapping to the given PP.
clickablePP :: PP -> X PP
clickablePP pp = getClickable <&> \ren -> pp{ ppRename = ppRename pp >=> ren }
| xmonad/xmonad-contrib | XMonad/Util/ClickableWorkspaces.hs | bsd-3-clause | 3,314 | 0 | 14 | 506 | 353 | 219 | 134 | 20 | 1 |
module BaristaSpec where
import Data.Aeson (Value(..), object, (.=))
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Barista (app)
spec :: Spec
spec = with app $ do
describe "GET /test" $ do
it "responds with 200" $ do
get "/test" `shouldRespondWith` 200
it "responds with 'hello'" $ do
get "/test" `shouldRespondWith` "success"
it "responds with 200 / 'success'" $ do
get "/test" `shouldRespondWith` "success" { matchStatus = 200 }
it "has 'Content-Type: text/plain; charset=utf-8'" $ do
get "/test" `shouldRespondWith` 200 { matchHeaders = ["Content-Type" <:> "text/plain; charset=utf-8"] }
describe "GET /todays-filter/1000" $ do
it "responds with some JSON" $ do
get "/todays-filter/1000" `shouldRespondWith` 200 { matchHeaders = ["Content-Type" <:> "application/json; charset=utf-8"] }
| semaj/espresso | test/BaristaSpec.hs | bsd-3-clause | 878 | 0 | 18 | 172 | 237 | 125 | 112 | 20 | 1 |
{- |
Module : Graphics.XDot.Viewer
Copyright : (c) Dennis Felsing
License : 3-Clause BSD-style
Maintainer : dennis@felsin9.de
This module draws the operations of an xdot graph using Cairo and Pango on a
Gtk canvas.
-}
module Graphics.XDot.Viewer (
drawAll
)
where
import Graphics.XDot.Types hiding (w, h, filled, alignment, text, name, size)
import Graphics.UI.Gtk (PangoRectangle(..), layoutSetFontDescription,
layoutGetExtents, layoutContextChanged, fontDescriptionFromString,
fontDescriptionSetSize, showLayout, cairoContextSetFontOptions,
cairoContextGetFontOptions, layoutGetContext, createLayout)
import Graphics.Rendering.Cairo hiding (x, y)
import Control.Monad.State hiding (State)
import qualified Control.Monad.State as MS
type RGBA = (Double, Double, Double, Double)
data DState = DState
{ fontName :: String
, fontSize :: Double
, lineWidth :: Double
, lineStyle :: [Double]
, filledColor :: RGBA
, strokeColor :: RGBA
}
type DrawState a = MS.StateT DState Render a
-- | Draw an xdot graph, possibly highlighting a node.
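--
-- The canvas is translated by half the graph size and the y axis is flipped
-- (@scaley = -1@) before drawing; the returned bounding boxes are mapped back
-- into screen coordinates.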
drawAll :: Eq t =>
Object t -- ^ id of the node to highlight
-> Rectangle -- ^ dimensions of the graph, as returned by 'Graphics.XDot.Parser.getSize'
-> [(Object t, Operation)] -- ^ operations, as returned by 'Graphics.XDot.Parser.getOperations'
-> Render [(Object t, Rectangle)] -- ^ dimensions of the rendered nodes on the screen
drawAll hover (_,_,sw,sh) ops = do
let scalex = 1
scaley = -1
offsetx = -0.5 * sw
offsety = 0.5 * sh
save
translate offsetx offsety
scale scalex scaley
boundingBoxes <- evalStateT (mapM (draw hover) ops) $ DState "" 1 1 [] (1,1,1,1) (0,0,0,1)
restore
return
$ map (\(o, (x,y,w,h)) -> (o, (x*scalex+offsetx,y*scaley+offsety,w,h)))
$ concat boundingBoxes
stylizedDraw :: Eq t => Bool -> Object t -> Object t -> Render a -> DrawState ()
stylizedDraw filled mn hover renderOps = do
(rf,gf,bf,af) <- gets filledColor
(rs,gs,bs,as) <- gets strokeColor
lWidth <- gets lineWidth
lStyle <- gets lineStyle
lift $ do
when filled $ do
if mn /= None && mn == hover
then setSourceRGBA 1 0.8 0.8 1
else setSourceRGBA rf gf bf af
save
renderOps
restore
fillPreserve
fill
setLineWidth lWidth
setDash lStyle 0
if mn /= None && mn == hover
then setSourceRGBA 1 0 0 1
else setSourceRGBA rs gs bs as
save
renderOps
restore
stroke
draw :: Eq t => Object t -> (Object t, Operation) -> DrawState [(Object t, Rectangle)]
draw hover (mn, Ellipse (x,y) w h filled) = do
stylizedDraw filled hover mn $ do
translate x y
scale w h
moveTo 1 0
arc 0 0 1 0 (2 * pi)
return $ case mn of
None -> []
o -> [(o, (x - w, y + h, 2 * w, 2 * h))]
draw hover (mn, Polygon a@((x,y):xys) filled) = do
stylizedDraw filled hover mn $ do
moveTo x y
mapM_ (uncurry lineTo) xys
closePath
let xs = x : map fst a
let ys = y : map snd a
return $ case mn of
None -> []
o -> [(o, (minimum xs, maximum ys, maximum xs - minimum xs, maximum ys - minimum ys))]
draw _ (_, Polygon [] _) = return []
draw hover (mn, Polyline a@((x,y):xys)) = do
stylizedDraw False hover mn $ do
moveTo x y
mapM_ (uncurry lineTo) xys
let xs = x : map fst a
let ys = y : map snd a
return $ case mn of
None -> []
o -> [(o, (minimum xs, maximum ys, maximum xs - minimum xs, maximum ys - minimum ys))]
draw _ (_, Polyline []) = return []
draw hover (mn, BSpline ((x,y):xys) filled) = do
stylizedDraw filled hover mn $ do
moveTo x y
drawBezier xys
return $ case mn of
None -> []
o -> [ (o, (x - 15, y + 15, 30, 30))
, (o, (xe - 15, ye + 15, 30, 30))
]
where drawBezier ((x1,y1):(x2,y2):(x3,y3):xys2) = do
curveTo x1 y1 x2 y2 x3 y3
drawBezier xys2
drawBezier _ = return ()
(xe,ye) = last xys
draw _ (_, BSpline [] _) = return []
draw hover (mn, Text (x,y) alignment w text) = do
fontName' <- gets fontName
fontSize' <- gets fontSize
layout <- lift $ createLayout text
context <- liftIO $ layoutGetContext layout
fo <- liftIO $ cairoContextGetFontOptions context
fontOptionsSetAntialias fo AntialiasDefault
fontOptionsSetHintStyle fo HintStyleNone
fontOptionsSetHintMetrics fo HintMetricsOff
liftIO $ cairoContextSetFontOptions context fo
liftIO $ layoutContextChanged layout
-- This does not work with "Times Roman", but it works with a font that is
-- installed on the system
--font <- liftIO fontDescriptionNew
--liftIO $ fontDescriptionSetFamily font "Nimbus Roman No9 L, Regular"
--liftIO $ fontDescriptionSetFamily font "Times Roman"
--liftIO $ fontDescriptionSetSize font fontSize'
-- Only fontDescriptionFromString works as expected, choosing a similar
-- alternative font when the selected one is not available
font <- liftIO $ fontDescriptionFromString fontName'
liftIO $ fontDescriptionSetSize font fontSize'
liftIO $ layoutSetFontDescription layout (Just font)
(_, PangoRectangle _ _ w2 h2) <- liftIO $ layoutGetExtents layout
let (f, w3, h3, descent) = if w2 > w
then (w / w2, w, h2 * w / w2, 4 * w / w2)
else (1, w2, h2, 4)
let x3 = case alignment of
LeftAlign -> x
CenterAlign -> x - 0.5 * w3
RightAlign -> x - w3
y3 = y + h3 - descent
stylizedDraw False hover mn $ do
moveTo x3 y3
scale f (-f)
showLayout layout
return $ case mn of
None -> []
o -> [(o, (x3, y3, w3, h3))]
draw _ (_, Color color filled) = do
modify (\s -> if filled
then s{filledColor = color}
else s{strokeColor = color})
return []
draw _ (_, Font size name) = do
modify (\s -> s{fontName = fixedName, fontSize = size})
return []
-- Pango does not like "Times-Roman", but works with "Times Roman".
-- Graphviz handles this in plugin/pango/gvtextlayout_pango.c
where fixedName = map fixName name
fixName '-' = ' '
fixName x = x
draw _ (_, Style x) = do
case x of -- TODO: Some styles missing
"solid" -> modify (\s -> s{lineStyle = []}) -- always on
"dashed" -> modify (\s -> s{lineStyle = [6,6]}) -- 6 pts on, 6 pts off
"dotted" -> modify (\s -> s{lineStyle = [2,4]}) -- 2 pts on, 4 pts off
_ -> return ()
return []
draw _ (_, Image{}) = return [] -- TODO
draw _ (_, FontCharacteristics{}) = return [] -- TODO
| FranklinChen/xdot | src/Graphics/XDot/Viewer.hs | bsd-3-clause | 6,518 | 0 | 16 | 1,672 | 2,418 | 1,256 | 1,162 | 157 | 15 |
module Main where
import Data.Maybe (fromMaybe)
import System.Environment (getEnv, lookupEnv)
import Config
import Api
import Db
main :: IO ()
main =
do
config <- loadConfig
let dbConfig = subconfig "db" config
connection <- createConnection dbConfig
migrate dbConfig connection
runApi config
| b0oh/heroku-docker-haskell-stack-example | app/Main.hs | isc | 318 | 0 | 10 | 66 | 96 | 49 | 47 | 14 | 1 |
{-| Contains miscellaneous utility functions such as functions for working with signals and signal generators. -}
module FRP.Helm.Utilities (
-- * Angles
radians,
degrees,
turns,
-- * Applying
(<|),
(|>),
) where
{-| Converts radians into the standard angle measurement (radians). -}
radians :: Double -> Double
radians n = n
{-| Converts degrees into the standard angle measurement (radians). -}
degrees :: Double -> Double
degrees n = n * pi / 180
{-| Converts turns into the standard angle measurement (radians).
Turns are essentially full revolutions of the unit circle. -}
turns :: Double -> Double
turns n = 2 * pi * n
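-- For example (equal up to floating-point rounding):
--
-- > degrees 180 == pi
-- > turns 0.5 == pi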
{-| Forward function application, think of it as an inverted '($)'. Provided for easy porting from Elm. -}
(|>) :: a -> (a -> b) -> b
(|>) = flip ($)
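-- For example, @2 |> (+1) |> show@ evaluates to @"3"@.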
{-| Exactly the same as '($)', only there to make code using '(|>)'
more consistent. -}
(<|) :: (a -> b) -> a -> b
(<|) = ($)
| didmar/helm | src/FRP/Helm/Utilities.hs | mit | 927 | 0 | 8 | 192 | 169 | 105 | 64 | 16 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE Arrows #-}
module Main where
import Opaleye
import Data.Profunctor.Product
import Data.Profunctor.Product.Default
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField (FromField(..), returnError, ResultError(..), Conversion)
import Prelude hiding (id)
import Control.Arrow
data UserType = SuperAdmin | Admin | Registered deriving (Show)
newtype UserId = UserId Int deriving (Show)
data UserPoly id name email utype = User { id :: id, name :: name, email :: email, utype :: utype } deriving (Show)
type User = UserPoly UserId String String UserType
type UserPGW = UserPoly (Column PGInt4) (Column PGText) (Column PGText) (Column PGText)
type UserPGR = UserPoly (Column PGInt4) (Column PGText) (Column PGText) (Column PGText)
$(makeAdaptorAndInstance "pUser" ''UserPoly)
userTable :: Table UserPGW UserPGR
userTable = Table "typed_users" (pUser User {
id = required "id",
name = required "name",
email = required "email",
utype = required "user_type"
}
)
instance FromField UserId where
fromField field bs = UserId <$> fromField field bs
instance QueryRunnerColumnDefault PGInt4 UserId where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance FromField UserType where
fromField field bs = utConversion $ fromField field bs
where
utConversion :: Conversion String -> Conversion UserType
utConversion cString = do
typeString <- cString
case mkUserType typeString of
Nothing -> returnError ConversionFailed field "Unrecognized user type"
Just ut -> return ut
mkUserType :: String -> Maybe UserType
mkUserType "superadmin" = Just SuperAdmin
mkUserType "admin" = Just Admin
mkUserType "registered" = Just Registered
mkUserType _ = Nothing
instance QueryRunnerColumnDefault PGText UserType where
queryRunnerColumnDefault = fieldQueryRunnerColumn
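-- Note: the instances above only cover reading 'UserType' values from the
-- database. Writing them back (inserts/updates) would additionally need a
-- conversion from 'UserType' to a 'Column' of 'PGText' (e.g. a
-- 'Constant'/'Default'-style instance); that direction is not shown in this
-- example.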
getUserRows :: IO [User]
getUserRows = do
conn <- connect defaultConnectInfo { connectDatabase = "scratch"}
runQuery conn $ proc () ->
do
user <- queryTable userTable -< ()
returnA -< user
main :: IO ()
main = do
rows <- getUserRows
putStrLn $ show rows
| vacationlabs/haskell-webapps | doc/docs/opaleye/code/opaleye-enums-handling.hs | mit | 2,428 | 1 | 13 | 485 | 648 | 346 | 302 | 57 | 1 |
{- |
Module : ./Maude/Meta/AsSymbol.hs
Description : Viewing Maude data types as Symbols
Copyright : (c) Martin Kuehl, Uni Bremen 2008-2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : mkhl@informatik.uni-bremen.de
Stability : experimental
Portability : portable
Viewing Maude data types as Symbols.
Defines a type class 'AsSymbol' that lets us treat Maude data types as
'Symbol's, converting back and forth between them as needed.
Consider importing "Maude.Meta" instead of this module.
-}
module Maude.Meta.AsSymbol (
-- * The AsSymbol type class
AsSymbol (..),
-- * Auxiliary functions
asSymbolSet,
mapAsSymbol,
) where
import Maude.AS_Maude
import Maude.Symbol
import Maude.Meta.HasName
import Maude.Util
import Data.Maybe (fromJust)
import qualified Data.Set as Set
-- * The AsSymbol type class
{- | Represents something that can be converted into a 'Symbol'.
Instances should only override /one/ of its class methods: -}
--
{- * Use 'asSymbol' when every member of the instance type can be
represented as a 'Symbol'. -}
--
-- * Use 'asSymbolMaybe' otherwise.
--
-- Each function is defined in terms of the other one by default.
class AsSymbol a where
-- | Convert the input into a 'Symbol'.
asSymbol :: a -> Symbol
asSymbol = fromJust . asSymbolMaybe
-- | Convert the input into 'Maybe' a 'Symbol'
asSymbolMaybe :: a -> Maybe Symbol
asSymbolMaybe = Just . asSymbol
-- * Auxiliary functions
-- | Instead of a single 'Symbol', convert the input into a 'SymbolSet'.
asSymbolSet :: (AsSymbol a) => a -> SymbolSet
asSymbolSet = maybe Set.empty Set.singleton . asSymbolMaybe
{- | Apply a 'SymbolMap' to the input, then convert the result back to
the original type. -}
mapAsSymbol :: (AsSymbol a) => (Symbol -> a) -> SymbolMap -> a -> a
mapAsSymbol ctr mp item = let extract = ctr . mapAsFunction mp
in maybe item extract $ asSymbolMaybe item
-- * Predefined 'AsSymbol' instances
instance AsSymbol Symbol where
asSymbol = id
instance AsSymbol Type where
asSymbol typ = case typ of
TypeSort sort -> asSymbol sort
TypeKind kind -> asSymbol kind
instance AsSymbol Sort where
asSymbol = Sort . getName
instance AsSymbol Kind where
asSymbol = Kind . getName
instance AsSymbol LabelId where
asSymbol = Labl . getName
instance AsSymbol OpId where
asSymbol = OpWildcard . getName
instance AsSymbol StmntAttr where
asSymbolMaybe attr = case attr of
Label name -> Just $ Labl name
_ -> Nothing
instance AsSymbol Operator where
asSymbol (Op op dom cod _) = let
op' = getName op
dom' = map asSymbol dom
cod' = asSymbol cod
in Operator op' dom' cod'
instance AsSymbol Term where
asSymbolMaybe term = case term of
Const _ _ -> Nothing
Var _ _ -> Nothing
Apply op ts tp -> let
dom = map (asSymbol . getTermType) ts
cod = asSymbol tp
in Just $ Operator op dom cod
| spechub/Hets | Maude/Meta/AsSymbol.hs | gpl-2.0 | 3,030 | 0 | 15 | 733 | 569 | 300 | 269 | 52 | 1 |
main :: String -> ()
main "hoi" = ()
main "" = ()
main (_:_) = () | roberth/uu-helium | test/staticwarnings/Complete5.hs | gpl-3.0 | 69 | 0 | 7 | 20 | 49 | 25 | 24 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Includes_Types where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
data Included = Included
{ included_MyIntField :: Int.Int64
} deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Included where
encode = encode_Included
decode = decode_Included
instance Hashable.Hashable Included where
hashWithSalt salt record = salt `Hashable.hashWithSalt` included_MyIntField record
instance DeepSeq.NFData Included where
rnf _record0 =
DeepSeq.rnf (included_MyIntField _record0) `seq`
()
instance Arbitrary.Arbitrary Included where
arbitrary = Monad.liftM Included (Arbitrary.arbitrary)
shrink obj | obj == default_Included = []
| otherwise = Maybe.catMaybes
[ if obj == default_Included{included_MyIntField = included_MyIntField obj} then Nothing else Just $ default_Included{included_MyIntField = included_MyIntField obj}
]
from_Included :: Included -> Types.ThriftVal
from_Included record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
[ (\_v3 -> Just (1, ("MyIntField",Types.TI64 _v3))) $ included_MyIntField record
]
write_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Included -> IO ()
write_Included oprot record = Thrift.writeVal oprot $ from_Included record
encode_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Included -> BS.ByteString
encode_Included oprot record = Thrift.serializeVal oprot $ from_Included record
to_Included :: Types.ThriftVal -> Included
to_Included (Types.TStruct fields) = Included{
included_MyIntField = maybe (included_MyIntField default_Included) (\(_,_val5) -> (case _val5 of {Types.TI64 _val6 -> _val6; _ -> error "wrong type"})) (Map.lookup (1) fields)
}
to_Included _ = error "not a struct"
read_Included :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Included
read_Included iprot = to_Included <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Included)
decode_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Included
decode_Included iprot bs = to_Included $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Included) bs
typemap_Included :: Types.TypeMap
typemap_Included = Map.fromList [("MyIntField",(1,Types.T_I64))]
default_Included :: Included
default_Included = Included{
included_MyIntField = 0}
| sinjar666/fbthrift | thrift/compiler/test/fixtures/includes/gen-hs/Includes_Types.hs | apache-2.0 | 4,291 | 0 | 15 | 690 | 1,113 | 661 | 452 | 73 | 2 |
module GHC.Coercion where
import GHC.Kind
import GHC.Var
import GHC.Type
import Utilities
data Coercion = CoVarCo CoVarId
| ReflCo Type
| AppCo Coercion Coercion
| SymCo Coercion
| TransCo Coercion Coercion
| NthCo Int Coercion
| ForAllCo TyVar Coercion
| InstCo Coercion Type
| UnsafeCo Type Type -- Also used for instantiated axioms
deriving (Eq, Show)
instance Pretty Coercion where
pPrint _ = text "co" -- FIXME
mkCoercionType :: Type -> Type -> Type
mkCoercionType ty1 ty2 = mkTyConAppTy (eqHashTyCon (typeKind ty1)) [ty1, ty2]
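-- | Inverse of 'mkCoercionType': calls 'error' if the type is not an
-- application of the equality type constructor.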
splitCoercionType :: Type -> (Type, Type)
splitCoercionType ty = case splitTyConAppTy_maybe ty of
Just (tc, [ty1, ty2]) | tc == eqHashTyCon (typeKind ty1) -> (ty1, ty2)
_ -> error "splitCoercionType"
coVarIdType' :: CoVarId -> (Type, Type)
coVarIdType' = splitCoercionType . idType
coercionType :: Coercion -> Type
coercionType = uncurry mkCoercionType . coercionType'
coercionType' :: Coercion -> (Type, Type)
coercionType' (CoVarCo x) = coVarIdType' x
coercionType' (ReflCo ty) = (ty, ty)
coercionType' (AppCo co1 co2) = (ty1a `AppTy` ty2a, ty1b `AppTy` ty2b)
where (ty1a, ty1b) = coercionType' co1
(ty2a, ty2b) = coercionType' co2
coercionType' (SymCo co) = (ty2, ty1)
where (ty1, ty2) = coercionType' co
coercionType' (TransCo co1 co2) = (ty1a, ty2b)
where (ty1a, _ty1b) = coercionType' co1
(_ty2a, ty2b) = coercionType' co2
coercionType' (NthCo n co) = (f ty1, f ty2)
where (ty1, ty2) = coercionType' co
f ty = case splitTyConAppTy_maybe ty of
Just (_, tys) | n < length tys -> tys !! n
_ -> error "coercionType': NthCo"
coercionType' (ForAllCo a co) = (ForAllTy a ty1, ForAllTy a ty2)
where (ty1, ty2) = coercionType' co
coercionType' (InstCo co ty) = (instTy ty1 ty, instTy ty2 ty)
where (ty1, ty2) = coercionType' co
coercionType' (UnsafeCo ty1 ty2) = (ty1, ty2)
| beni55/cps-core | GHC/Coercion.hs | bsd-3-clause | 2,022 | 0 | 14 | 503 | 723 | 385 | 338 | 48 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- Load information on package sources
module Stack.Build.Source
( loadSourceMap
, SourceMap
, PackageSource (..)
, localFlags
, getLocalPackageViews
, loadLocalPackage
, parseTargetsFromBuildOpts
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Exception (assert, catch)
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import Crypto.Hash (Digest, SHA256)
import Crypto.Hash.Conduit (sinkHash)
import qualified Data.ByteString as S
import Data.Byteable (toBytes)
import Data.Conduit (($$), ZipSink (..))
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Either
import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Distribution.Package (pkgName, pkgVersion)
import Distribution.PackageDescription (GenericPackageDescription, package, packageDescription)
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Prelude
import Stack.Build.Cache
import Stack.Build.Target
import Stack.BuildPlan (loadMiniBuildPlan, shadowMiniBuildPlan,
parseCustomMiniBuildPlan)
import Stack.Constants (wiredInPackages)
import Stack.Package
import Stack.PackageIndex
import Stack.Types
import System.Directory
import System.IO (withBinaryFile, IOMode (ReadMode))
import System.IO.Error (isDoesNotExistError)
loadSourceMap :: (MonadIO m, MonadCatch m, MonadReader env m, HasBuildConfig env, MonadBaseControl IO m, HasHttpManager env, MonadLogger m, HasEnvConfig env)
=> NeedTargets
-> BuildOpts
-> m ( Map PackageName SimpleTarget
, MiniBuildPlan
, [LocalPackage]
, Set PackageName -- non-local targets
, SourceMap
)
loadSourceMap needTargets bopts = do
bconfig <- asks getBuildConfig
rawLocals <- getLocalPackageViews
(mbp0, cliExtraDeps, targets) <- parseTargetsFromBuildOpts needTargets bopts
menv <- getMinimalEnvOverride
caches <- getPackageCaches menv
let latestVersion = Map.fromListWith max $ map toTuple $ Map.keys caches
-- Extend extra-deps to encompass targets requested on the command line
-- that are not in the snapshot.
extraDeps0 <- extendExtraDeps
(bcExtraDeps bconfig)
cliExtraDeps
(Map.keysSet $ Map.filter (== STUnknown) targets)
latestVersion
locals <- mapM (loadLocalPackage bopts targets) $ Map.toList rawLocals
checkFlagsUsed bopts locals extraDeps0 (mbpPackages mbp0)
let
-- loadLocals returns PackageName (foo) and PackageIdentifier (bar-1.2.3) targets separately;
-- here we combine them into nonLocalTargets. This is one of the
-- return values of this function.
nonLocalTargets :: Set PackageName
nonLocalTargets =
Map.keysSet $ Map.filter (not . isLocal) targets
where
isLocal (STLocalComps _) = True
isLocal STLocalAll = True
isLocal STUnknown = False
isLocal STNonLocal = False
shadowed = Map.keysSet rawLocals <> Map.keysSet extraDeps0
(mbp, extraDeps1) = shadowMiniBuildPlan mbp0 shadowed
-- Add the extra deps from the stack.yaml file to the deps grabbed from
-- the snapshot
extraDeps2 = Map.union
(Map.map (\v -> (v, Map.empty)) extraDeps0)
(Map.map (\mpi -> (mpiVersion mpi, mpiFlags mpi)) extraDeps1)
-- Overwrite any flag settings with those from the config file
extraDeps3 = Map.mapWithKey
(\n (v, f) -> PSUpstream v Local $
case ( Map.lookup (Just n) $ boptsFlags bopts
, Map.lookup Nothing $ boptsFlags bopts
, Map.lookup n $ bcFlags bconfig
) of
-- Didn't have any flag overrides, fall back to the flags
-- defined in the snapshot.
(Nothing, Nothing, Nothing) -> f
-- Either command line flag for this package, general
-- command line flag, or flag in stack.yaml is defined.
-- Take all of those and ignore the snapshot flags.
(x, y, z) -> Map.unions
[ fromMaybe Map.empty x
, fromMaybe Map.empty y
, fromMaybe Map.empty z
])
extraDeps2
let sourceMap = Map.unions
[ Map.fromList $ flip map locals $ \lp ->
let p = lpPackage lp
in (packageName p, PSLocal lp)
, extraDeps3
, flip fmap (mbpPackages mbp) $ \mpi ->
(PSUpstream (mpiVersion mpi) Snap (mpiFlags mpi))
] `Map.difference` Map.fromList (map (, ()) (HashSet.toList wiredInPackages))
return (targets, mbp, locals, nonLocalTargets, sourceMap)
-- | Use the build options and environment to parse targets.
parseTargetsFromBuildOpts
:: (MonadIO m, MonadCatch m, MonadReader env m, HasBuildConfig env, MonadBaseControl IO m, HasHttpManager env, MonadLogger m, HasEnvConfig env)
=> NeedTargets
-> BuildOpts
-> m (MiniBuildPlan, M.Map PackageName Version, M.Map PackageName SimpleTarget)
parseTargetsFromBuildOpts needTargets bopts = do
bconfig <- asks getBuildConfig
mbp0 <-
case bcResolver bconfig of
ResolverSnapshot snapName -> do
$logDebug $ "Checking resolver: " <> renderSnapName snapName
loadMiniBuildPlan snapName
ResolverCompiler _ -> do
-- We ignore the resolver version, as it might be
-- GhcMajorVersion, and we want the exact version
-- we're using.
version <- asks (envConfigCompilerVersion . getEnvConfig)
return MiniBuildPlan
{ mbpCompilerVersion = version
, mbpPackages = Map.empty
}
ResolverCustom _ url -> do
stackYamlFP <- asks $ bcStackYaml . getBuildConfig
parseCustomMiniBuildPlan stackYamlFP url
rawLocals <- getLocalPackageViews
workingDir <- getWorkingDir
let snapshot = mpiVersion <$> mbpPackages mbp0
flagExtraDeps <- convertSnapshotToExtra
snapshot
(bcExtraDeps bconfig)
(catMaybes $ Map.keys $ boptsFlags bopts)
(cliExtraDeps, targets) <-
parseTargets
needTargets
(bcImplicitGlobal bconfig)
snapshot
(flagExtraDeps <> bcExtraDeps bconfig)
(fst <$> rawLocals)
workingDir
(boptsTargets bopts)
return (mbp0, cliExtraDeps <> flagExtraDeps, targets)
-- | For every package in the snapshot which is referenced by a flag, give the
-- user a warning and then add it to extra-deps.
convertSnapshotToExtra
:: MonadLogger m
=> Map PackageName Version -- ^ snapshot
-> Map PackageName Version -- ^ extra-deps
-> [PackageName] -- ^ packages referenced by a flag
-> m (Map PackageName Version)
convertSnapshotToExtra snapshot extra0 flags0 =
go Map.empty flags0
where
go !extra [] = return extra
go extra (flag:flags)
| Just _ <- Map.lookup flag extra0 = go extra flags
| otherwise = case Map.lookup flag snapshot of
Nothing -> go extra flags
Just version -> do
$logWarn $ T.concat
[ "- Implicitly adding "
, T.pack $ packageNameString flag
, " to extra-deps based on command line flag"
]
go (Map.insert flag version extra) flags
-- | Parse out the local package views for the current project
getLocalPackageViews :: (MonadThrow m, MonadIO m, MonadReader env m, HasEnvConfig env)
=> m (Map PackageName (LocalPackageView, GenericPackageDescription))
getLocalPackageViews = do
econfig <- asks getEnvConfig
locals <- forM (Map.toList $ envConfigPackages econfig) $ \(dir, validWanted) -> do
cabalfp <- getCabalFileName dir
gpkg <- readPackageUnresolved cabalfp
let cabalID = package $ packageDescription gpkg
name = fromCabalPackageName $ pkgName $ cabalID
checkCabalFileName name cabalfp
let lpv = LocalPackageView
{ lpvVersion = fromCabalVersion $ pkgVersion cabalID
, lpvRoot = dir
, lpvCabalFP = cabalfp
, lpvExtraDep = not validWanted
, lpvComponents = getNamedComponents gpkg
}
return (name, (lpv, gpkg))
checkDuplicateNames locals
return $ Map.fromList locals
where
getNamedComponents gpkg = Set.fromList $ concat
[ maybe [] (const [CLib]) (C.condLibrary gpkg)
, go CExe C.condExecutables
, go CTest C.condTestSuites
, go CBench C.condBenchmarks
]
where
go wrapper f = map (wrapper . T.pack . fst) $ f gpkg
-- | Check if there are any duplicate package names and, if so, throw an
-- exception.
checkDuplicateNames :: MonadThrow m => [(PackageName, (LocalPackageView, gpd))] -> m ()
checkDuplicateNames locals =
case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map toPair locals of
[] -> return ()
x -> throwM $ DuplicateLocalPackageNames x
where
toPair (pn, (lpv, _)) = (pn, [lpvRoot lpv])
hasMultiples (_, _:_:_) = True
hasMultiples _ = False
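-- | Split a list of components into the sets of executable, test suite and
-- benchmark names. The library component ('CLib') carries no name and is
-- dropped.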
splitComponents :: [NamedComponent]
-> (Set Text, Set Text, Set Text)
splitComponents =
go id id id
where
go a b c [] = (Set.fromList $ a [], Set.fromList $ b [], Set.fromList $ c [])
go a b c (CLib:xs) = go a b c xs
go a b c (CExe x:xs) = go (a . (x:)) b c xs
go a b c (CTest x:xs) = go a (b . (x:)) c xs
go a b c (CBench x:xs) = go a b (c . (x:)) xs
-- | Upgrade the initial local package info to a full-blown @LocalPackage@
-- based on the selected components
loadLocalPackage
:: forall m env.
(MonadReader env m, HasEnvConfig env, MonadCatch m, MonadLogger m, MonadIO m)
=> BuildOpts
-> Map PackageName SimpleTarget
-> (PackageName, (LocalPackageView, GenericPackageDescription))
-> m LocalPackage
loadLocalPackage bopts targets (name, (lpv, gpkg)) = do
bconfig <- asks getBuildConfig
econfig <- asks getEnvConfig
let config = PackageConfig
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = False
, packageConfigFlags = localFlags (boptsFlags bopts) bconfig name
, packageConfigCompilerVersion = envConfigCompilerVersion econfig
, packageConfigPlatform = configPlatform $ getConfig bconfig
}
pkg = resolvePackage config gpkg
mtarget = Map.lookup name targets
(exes, tests, benches) =
case mtarget of
Just (STLocalComps comps) -> splitComponents $ Set.toList comps
Just STLocalAll ->
( packageExes pkg
, if boptsTests bopts
then packageTests pkg
else Set.empty
, if boptsBenchmarks bopts
then packageBenchmarks pkg
else Set.empty
)
Just STNonLocal -> assert False mempty
Just STUnknown -> assert False mempty
Nothing -> mempty
btconfig = config
{ packageConfigEnableTests = not $ Set.null tests
, packageConfigEnableBenchmarks = not $ Set.null benches
}
testconfig = config
{ packageConfigEnableTests = True
, packageConfigEnableBenchmarks = False
}
benchconfig = config
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = True
}
btpkg
| Set.null tests && Set.null benches = Nothing
| otherwise = Just $ LocalPackageTB
{ lptbPackage = resolvePackage btconfig gpkg
, lptbTests = tests
, lptbBenches = benches
}
testpkg = resolvePackage testconfig gpkg
benchpkg = resolvePackage benchconfig gpkg
mbuildCache <- tryGetBuildCache $ lpvRoot lpv
(_,modFiles,otherFiles,mainFiles,extraFiles) <- getPackageFiles (packageFiles pkg) (lpvCabalFP lpv)
let files =
mconcat (M.elems modFiles) <>
mconcat (M.elems otherFiles) <>
Set.map mainIsFile (mconcat (M.elems mainFiles)) <>
extraFiles
(isDirty, newBuildCache) <- checkBuildCache
(fromMaybe Map.empty mbuildCache)
(map toFilePath $ Set.toList files)
return LocalPackage
{ lpPackage = pkg
, lpTestDeps = packageDeps $ testpkg
, lpBenchDeps = packageDeps $ benchpkg
, lpExeComponents =
case mtarget of
Nothing -> Nothing
Just _ -> Just exes
, lpTestBench = btpkg
, lpFiles = files
, lpDirtyFiles = isDirty || boptsForceDirty bopts
, lpNewBuildCache = newBuildCache
, lpCabalFile = lpvCabalFP lpv
, lpDir = lpvRoot lpv
, lpComponents = Set.unions
[ Set.map CExe exes
, Set.map CTest tests
, Set.map CBench benches
]
}
-- | Ensure that the flags specified in the stack.yaml file and on the command
-- line are used.
checkFlagsUsed :: (MonadThrow m, MonadReader env m, HasBuildConfig env)
=> BuildOpts
-> [LocalPackage]
-> Map PackageName extraDeps -- ^ extra deps
-> Map PackageName snapshot -- ^ snapshot, for error messages
-> m ()
checkFlagsUsed bopts lps extraDeps snapshot = do
bconfig <- asks getBuildConfig
-- Check if flags specified in stack.yaml and the command line are
-- used, see https://github.com/commercialhaskell/stack/issues/617
let flags = map (, FSCommandLine) [(k, v) | (Just k, v) <- Map.toList $ boptsFlags bopts]
++ map (, FSStackYaml) (Map.toList $ bcFlags bconfig)
localNameMap = Map.fromList $ map (packageName . lpPackage &&& lpPackage) lps
checkFlagUsed ((name, userFlags), source) =
case Map.lookup name localNameMap of
-- Package is not available locally
Nothing ->
case Map.lookup name extraDeps of
-- Also not in extra-deps, it's an error
Nothing ->
case Map.lookup name snapshot of
Nothing -> Just $ UFNoPackage source name
Just _ -> Just $ UFSnapshot name
-- We don't check for flag presence for extra deps
Just _ -> Nothing
-- Package exists locally, let's check if the flags are defined
Just pkg ->
let unused = Set.difference (Map.keysSet userFlags) (packageDefinedFlags pkg)
in if Set.null unused
-- All flags are defined, nothing to do
then Nothing
-- Error about the undefined flags
else Just $ UFFlagsNotDefined source pkg unused
unusedFlags = mapMaybe checkFlagUsed flags
unless (null unusedFlags)
$ throwM
$ InvalidFlagSpecification
$ Set.fromList unusedFlags
-- | All flags for a local package
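--
-- Since 'Map.unions' is left-biased, flags passed on the command line for this
-- specific package take precedence over general command-line flags, which in
-- turn take precedence over the flags from the project configuration.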
localFlags :: (Map (Maybe PackageName) (Map FlagName Bool))
-> BuildConfig
-> PackageName
-> Map FlagName Bool
localFlags boptsflags bconfig name = Map.unions
[ fromMaybe Map.empty $ Map.lookup (Just name) $ boptsflags
, fromMaybe Map.empty $ Map.lookup Nothing $ boptsflags
, fromMaybe Map.empty $ Map.lookup name $ bcFlags bconfig
]
-- | Add in necessary packages to extra dependencies
--
-- Originally part of https://github.com/commercialhaskell/stack/issues/272,
-- this was then superseded by
-- https://github.com/commercialhaskell/stack/issues/651
extendExtraDeps :: (MonadThrow m, MonadReader env m, HasBuildConfig env)
=> Map PackageName Version -- ^ original extra deps
-> Map PackageName Version -- ^ package identifiers from the command line
-> Set PackageName -- ^ all packages added on the command line
-> Map PackageName Version -- ^ latest versions in indices
-> m (Map PackageName Version) -- ^ new extradeps
extendExtraDeps extraDeps0 cliExtraDeps unknowns latestVersion
| null errs = return $ Map.unions $ extraDeps1 : unknowns'
| otherwise = do
bconfig <- asks getBuildConfig
throwM $ UnknownTargets
(Set.fromList errs)
Map.empty -- TODO check the cliExtraDeps for presence in index
(bcStackYaml bconfig)
where
extraDeps1 = Map.union extraDeps0 cliExtraDeps
(errs, unknowns') = partitionEithers $ map addUnknown $ Set.toList unknowns
addUnknown pn =
case Map.lookup pn extraDeps1 of
Just _ -> Right Map.empty
Nothing ->
case Map.lookup pn latestVersion of
Just v -> Right $ Map.singleton pn v
Nothing -> Left pn
-- | Compare the current filesystem state to the cached information, and
-- determine (1) if the files are dirty, and (2) the new cache values.
checkBuildCache :: MonadIO m
=> Map FilePath FileCacheInfo -- ^ old cache
-> [FilePath] -- ^ files in package
-> m (Bool, Map FilePath FileCacheInfo)
checkBuildCache oldCache files = liftIO $ do
(Any isDirty, m) <- fmap mconcat $ mapM go files
return (isDirty, m)
where
go fp = do
mmodTime <- getModTimeMaybe fp
case mmodTime of
Nothing -> return (Any False, Map.empty)
Just modTime' -> do
(isDirty, newFci) <-
case Map.lookup fp oldCache of
Just fci
| fciModTime fci == modTime' -> return (False, fci)
| otherwise -> do
newFci <- calcFci modTime' fp
let isDirty =
fciSize fci /= fciSize newFci ||
fciHash fci /= fciHash newFci
return (isDirty, newFci)
Nothing -> do
newFci <- calcFci modTime' fp
return (True, newFci)
return (Any isDirty, Map.singleton fp newFci)
getModTimeMaybe fp =
liftIO
(catch
(liftM
(Just . modTime)
(getModificationTime fp))
(\e ->
if isDoesNotExistError e
then return Nothing
else throwM e))
calcFci modTime' fp =
withBinaryFile fp ReadMode $ \h -> do
(size, digest) <- CB.sourceHandle h $$ getZipSink
((,)
<$> ZipSink (CL.fold
(\x y -> x + fromIntegral (S.length y))
0)
<*> ZipSink sinkHash)
return FileCacheInfo
{ fciModTime = modTime'
, fciSize = size
, fciHash = toBytes (digest :: Digest SHA256)
}
| ant1441/stack | src/Stack/Build/Source.hs | bsd-3-clause | 20,978 | 0 | 27 | 7,268 | 4,831 | 2,506 | 2,325 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Test.StateMachine.Lockstep.Simple (
-- * Test type-level parameters
MockState
, Cmd
, Resp
, RealHandle
, MockHandle
, Test
-- * Test term-level parameters
, StateMachineTest(..)
-- * Handle instantiation
, At(..)
, (:@)
-- * Model state
, Model(..)
-- * Running the tests
, prop_sequential
, prop_parallel
  -- * Translate to n-ary model
, fromSimple
) where
import Data.Bifunctor
import Data.Functor.Classes
import Data.Kind
(Type)
import Data.SOP
import Data.Typeable
import Prelude
import Test.QuickCheck
import Test.StateMachine
import Test.StateMachine.Lockstep.Auxiliary
import Test.StateMachine.Lockstep.NAry
(MockState)
import qualified Test.StateMachine.Lockstep.NAry as NAry
{-------------------------------------------------------------------------------
Top-level parameters
-------------------------------------------------------------------------------}
data family Cmd t :: Type -> Type
data family Resp t :: Type -> Type
data family RealHandle t :: Type
data family MockHandle t :: Type
{-------------------------------------------------------------------------------
Default handle instantiation
-------------------------------------------------------------------------------}
type family Test (f :: Type -> Type) :: Type where
Test (Cmd t) = t
Test (Resp t) = t
-- @f@ will be instantiated with @Cmd@ or @Resp@
-- @r@ will be instantiated with 'Symbolic' or 'Concrete'
newtype At f r = At { unAt :: f (Reference (RealHandle (Test f)) r) }
type f :@ r = At f r
{-------------------------------------------------------------------------------
Simplified model
-------------------------------------------------------------------------------}
data Model t r = Model {
modelState :: MockState t
, modelRefs :: [(Reference (RealHandle t) r, MockHandle t)]
}
modelToSimple :: NAry.Model (Simple t) r -> Model t r
modelToSimple NAry.Model{modelRefss = NAry.Refss (NAry.Refs rs :* Nil), ..} = Model {
modelState = modelState
, modelRefs = map (second unSimpleToMock) rs
}
{-------------------------------------------------------------------------------
Wrap and unwrap
-------------------------------------------------------------------------------}
cmdAtFromSimple :: Functor (Cmd t)
=> Cmd t :@ Symbolic -> NAry.Cmd (Simple t) NAry.:@ Symbolic
cmdAtFromSimple = NAry.At . SimpleCmd . fmap NAry.FlipRef . unAt
cmdAtToSimple :: Functor (Cmd t)
=> NAry.Cmd (Simple t) NAry.:@ Symbolic -> Cmd t :@ Symbolic
cmdAtToSimple = At . fmap (NAry.unFlipRef) . unSimpleCmd . NAry.unAt
cmdMockToSimple :: Functor (Cmd t)
=> NAry.Cmd (Simple t) (NAry.MockHandle (Simple t)) '[RealHandle t]
-> Cmd t (MockHandle t)
cmdMockToSimple = fmap unSimpleToMock . unSimpleCmd
cmdRealToSimple :: Functor (Cmd t)
=> NAry.Cmd (Simple t) I '[RealHandle t]
-> Cmd t (RealHandle t)
cmdRealToSimple = fmap unI . unSimpleCmd
respMockFromSimple :: Functor (Resp t)
=> Resp t (MockHandle t)
-> NAry.Resp (Simple t) (NAry.MockHandle (Simple t)) '[RealHandle t]
respMockFromSimple = SimpleResp . fmap SimpleToMock
respRealFromSimple :: Functor (Resp t)
=> Resp t (RealHandle t)
-> NAry.Resp (Simple t) I '[RealHandle t]
respRealFromSimple = SimpleResp . fmap I
{-------------------------------------------------------------------------------
User defined values
-------------------------------------------------------------------------------}
data StateMachineTest t =
( Typeable t
-- Response
, Eq (Resp t (MockHandle t))
, Show (Resp t (Reference (RealHandle t) Symbolic))
, Show (Resp t (Reference (RealHandle t) Concrete))
, Show (Resp t (MockHandle t))
, Traversable (Resp t)
-- Command
, Show (Cmd t (Reference (RealHandle t) Symbolic))
, Show (Cmd t (Reference (RealHandle t) Concrete))
, Traversable (Cmd t)
-- Real handles
, Eq (RealHandle t)
, Show (RealHandle t)
, ToExpr (RealHandle t)
-- Mock handles
, Eq (MockHandle t)
, Show (MockHandle t)
, ToExpr (MockHandle t)
-- Mock state
, Show (MockState t)
, ToExpr (MockState t)
) => StateMachineTest {
runMock :: Cmd t (MockHandle t) -> MockState t -> (Resp t (MockHandle t), MockState t)
, runReal :: Cmd t (RealHandle t) -> IO (Resp t (RealHandle t))
, initMock :: MockState t
, newHandles :: forall h. Resp t h -> [h]
, generator :: Model t Symbolic -> Maybe (Gen (Cmd t :@ Symbolic))
, shrinker :: Model t Symbolic -> Cmd t :@ Symbolic -> [Cmd t :@ Symbolic]
, cleanup :: Model t Concrete -> IO ()
}
data Simple t
type instance NAry.MockState (Simple t) = MockState t
type instance NAry.RealHandles (Simple t) = '[RealHandle t]
type instance NAry.RealMonad (Simple _) = IO
data instance NAry.Cmd (Simple _) _f _hs where
SimpleCmd :: Cmd t (f h) -> NAry.Cmd (Simple t) f '[h]
data instance NAry.Resp (Simple _) _f _hs where
SimpleResp :: Resp t (f h) -> NAry.Resp (Simple t) f '[h]
newtype instance NAry.MockHandle (Simple t) (RealHandle t) =
SimpleToMock { unSimpleToMock :: MockHandle t }
unSimpleCmd :: NAry.Cmd (Simple t) f '[h] -> Cmd t (f h)
unSimpleCmd (SimpleCmd cmd) = cmd
unSimpleResp :: NAry.Resp (Simple t) f '[h] -> Resp t (f h)
unSimpleResp (SimpleResp resp) = resp
instance ( Functor (Resp t)
, Eq (Resp t (MockHandle t))
, Eq (MockHandle t)
) => Eq (NAry.Resp (Simple t) (NAry.MockHandle (Simple t)) '[RealHandle t]) where
SimpleResp r == SimpleResp r' = (unSimpleToMock <$> r) == (unSimpleToMock <$> r')
instance ( Functor (Resp t)
, Show (Resp t (MockHandle t))
) => Show (NAry.Resp (Simple t) (NAry.MockHandle (Simple t)) '[RealHandle t]) where
show (SimpleResp r) = show (unSimpleToMock <$> r)
instance ( Functor (Resp t)
, Show (Resp t (Reference (RealHandle t) r))
, Show1 r
) => Show (NAry.Resp (Simple t) (NAry.FlipRef r) '[RealHandle t]) where
show (SimpleResp r) = show (NAry.unFlipRef <$> r)
instance ( Functor (Cmd t)
, Show (Cmd t (Reference (RealHandle t) r))
, Show1 r
) => Show (NAry.Cmd (Simple t) (NAry.FlipRef r) '[RealHandle t]) where
show (SimpleCmd r) = show (NAry.unFlipRef <$> r)
deriving stock instance Eq (MockHandle t) => Eq (NAry.MockHandle (Simple t) (RealHandle t))
deriving stock instance Show (MockHandle t) => Show (NAry.MockHandle (Simple t) (RealHandle t))
instance Traversable (Resp t) => NTraversable (NAry.Resp (Simple t)) where
nctraverse _ f (SimpleResp x) = SimpleResp <$> traverse (f ElemHead) x
instance Traversable (Cmd t) => NTraversable (NAry.Cmd (Simple t)) where
nctraverse _ f (SimpleCmd x) = SimpleCmd <$> traverse (f ElemHead) x
instance ToExpr (MockHandle t)
=> ToExpr (NAry.MockHandle (Simple t) (RealHandle t)) where
toExpr (SimpleToMock h) = toExpr h
fromSimple :: StateMachineTest t -> NAry.StateMachineTest (Simple t)
fromSimple StateMachineTest{..} = NAry.StateMachineTest {
runMock = \cmd st -> first respMockFromSimple (runMock (cmdMockToSimple cmd) st)
, runReal = \cmd -> respRealFromSimple <$> (runReal (cmdRealToSimple cmd))
, initMock = initMock
, newHandles = \r -> Comp (newHandles (unSimpleResp r)) :* Nil
, generator = \m -> fmap cmdAtFromSimple <$> generator (modelToSimple m)
, shrinker = \m cmd -> cmdAtFromSimple <$> shrinker (modelToSimple m) (cmdAtToSimple cmd)
, cleanup = cleanup . modelToSimple
}
{-------------------------------------------------------------------------------
Running the tests
-------------------------------------------------------------------------------}
prop_sequential :: StateMachineTest t
-> Maybe Int -- ^ (Optional) minimum number of commands
-> Property
prop_sequential = NAry.prop_sequential . fromSimple
prop_parallel :: StateMachineTest t
-> Maybe Int -- ^ (Optional) minimum number of commands
-> Property
prop_parallel = NAry.prop_parallel . fromSimple
| advancedtelematic/quickcheck-state-machine-model | src/Test/StateMachine/Lockstep/Simple.hs | bsd-3-clause | 8,987 | 0 | 15 | 2,149 | 2,752 | 1,445 | 1,307 | -1 | -1 |
{-| Utility functions for MonadPlus operations
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Utils.Monad
( mretryN
, retryMaybeN
, anyM
, allM
, orM
, unfoldrM
, unfoldrM'
, retryErrorN
) where
import Control.Monad
import Control.Monad.Error.Class (MonadError(..))
import Control.Monad.Trans.Maybe
-- | Retries the given action up to @n@ times.
-- The action signals failure by 'mzero'.
mretryN :: (MonadPlus m) => Int -> (Int -> m a) -> m a
mretryN n = msum . flip map [1..n]
-- | Retries the given action up to @n@ times.
-- The action signals failure by 'mzero'.
retryMaybeN :: (Monad m) => Int -> (Int -> MaybeT m a) -> m (Maybe a)
retryMaybeN = (runMaybeT .) . mretryN
-- | Retries the given action up to @n@ times until it succeeds.
-- If all actions fail, the error of the last one is returned.
-- The action is always run at least once, even if @n@ is less than 1.
retryErrorN :: (MonadError e m) => Int -> (Int -> m a) -> m a
retryErrorN n f = loop 1
where
loop i | i < n = catchError (f i) (const $ loop (i + 1))
| otherwise = f i
-- * From monad-loops (until we can / want to depend on it):
-- | Short-circuit 'any' with a monadic predicate.
anyM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
anyM p = foldM (\v x -> if v then return True else p x) False
-- | Short-circuit 'all' with a monadic predicate.
allM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
allM p = foldM (\v x -> if v then p x else return False) True
-- | Short-circuit 'or' for values of type Monad m => m Bool
orM :: (Monad m) => [m Bool] -> m Bool
orM = anyM id
-- |See 'Data.List.unfoldr'. This is a monad-friendly version of that.
unfoldrM :: (Monad m) => (a -> m (Maybe (b,a))) -> a -> m [b]
unfoldrM = unfoldrM'
-- | See 'Data.List.unfoldr'. This is a monad-friendly version of that, with a
-- twist. Rather than returning a list, it returns any MonadPlus type of your
-- choice.
unfoldrM' :: (Monad m, MonadPlus f) => (a -> m (Maybe (b,a))) -> a -> m (f b)
unfoldrM' f z = do
x <- f z
case x of
Nothing -> return mzero
Just (x', z') -> do
xs <- unfoldrM' f z'
return (return x' `mplus` xs)
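-- The following is an illustrative sketch (not part of the original API),
-- showing how 'unfoldrM' unfolds a seed value into a list inside a monad.
-- Here the seed is a simple countdown, so the result is [3,2,1].
_unfoldrMCountdown :: IO [Int]
_unfoldrMCountdown = unfoldrM (return . step) 3
  where
    step :: Int -> Maybe (Int, Int)
    step 0 = Nothing
    step n = Just (n, n - 1)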
| leshchevds/ganeti | src/Ganeti/Utils/Monad.hs | bsd-2-clause | 3,508 | 0 | 15 | 781 | 681 | 365 | 316 | 36 | 2 |
-- |
-- Module : Crypto.PubKey.RSA.Prim
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : Good
--
module Crypto.PubKey.RSA.Prim
(
-- * Decrypt primitive
dp
-- * Encrypt primitive
, ep
) where
import Crypto.PubKey.RSA.Types
import Crypto.Number.ModArithmetic (expFast, expSafe)
import Crypto.Number.Serialize (os2ip, i2ospOf_)
import Crypto.Internal.ByteArray (ByteArray)
{- dpSlow computes the decrypted message without using any precomputed cache
   values. Only n and d need to be valid. -}
dpSlow :: ByteArray ba => PrivateKey -> ba -> ba
dpSlow pk c = i2ospOf_ (private_size pk) $ expSafe (os2ip c) (private_d pk) (private_n pk)
{- dpFast computes the decrypted message more efficiently when the
   precomputed private values are available: exponentiations mod p and mod q
   are faster to compute than mod pq. -}
dpFast :: ByteArray ba => Blinder -> PrivateKey -> ba -> ba
dpFast (Blinder r rm1) pk c =
i2ospOf_ (private_size pk) (multiplication rm1 (m2 + h * (private_q pk)) (private_n pk))
where
re = expFast r (public_e $ private_pub pk) (private_n pk)
iC = multiplication re (os2ip c) (private_n pk)
m1 = expSafe iC (private_dP pk) (private_p pk)
m2 = expSafe iC (private_dQ pk) (private_q pk)
h = ((private_qinv pk) * (m1 - m2)) `mod` (private_p pk)
dpFastNoBlinder :: ByteArray ba => PrivateKey -> ba -> ba
dpFastNoBlinder pk c = i2ospOf_ (private_size pk) (m2 + h * (private_q pk))
where iC = os2ip c
m1 = expSafe iC (private_dP pk) (private_p pk)
m2 = expSafe iC (private_dQ pk) (private_q pk)
h = ((private_qinv pk) * (m1 - m2)) `mod` (private_p pk)
-- | Compute the RSA decrypt primitive.
-- if the p and q numbers are available, then dpFast is used
-- otherwise, we use dpSlow which only need d and n.
dp :: ByteArray ba => Maybe Blinder -> PrivateKey -> ba -> ba
dp blinder pk
| private_p pk /= 0 && private_q pk /= 0 = maybe dpFastNoBlinder dpFast blinder $ pk
| otherwise = dpSlow pk
-- | Compute the RSA encrypt primitive
ep :: ByteArray ba => PublicKey -> ba -> ba
ep pk m = i2ospOf_ (public_size pk) $ expFast (os2ip m) (public_e pk) (public_n pk)
-- | multiply 2 integers in Zm only performing the modulo operation if necessary
multiplication :: Integer -> Integer -> Integer -> Integer
multiplication a b m = (a * b) `mod` m
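-- Relationship between the two primitives (illustrative; assumes a matching
-- key pair and an input numerically smaller than the modulus): decrypting the
-- output of 'ep' with 'dp' recovers the original message, left-padded with
-- zero bytes to the key size.
--
-- > dp Nothing priv (ep pub msg)   -- msg, zero-padded to private_size priv bytes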
| tekul/cryptonite | Crypto/PubKey/RSA/Prim.hs | bsd-3-clause | 2,507 | 0 | 11 | 618 | 720 | 379 | 341 | 32 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Reporting.Error.Docs where
import qualified Reporting.Report as Report
data Error
= NoDocs
| OnlyInDocs String
| OnlyInExports String
| NoComment String
| NoType String
-- TO REPORT
toReport :: Error -> Report.Report
toReport err =
case err of
NoDocs ->
Report.simple "NO MODULE DOCUMENTATION"
( "You must have a documentation comment between the module declaration and the\n"
++ "imports."
)
"Learn how at <http://package.elm-lang.org/help/documentation-format>"
OnlyInDocs name ->
Report.simple "UNDEFINED DOCUMENTATION"
("Your module documentation includes `" ++ name ++ "` which is not exported.")
"Is it misspelled? Should it be exported?"
OnlyInExports name ->
Report.simple "MISSING DOCUMENTATION"
("Your module exports `" ++ name ++ "` but it is not in the module documentation.")
( "It needs to be listed in the module documentation.\n"
++ "Learn how at <http://package.elm-lang.org/help/documentation-format>"
)
NoComment name ->
Report.simple "MISSING COMMENT"
("The value `" ++ name ++ "` does not have a documentation comment.")
( "Documentation comments start with {-| and end with -}. They should provide a\n"
++ "clear description of how they work, and ideally a small code example. This is\n"
++ "extremely valuable for users checking out your package!\n\n"
++ "If you think the docs are clearer without any words, you can use an empty\n"
++ "comment {-|-} which should be used sparingly. Maybe you have a section of 20\n"
++ "values all with the exact same type. The docs may read better if they are all\n"
++ "described in one place.\n\n"
++ "Learn more at <http://package.elm-lang.org/help/documentation-format>"
)
NoType name ->
Report.simple "MISSING ANNOTATION"
("The value `" ++ name ++ "` does not have a type annotation.")
( "Adding type annotations is best practice and it gives you a chance to name\n"
++ "types and type variables so they are as easy as possible to understand!"
)
| JoeyEremondi/elm-summer-opt | src/Reporting/Error/Docs.hs | bsd-3-clause | 2,294 | 0 | 16 | 649 | 250 | 132 | 118 | 42 | 5 |
yes = reverse xs `isPrefixOf` reverse ys | bitemyapp/apply-refact | tests/examples/Default18.hs | bsd-3-clause | 40 | 0 | 6 | 6 | 18 | 9 | 9 | 1 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-- Don't do the cunning new newtype-deriving thing
-- when the type constructor is recursive
module ShouldCompile where
newtype A = A [A] deriving (Eq)
test :: A -> A -> Bool
test x y = x == y
| hvr/jhc | regress/tests/1_typecheck/2_pass/ghc/tc159.hs | mit | 228 | 0 | 6 | 47 | 52 | 31 | 21 | 5 | 1 |
import System.IO
file = "readwrite003.txt"
main = do
writeFile file "ab\ncd\nef\ngh"
h <- openFile file ReadWriteMode
hGetLine h
hPutStrLn h "yz"
hClose h
h <- openBinaryFile file ReadMode
hSetNewlineMode stdout noNewlineTranslation
hGetContents h >>= putStr
| olsner/ghc | libraries/base/tests/IO/readwrite003.hs | bsd-3-clause | 277 | 0 | 8 | 53 | 86 | 36 | 50 | 11 | 1 |
{-# OPTIONS -XRecursiveDo #-}
-- test of user defined instance of MonadFix
module Main (main) where
import Control.Monad
import Control.Monad.Fix
data X a = X a deriving Show
instance Functor X where
fmap f (X a) = X (f a)
instance Applicative X where
pure = return
(<*>) = ap
instance Monad X where
return = X
(X a) >>= f = f a
instance MonadFix X where
mfix f = fix (f . unX)
where unX ~(X x) = x
z :: X [Int]
z = mdo x <- return (1:x)
return (take 4 x)
main = print z
| olsner/ghc | testsuite/tests/mdo/should_compile/mdo002.hs | bsd-3-clause | 516 | 0 | 11 | 145 | 219 | 115 | 104 | -1 | -1 |
import Control.Concurrent
import Control.Exception
-- test blocking & unblocking of async exceptions.
-- the first exception "foo" should be caught by the "caught1" handler,
-- since async exceptions are blocked outside this handler.
-- the second exception "bar" should be caught by the outer "caught2" handler,
-- (i.e. this tests that async exceptions are properly unblocked after
-- being blocked).
main = do
main_thread <- myThreadId
print =<< getMaskingState -- False
m <- newEmptyMVar
m2 <- newEmptyMVar
forkIO (do takeMVar m
throwTo main_thread (ErrorCall "foo")
throwTo main_thread (ErrorCall "bar")
putMVar m2 ()
)
( do
mask $ \restore -> do
putMVar m ()
print =<< getMaskingState -- True
sum [1..1] `seq` -- give 'foo' a chance to be raised
(restore $ myDelay 500000)
`Control.Exception.catch`
\e -> putStrLn ("caught1: " ++ show (e::SomeException))
threadDelay 10000
takeMVar m2
)
`Control.Exception.catch`
\e -> do print =<< getMaskingState
putStrLn ("caught2: " ++ show (e::SomeException))
-- compensate for the fact that threadDelay is non-interruptible
-- on Windows with the threaded RTS in 6.6.
myDelay usec = do
m <- newEmptyMVar
forkIO $ do threadDelay usec; putMVar m ()
takeMVar m
| ezyang/ghc | testsuite/tests/concurrent/should_run/conc015.hs | bsd-3-clause | 1,386 | 0 | 21 | 368 | 303 | 149 | 154 | 28 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module A where
import Data.Data
import Data.Typeable
data Foo = Foo Int
deriving (Show, Data, Typeable)
| olsner/ghc | testsuite/tests/quasiquotation/T4491/A.hs | bsd-3-clause | 145 | 0 | 6 | 25 | 39 | 23 | 16 | 6 | 0 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving, NoImplicitPrelude #-}
module GHC.Event.Unique
(
UniqueSource
, Unique(..)
, newSource
, newUnique
) where
import Data.Int (Int64)
import GHC.Base
import GHC.Conc.Sync (TVar, atomically, newTVarIO, readTVar, writeTVar)
import GHC.Num (Num(..))
import GHC.Show (Show(..))
-- We used to use IORefs here, but Simon switched us to STM when we
-- found that our use of atomicModifyIORef was subject to a severe RTS
-- performance problem when used in a tight loop from multiple
-- threads: http://ghc.haskell.org/trac/ghc/ticket/3838
--
-- There seems to be no performance cost to using a TVar instead.
newtype UniqueSource = US (TVar Int64)
newtype Unique = Unique { asInt64 :: Int64 }
deriving (Eq, Ord, Num)
instance Show Unique where
show = show . asInt64
newSource :: IO UniqueSource
newSource = US `fmap` newTVarIO 0
newUnique :: UniqueSource -> IO Unique
newUnique (US ref) = atomically $ do
u <- readTVar ref
let !u' = u+1
writeTVar ref u'
return $ Unique u'
{-# INLINE newUnique #-}
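-- Usage sketch (illustrative): create a source once, then draw fresh uniques
-- from it as needed.
--
-- > do src <- newSource
-- >    u1 <- newUnique src
-- >    u2 <- newUnique src
-- >    print (u1, u2)   -- prints (1,2)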
| tolysz/prepare-ghcjs | spec-lts8/base/GHC/Event/Unique.hs | bsd-3-clause | 1,121 | 0 | 11 | 217 | 254 | 146 | 108 | 27 | 1 |
笑 :: Int
笑 = 3
main = print 笑
| urbanslug/ghc | testsuite/tests/parser/should_compile/T3741.hs | bsd-3-clause | 37 | 6 | 4 | 11 | 20 | 10 | 10 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module NLP.Stemmer.Cistem (stem,stemCaseInsensitive,Segmentation(..),segment',segment,segment'CaseInsensitive,segmentCaseInsensitive) where
import Data.Char
import Data.Monoid
import Data.Text as T
-- | Guess the word stem. This module uses the CISTEM algorithm, published by L. Weißweiler and A. Fraser in "Developing a Stemmer for German Based on a Comparative Analysis of Publicly Available Stemmers" (2017).
stem :: Text -> Text
stem t =
let firstUpper = isUpper (T.head t)
in postpare $ loop firstUpper $ prepare t
-- | A case insensitive variant. Use only if the text may be incorrectly upper case.
stemCaseInsensitive :: Text -> Text
stemCaseInsensitive t = postpare $ loop False $ prepare t
data Segmentation = Segmentation { segPrefix :: Text, segStem :: Text, segSuffix :: Text } deriving (Show,Eq)
-- | Split the word into a prefix, the stem and a suffix. In contrast to the `stem` function, umlauts remain unchanged.
segment' :: Text -> Segmentation
segment' t =
let firstUpper = isUpper (T.head t)
lower = T.toLower t
prepared = segmentPrepare t
theStem = postpare $ loop firstUpper prepared
thePrefix | theStem `isPrefixOf` lower = ""
| "ge" `isPrefixOf` lower = "ge"
| otherwise = error ("segment' should be debugged; extracted stem: "++ unpack theStem)
theSuffix = T.drop (T.length thePrefix + T.length theStem) lower
in Segmentation thePrefix theStem theSuffix
-- | Split the word into stem and suffix. This is supposed to be compatible to the `segment` function from the reference implementation.
segment :: Text -> (Text,Text)
segment t =
let Segmentation{..} = segment' t
in (segPrefix<>segStem, segSuffix)
-- | A case insensitive variant. Use only if the text may be incorrectly upper case.
segmentCaseInsensitive :: Text -> (Text,Text)
segmentCaseInsensitive = segment . T.toLower
-- | A case insensitive variant. Use only if the text may be incorrectly upper case.
segment'CaseInsensitive :: Text -> Segmentation
segment'CaseInsensitive = segment' . T.toLower
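-- A minimal usage sketch (illustrative German words; exact outputs follow the
-- CISTEM rules implemented below):
--
-- > stem (T.pack "laufen")      -- strips the "n" and "e" endings, yielding "lauf"
-- > segment' (T.pack "gelaufen") -- Segmentation "ge" "lauf" "en"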
loop u t | T.length t <= 3 = t
| (T.length t > 5) && (["em","er","nd"] `isSuffixOf'` t) = loop u (stripSuffix' ["em","er","nd"] t)
| not u && ("t" `isSuffixOf` t) = loop u (stripSuffix' ["t"] t)
| ["e","s","n"] `isSuffixOf'` t = loop u (stripSuffix' ["e","s","n"] t)
| otherwise = t
prepare :: Text -> Text
prepare =
replace "ü" "u" .
replace "ö" "o" .
replace "ä" "a" .
replxx .
replace "ß" "ss" .
segmentPrepare
segmentPrepare :: Text -> Text
segmentPrepare =
replace "sch" "$" .
replace "ie" "&" .
replace "ei" "%" .
replxx .
stripge .
T.toLower
postpare :: Text -> Text
postpare =
replace "%" "ei" .
replace "&" "ie" .
replace "$" "sch" .
replxxback
replxx :: Text -> Text
replxx = snd . mapAccumL f '\0'
where f prev curr | prev == curr = (curr,'*')
| otherwise = (curr,curr)
replxxback :: Text -> Text
replxxback = snd . mapAccumL f '\0'
where f prev '*' = (prev,prev)
f prev curr = (curr,curr)
stripge :: Text -> Text
stripge t | T.length t >= 6 =
case stripPrefix "ge" t of
Nothing -> t
Just t -> t
| otherwise = t
isSuffixOf' [] _ = False
isSuffixOf' (s:ss) t = (s `isSuffixOf` t) || (ss `isSuffixOf'` t)
stripSuffix' :: [Text] -> Text -> Text
stripSuffix' [] hay = hay
stripSuffix' (suff:ss) hay =
case stripSuffix suff hay of
Just t -> t
Nothing -> stripSuffix' ss hay
| LeonieWeissweiler/CISTEM | Cistem.hs | mit | 3,759 | 0 | 13 | 990 | 1,086 | 567 | 519 | 80 | 2 |
module Test.Expr.GroupBy
( groupByTests,
)
where
import qualified Control.Monad.IO.Class as MIO
import qualified Data.ByteString.Char8 as B8
import qualified Data.Int as Int
import qualified Data.List.NonEmpty as NE
import qualified Data.Pool as Pool
import qualified Data.Text as T
import qualified Orville.PostgreSQL.Connection as Conn
import qualified Orville.PostgreSQL.Internal.ExecutionResult as ExecResult
import qualified Orville.PostgreSQL.Internal.Expr as Expr
import qualified Orville.PostgreSQL.Internal.RawSql as RawSql
import qualified Orville.PostgreSQL.Internal.SqlValue as SqlValue
import Test.Expr.TestSchema (assertEqualSqlRows)
import qualified Test.Property as Property
data FooBar = FooBar
{ foo :: Int.Int32
, bar :: String
}
groupByTests :: Pool.Pool Conn.Connection -> Property.Group
groupByTests pool =
Property.group
"Expr - GroupBy"
[ prop_groupByColumnsExpr pool
, prop_appendGroupByExpr pool
]
prop_groupByColumnsExpr :: Property.NamedDBProperty
prop_groupByColumnsExpr =
groupByTest "groupByColumnsExpr groups by columns" $
GroupByTest
{ groupByValuesToInsert = [FooBar 1 "dog", FooBar 2 "dingo", FooBar 3 "dog"]
, groupByExpectedQueryResults = [FooBar 2 "dingo", FooBar 3 "dog", FooBar 1 "dog"]
, groupByClause =
Just . Expr.groupByClause $
Expr.groupByColumnsExpr $
barColumn NE.:| [fooColumn]
}
prop_appendGroupByExpr :: Property.NamedDBProperty
prop_appendGroupByExpr =
groupByTest "appendGroupByExpr causes grouping on both clauses" $
GroupByTest
{ groupByValuesToInsert = [FooBar 1 "dog", FooBar 2 "dingo", FooBar 1 "dog", FooBar 3 "dingo", FooBar 1 "dog", FooBar 2 "dingo"]
, groupByExpectedQueryResults = [FooBar 1 "dog", FooBar 3 "dingo", FooBar 2 "dingo"]
, groupByClause =
Just . Expr.groupByClause $
Expr.appendGroupByExpr
(Expr.groupByExpr $ RawSql.toRawSql barColumn)
(Expr.groupByExpr $ RawSql.toRawSql fooColumn)
}
data GroupByTest = GroupByTest
{ groupByValuesToInsert :: [FooBar]
, groupByClause :: Maybe Expr.GroupByClause
, groupByExpectedQueryResults :: [FooBar]
}
mkGroupByTestInsertSource :: GroupByTest -> Expr.InsertSource
mkGroupByTestInsertSource test =
let mkRow foobar =
[ SqlValue.fromInt32 (foo foobar)
, SqlValue.fromText (T.pack $ bar foobar)
]
in Expr.insertSqlValues (map mkRow $ groupByValuesToInsert test)
mkGroupByTestExpectedRows :: GroupByTest -> [[(Maybe B8.ByteString, SqlValue.SqlValue)]]
mkGroupByTestExpectedRows test =
let mkRow foobar =
[ (Just (B8.pack "foo"), SqlValue.fromInt32 (foo foobar))
, (Just (B8.pack "bar"), SqlValue.fromText (T.pack $ bar foobar))
]
in fmap mkRow (groupByExpectedQueryResults test)
groupByTest :: String -> GroupByTest -> Property.NamedDBProperty
groupByTest testName test =
Property.singletonNamedDBProperty testName $ \pool ->
Pool.withResource pool $ \connection -> do
MIO.liftIO $ dropAndRecreateTestTable connection
MIO.liftIO . RawSql.executeVoid connection $
Expr.insertExpr testTable Nothing (mkGroupByTestInsertSource test) Nothing
result <-
MIO.liftIO $
RawSql.execute connection $
Expr.queryExpr
(Expr.selectClause $ Expr.selectExpr Nothing)
(Expr.selectColumns [fooColumn, barColumn])
(Just $ Expr.tableExpr testTable Nothing Nothing (groupByClause test) Nothing Nothing)
rows <- MIO.liftIO $ ExecResult.readRows result
rows `assertEqualSqlRows` mkGroupByTestExpectedRows test
testTable :: Expr.QualifiedTableName
testTable =
Expr.qualifiedTableName Nothing (Expr.tableName "expr_test")
fooColumn :: Expr.ColumnName
fooColumn =
Expr.columnName "foo"
barColumn :: Expr.ColumnName
barColumn =
Expr.columnName "bar"
dropAndRecreateTestTable :: Conn.Connection -> IO ()
dropAndRecreateTestTable connection = do
RawSql.executeVoid connection (RawSql.fromString "DROP TABLE IF EXISTS " <> RawSql.toRawSql testTable)
RawSql.executeVoid connection (RawSql.fromString "CREATE TABLE " <> RawSql.toRawSql testTable <> RawSql.fromString "(foo INTEGER, bar TEXT)")
| flipstone/orville | orville-postgresql-libpq/test/Test/Expr/GroupBy.hs | mit | 4,256 | 0 | 18 | 814 | 1,086 | 585 | 501 | 90 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Mattermost.Api
( Api
, MessagePayload(..)
, Attachment(..)
, Field(..)
, hook
, api
) where
import Data.Text (Text)
import Servant
import Servant.Client
import Mattermost.Types
-- ----------------------------------------------
-- | Mattermost API type.
type Api =
"hooks" :> Capture "key" Text :> ReqBody '[JSON] MessagePayload :> Post '[JSON] NoContent
-- ----------------------------------------------
api :: Proxy Api
api = Proxy
-- | Client for the Mattermost API.
-- Its type specialises to @hook :: Text -> MessagePayload -> ClientM NoContent@.
hook :: Client Api
hook = client api
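-- Usage sketch (assumes servant-client's 'ClientEnv' / 'runClientM'; the key
-- and payload values are illustrative):
--
-- > runClientM (hook webhookKey payload) clientEnv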
| UlfS/ghmm | src/Mattermost/Api.hs | mit | 765 | 0 | 10 | 197 | 141 | 85 | 56 | 20 | 1 |
module Prepare.Source.Output where
import Prelude hiding (Word, last)
import qualified Data.Char as Char
import qualified Data.List as List
import qualified Data.List.Split as Split
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified System.Directory as FilePath
import qualified System.FilePath as FilePath
import Prepare.Decompose
import Primary
data Context = Context
{ contextIndent :: Text
}
type Output a = Context -> a -> Text
newtype ModuleName = ModuleName { getModuleName :: [Text] }
deriving (Show)
emptyModuleName :: ModuleName
emptyModuleName = ModuleName []
addModuleName :: Text -> ModuleName -> ModuleName
addModuleName t (ModuleName ps) = ModuleName (t : ps)
dottedModuleName :: ModuleName -> Text
dottedModuleName (ModuleName ps) = joinText "." (reverse ps)
moduleNamePath :: ModuleName -> FilePath
moduleNamePath (ModuleName ps) = FilePath.joinPath . fmap Text.unpack . reverse $ ps
data Module = Module
{ moduleName :: ModuleName
, moduleTermName :: Text
, moduleContents :: Text
, moduleChildren :: [Module]
}
deriving (Show)
flatModules :: Module -> [Module]
flatModules m = m : concatMap flatModules (moduleChildren m)
writeModule :: FilePath -> Module -> IO ()
writeModule dir m = do
let mp = moduleNamePath (moduleName m)
let mpf = FilePath.addExtension mp "agda"
let full = dir FilePath.</> mpf
let md = FilePath.takeDirectory full
let createParents = True
_ <- FilePath.createDirectoryIfMissing createParents md
_ <- putStrLn ("Writing " ++ full)
Text.writeFile full (moduleContents m)
groupModule :: Group -> Module
groupModule (Group gid gl gt gd gs) = Module groupModuleName termName groupContents srcModules
where
groupContents = Text.concat [ prologue, descText, contentsText, "\n" ]
groupModuleName = addModuleName gid (addModuleName (showText gl) sourceTypeModuleName)
imports = fmap moduleName srcModules
prologue = joinText "\n"
[ getModulePrefix groupModuleName (sourceTypeModuleName : imports)
, termType
, termDecl
]
ctx = increaseIndent emptyContext
termName = idAsTerm gid
termType = spacedText [ termName, ":", "Group" ]
termDecl = spacedText [ termName, "=", "group", quoted gid, showText gl, quoted gt ]
descText = onePerLine (const quoted) ctx gd
contentsText = onePerLine (const id) ctx $ fmap moduleTermName srcModules
srcModules = fmap (sourceModule groupModuleName) gs
sourceTypeModuleName :: ModuleName
sourceTypeModuleName = ModuleName [ "PrimarySource", "AncientLanguage" ]
sourceModule :: ModuleName -> Source -> Module
sourceModule pm (Source sid st _ sl sc) = srcModule
where
srcModule = Module srcModuleName termName srcContents []
srcContents = Text.concat [ prologue, licenseText, contentsText, "\n" ]
srcModuleName = addModuleName sid pm
prologue = joinText "\n"
[ getModulePrefix srcModuleName [sourceTypeModuleName]
, termType
, termDecl
]
ctx = increaseIndent emptyContext
termName = idAsTerm sid
termType = spacedText [ termName, ":", "Source" ]
termDecl = spacedText [ termName, "=", "source", quoted sid, quoted st ]
licenseText = onePerLine (const quoted) ctx sl
contentsText = contentChunkJoin ctx (chunkByMilestone sc)
getModulePrefix :: ModuleName -> [ModuleName] -> Text
getModulePrefix n is =
joinText "\n"
[ moduleDecl
, ""
, imports
, ""
]
where
moduleDecl = spacedText [ "module", dottedModuleName n, "where" ]
imports = joinText "\n" $ fmap (\x -> spacedText [ "open", "import", dottedModuleName x ]) is
chunkByMilestone :: [Content] -> [[Content]]
chunkByMilestone = Split.split . Split.dropInitBlank . Split.keepDelimsL . Split.condense . Split.whenElt $ isMilestone
isMilestone :: Content -> Bool
isMilestone (ContentMilestone _) = True
isMilestone _ = False
contentChunkJoin :: Output [[Content]]
contentChunkJoin ctx xs = spacedText
[ joinText "" [ newline ctx, "( join" ]
, onePerLine contentChunk ctx xs
, ")"
]
getChapter :: [Content] -> Maybe Integer
getChapter = foldr go Nothing
where
go (ContentMilestone (MilestoneDivision (Division _ (Just c) _ _ _))) _ = Just c
go _ r = r
groupByChapter :: [[Content]] -> [[[Content]]]
groupByChapter = List.groupBy f
where
f x y = getChapter x == getChapter y
contentChunk :: Output [Content]
contentChunk ctx = onePerLine content (increaseIndent ctx)
content :: Output Content
content ctx (ContentMilestone m) = milestone ctx m
content ctx (ContentWord w) = word ctx w
milestone :: Output Milestone
milestone _ MilestoneParagraph = "p"
milestone _ (MilestoneCard _) = "p"
milestone ctx (MilestoneDivision v) = division ctx v
division :: Output Division
division _ (Division _ (Just cn) (Just vn) _ _) = spacedText [ "v", num cn, num vn ]
division _ (Division _ _ _ _ _) = spacedText [ "v", num (0 :: Int), num (0 :: Int) ]
word :: Output Word
word _ (Word p t s) | Text.null p, Text.null s = spacedText [ "w", quoted (decompose t) ]
word _ (Word p t s) | Text.null p = spacedText [ "ws", quoted (decompose t), quoted (decompose s) ]
word _ (Word p t s) = spacedText [ "wp", quoted (decompose p), quoted (decompose t), quoted (decompose s) ]
joinText :: Text -> [Text] -> Text
joinText t = Text.intercalate t
spacedText :: [Text] -> Text
spacedText = joinText " "
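-- For example: spacedText ["module", "Foo", "where"] == "module Foo where"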
showText :: Show a => a -> Text
showText = Text.pack . show
emptyContext :: Context
emptyContext = Context ""
increaseIndent :: Context -> Context
increaseIndent (Context i) = Context (Text.append " " i)
asIs :: Output Text
asIs _ t = t
quoted :: Text -> Text
quoted t = Text.concat [ "\"", t , "\"" ]
num :: (Num a, Show a) => a -> Text
num = Text.pack . show
newline :: Context -> Text
newline ctx = Text.append "\n" (contextIndent ctx)
join :: (Context -> Text) -> Output a -> Output [a]
join g _ ctx [] = Text.append (g ctx) "[]"
join g f ctx (x : xs) =
Text.concat . fmap (Text.append (g ctx)) $ first : middles ++ [last]
where
first = Text.append "( " (f ctx x)
middles = fmap (Text.append "∷ " . f ctx) xs
last = "∷ [] )"
onePerLine :: Output a -> Output [a]
onePerLine = join newline
spaced :: Output a -> Output [a]
spaced = join (const " ")
idAsTerm :: Text -> Text
idAsTerm t
| not . Text.null $ t
, h <- Text.head t
, Char.isUpper h
, tl <- Text.tail t
= Text.cons (Char.toLower h) tl
idAsTerm t = t
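-- For example: idAsTerm "Gen" == "gen", while idAsTerm "gen" is unchanged.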
| ancientlanguage/haskell-analysis | prepare/src/Prepare/Source/Output.hs | mit | 6,385 | 0 | 15 | 1,203 | 2,369 | 1,238 | 1,131 | 155 | 2 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances, CPP, DeriveDataTypeable, DeriveFunctor #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Python.Common.AST
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : bjpop@csse.unimelb.edu.au
-- Stability : experimental
-- Portability : ghc
--
-- Representation of the Python abstract syntax tree (AST). The representation is
-- a superset of versions 2.x and 3.x of Python. In many cases they are
-- identical. The documentation in this module indicates where they are
-- different.
--
-- All the data types have a (polymorphic) parameter which allows the AST to
-- be annotated by an arbitrary type (for example source locations). Specialised
-- instances of the types are provided for source spans. For example @Module a@ is
-- the type of modules, and @ModuleSpan@ is the type of modules annotated with source
-- span information.
--
-- Note: there are cases where the AST is more liberal than the formal grammar
-- of the language. Therefore some care must be taken when constructing
-- Python programs using the raw AST.
-----------------------------------------------------------------------------
module Language.Python.Common.AST (
-- * Annotation projection
Annotated (..)
-- * Modules
, Module (..), ModuleSpan
-- * Identifiers and dotted names
, Ident (..), IdentSpan
, DottedName, DottedNameSpan
-- * Statements, suites, parameters, decorators and assignment operators
, Statement (..), StatementSpan
, Suite, SuiteSpan
, Parameter (..), ParameterSpan
, ParamTuple (..), ParamTupleSpan
, Decorator (..), DecoratorSpan
, AssignOp (..), AssignOpSpan
-- * Expressions, operators, arguments and slices
, Expr (..), ExprSpan
, Op (..), OpSpan
, Argument (..), ArgumentSpan
, Slice (..), SliceSpan
, DictMappingPair (..), DictMappingPairSpan
, YieldArg (..), YieldArgSpan
-- * Imports
, ImportItem (..), ImportItemSpan
, FromItem (..), FromItemSpan
, FromItems (..), FromItemsSpan
, ImportRelative (..), ImportRelativeSpan
-- * Exceptions
, Handler (..), HandlerSpan
, ExceptClause (..), ExceptClauseSpan
, RaiseExpr (..), RaiseExprSpan
-- * Comprehensions
, Comprehension (..), ComprehensionSpan
, ComprehensionExpr (..), ComprehensionExprSpan
, CompFor (..), CompForSpan
, CompIf (..), CompIfSpan
, CompIter (..), CompIterSpan
)
where
import Language.Python.Common.SrcLocation ( Span (getSpan), SrcSpan (..), spanning )
import Data.Data
--------------------------------------------------------------------------------
-- | Convenient access to annotations in annotated types.
class Annotated t where
-- | Given an annotated type, project out its annotation value.
annot :: t annot -> annot
-- | Identifier.
data Ident annot = Ident { ident_string :: !String, ident_annot :: annot }
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type IdentSpan = Ident SrcSpan
instance Span IdentSpan where
getSpan = annot
instance Annotated Ident where
annot = ident_annot
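-- For example (illustrative), an identifier annotated with a unit value:
--
-- > Ident { ident_string = "x", ident_annot = () } :: Ident ()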
-- | A module (Python source file).
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/toplevel_components.html>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/toplevel_components.html>
--
newtype Module annot = Module [Statement annot] -- ^ A module is just a sequence of top-level statements.
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ModuleSpan = Module SrcSpan
-- | A block of statements. A suite is a group of statements controlled by a clause,
-- for example, the body of a loop.
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/compound_stmts.html>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/compound_stmts.html>
--
type Suite annot = [Statement annot]
type SuiteSpan = Suite SrcSpan
-- | A compound name constructed with the dot operator.
type DottedName annot = [Ident annot]
type DottedNameSpan = DottedName SrcSpan
-- | An entity imported using the \'import\' keyword.
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/simple_stmts.html#the-import-statement>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/simple_stmts.html#the-import-statement>
--
data ImportItem annot =
ImportItem
{ import_item_name :: DottedName annot -- ^ The name of module to import.
, import_as_name :: Maybe (Ident annot) -- ^ An optional name to refer to the entity (the \'as\' name).
, import_item_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ImportItemSpan = ImportItem SrcSpan
instance Span ImportItemSpan where
getSpan = annot
instance Annotated ImportItem where
annot = import_item_annot
-- | An entity imported using the \'from ... import\' construct.
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/simple_stmts.html#the-import-statement>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/simple_stmts.html#the-import-statement>
--
data FromItem annot =
FromItem
{ from_item_name :: Ident annot -- ^ The name of the entity imported.
, from_as_name :: Maybe (Ident annot) -- ^ An optional name to refer to the entity (the \'as\' name).
, from_item_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type FromItemSpan = FromItem SrcSpan
instance Span FromItemSpan where
getSpan = annot
instance Annotated FromItem where
annot = from_item_annot
-- | Items imported using the \'from ... import\' construct.
data FromItems annot
= ImportEverything { from_items_annot :: annot } -- ^ Import everything exported from the module.
| FromItems { from_items_items :: [FromItem annot], from_items_annot :: annot } -- ^ Import a specific list of items from the module.
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type FromItemsSpan = FromItems SrcSpan
instance Span FromItemsSpan where
getSpan = annot
instance Annotated FromItems where
annot = from_items_annot
-- | A reference to the module to import from using the \'from ... import\' construct.
data ImportRelative annot
= ImportRelative
{ import_relative_dots :: Int
, import_relative_module :: Maybe (DottedName annot)
, import_relative_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ImportRelativeSpan = ImportRelative SrcSpan
instance Span ImportRelativeSpan where
getSpan = annot
instance Annotated ImportRelative where
annot = import_relative_annot
-- | Statements.
--
-- * Simple statements:
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/simple_stmts.html>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/simple_stmts.html>
--
-- * Compound statements:
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/compound_stmts.html>
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/compound_stmts.html>
--
data Statement annot
-- | Import statement.
= Import
{ import_items :: [ImportItem annot] -- ^ Items to import.
, stmt_annot :: annot
}
-- | From ... import statement.
| FromImport
{ from_module :: ImportRelative annot -- ^ Module to import from.
, from_items :: FromItems annot -- ^ Items to import.
, stmt_annot :: annot
}
-- | While loop.
| While
{ while_cond :: Expr annot -- ^ Loop condition.
, while_body :: Suite annot -- ^ Loop body.
, while_else :: Suite annot -- ^ Else clause.
, stmt_annot :: annot
}
-- | For loop.
| For
{ for_targets :: [Expr annot] -- ^ Loop variables.
, for_generator :: Expr annot -- ^ Loop generator.
, for_body :: Suite annot -- ^ Loop body
, for_else :: Suite annot -- ^ Else clause.
, stmt_annot :: annot
}
-- | Function definition.
| Fun
{ fun_name :: Ident annot -- ^ Function name.
, fun_args :: [Parameter annot] -- ^ Function parameter list.
, fun_result_annotation :: Maybe (Expr annot) -- ^ Optional result annotation.
, fun_body :: Suite annot -- ^ Function body.
, stmt_annot :: annot
}
-- | Class definition.
| Class
{ class_name :: Ident annot -- ^ Class name.
, class_args :: [Argument annot] -- ^ Class argument list. In version 2.x this is only ArgExprs.
, class_body :: Suite annot -- ^ Class body.
, stmt_annot :: annot
}
-- | Conditional statement (if-elif-else).
| Conditional
{ cond_guards :: [(Expr annot, Suite annot)] -- ^ Sequence of if-elif conditional clauses.
, cond_else :: Suite annot -- ^ Possibly empty unconditional else clause.
, stmt_annot :: annot
}
-- | Assignment statement.
| Assign
{ assign_to :: [Expr annot] -- ^ Entity to assign to.
, assign_expr :: Expr annot -- ^ Expression to evaluate.
, stmt_annot :: annot
}
-- | Augmented assignment statement.
| AugmentedAssign
{ aug_assign_to :: Expr annot -- ^ Entity to assign to.
, aug_assign_op :: AssignOp annot -- ^ Assignment operator (for example \'+=\').
, aug_assign_expr :: Expr annot -- ^ Expression to evaluate.
, stmt_annot :: annot
}
-- | Decorated definition of a function or class.
| Decorated
{ decorated_decorators :: [Decorator annot] -- ^ Decorators.
, decorated_def :: Statement annot -- ^ Function or class definition to be decorated.
, stmt_annot :: annot
}
-- | Return statement (may only occur syntactically nested in a function definition).
| Return
{ return_expr :: Maybe (Expr annot) -- ^ Optional expression to evaluate and return to caller.
, stmt_annot :: annot
}
-- | Try statement (exception handling).
| Try
{ try_body :: Suite annot -- ^ Try clause.
, try_excepts :: [Handler annot] -- ^ Exception handlers.
, try_else :: Suite annot -- ^ Possibly empty else clause, executed if and when control flows off the end of the try clause.
, try_finally :: Suite annot -- ^ Possibly empty finally clause.
, stmt_annot :: annot
}
-- | Raise statement (exception throwing).
| Raise
{ raise_expr :: RaiseExpr annot
, stmt_annot :: annot
}
-- | With statement (context management).
| With
{ with_context :: [(Expr annot, Maybe (Expr annot))] -- ^ Context expression(s) (yields a context manager).
, with_body :: Suite annot -- ^ Suite to be managed.
, stmt_annot :: annot
}
-- | Pass statement (null operation).
| Pass { stmt_annot :: annot }
-- | Break statement (may only occur syntactically nested in a for or while loop, but not nested in a function or class definition within that loop).
| Break { stmt_annot :: annot }
-- | Continue statement (may only occur syntactically nested in a for or while loop, but not nested in a function or class definition or finally clause within that loop).
| Continue { stmt_annot :: annot }
-- | Del statement (delete).
| Delete
{ del_exprs :: [Expr annot] -- ^ Items to delete.
, stmt_annot :: annot
}
-- | Expression statement.
| StmtExpr { stmt_expr :: Expr annot, stmt_annot :: annot }
-- | Global declaration.
| Global
{ global_vars :: [Ident annot] -- ^ Variables declared global in the current block.
, stmt_annot :: annot
}
-- | Nonlocal declaration. /Version 3.x only/.
| NonLocal
     { nonLocal_vars :: [Ident annot] -- ^ Variables declared nonlocal in the current block (their binding comes from the nearest enclosing scope).
, stmt_annot :: annot
}
-- | Assertion.
| Assert
{ assert_exprs :: [Expr annot] -- ^ Expressions being asserted.
, stmt_annot :: annot
}
-- | Print statement. /Version 2 only/.
| Print
{ print_chevron :: Bool -- ^ Optional chevron (>>)
, print_exprs :: [Expr annot] -- ^ Arguments to print
, print_trailing_comma :: Bool -- ^ Does it end in a comma?
, stmt_annot :: annot
}
-- | Exec statement. /Version 2 only/.
| Exec
{ exec_expr :: Expr annot -- ^ Expression to exec.
, exec_globals_locals :: Maybe (Expr annot, Maybe (Expr annot)) -- ^ Global and local environments to evaluate the expression within.
, stmt_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type StatementSpan = Statement SrcSpan
instance Span StatementSpan where
getSpan = annot
instance Annotated Statement where
annot = stmt_annot
-- | The argument for a @raise@ statement.
data RaiseExpr annot
= RaiseV3 (Maybe (Expr annot, Maybe (Expr annot))) -- ^ Optional expression to evaluate, and optional \'from\' clause. /Version 3 only/.
| RaiseV2 (Maybe (Expr annot, (Maybe (Expr annot, Maybe (Expr annot))))) -- ^ /Version 2 only/.
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type RaiseExprSpan = RaiseExpr SrcSpan
-- | Decorator.
data Decorator annot =
Decorator
{ decorator_name :: DottedName annot -- ^ Decorator name.
, decorator_args :: [Argument annot] -- ^ Decorator arguments.
, decorator_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type DecoratorSpan = Decorator SrcSpan
instance Span DecoratorSpan where
getSpan = annot
instance Annotated Decorator where
annot = decorator_annot
-- | Formal parameter of function definitions and lambda expressions.
--
-- * Version 2.6:
--
-- * <http://www.python.org/doc/2.6/reference/compound_stmts.html#function-definitions>
--
-- * <http://www.python.org/doc/2.6/reference/expressions.html#calls>
--
-- * Version 3.1:
--
-- * <http://www.python.org/doc/3.1/reference/compound_stmts.html#function-definitions>
--
-- * <http://www.python.org/doc/3.1/reference/expressions.html#calls>
--
data Parameter annot
-- | Ordinary named parameter.
= Param
{ param_name :: Ident annot -- ^ Parameter name.
, param_py_annotation :: Maybe (Expr annot) -- ^ Optional annotation.
, param_default :: Maybe (Expr annot) -- ^ Optional default value.
, param_annot :: annot
}
-- | Excess positional parameter (single asterisk before its name in the concrete syntax).
| VarArgsPos
{ param_name :: Ident annot -- ^ Parameter name.
, param_py_annotation :: Maybe (Expr annot) -- ^ Optional annotation.
, param_annot :: annot
}
-- | Excess keyword parameter (double asterisk before its name in the concrete syntax).
| VarArgsKeyword
{ param_name :: Ident annot -- ^ Parameter name.
, param_py_annotation :: Maybe (Expr annot) -- ^ Optional annotation.
, param_annot :: annot
}
-- | Marker for the end of positional parameters (not a parameter itself).
| EndPositional { param_annot :: annot }
-- | Tuple unpack. /Version 2 only/.
| UnPackTuple
{ param_unpack_tuple :: ParamTuple annot -- ^ The tuple to unpack.
, param_default :: Maybe (Expr annot) -- ^ Optional default value.
, param_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ParameterSpan = Parameter SrcSpan
instance Span ParameterSpan where
getSpan = annot
instance Annotated Parameter where
annot = param_annot
-- | Tuple unpack parameter. /Version 2 only/.
data ParamTuple annot
= ParamTupleName { param_tuple_name :: Ident annot, param_tuple_annot :: annot } -- ^ A variable name.
   | ParamTupleAnnotatedName { param_tuple_name :: Ident annot, param_tuple_py_annotation :: Maybe (Expr annot), param_tuple_annot :: annot } -- ^ An annotated variable name.
| ParamTuple { param_tuple :: [ParamTuple annot], param_tuple_annot :: annot } -- ^ A (possibly nested) tuple parameter.
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ParamTupleSpan = ParamTuple SrcSpan
instance Span ParamTupleSpan where
getSpan = annot
instance Annotated ParamTuple where
annot = param_tuple_annot
-- | Arguments to function calls, class declarations and decorators.
data Argument annot
-- | Ordinary argument expression.
= ArgExpr { arg_expr :: Expr annot, arg_annot :: annot }
-- | Excess positional argument.
| ArgVarArgsPos { arg_expr :: Expr annot, arg_annot :: annot }
-- | Excess keyword argument.
| ArgVarArgsKeyword { arg_expr :: Expr annot, arg_annot :: annot }
-- | Keyword argument.
| ArgKeyword
{ arg_keyword :: Ident annot -- ^ Keyword name.
, arg_expr :: Expr annot -- ^ Argument expression.
, arg_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ArgumentSpan = Argument SrcSpan
instance Span ArgumentSpan where
getSpan = annot
instance Annotated Argument where
annot = arg_annot
-- | Exception handler.
data Handler annot
= Handler
{ handler_clause :: ExceptClause annot
, handler_suite :: Suite annot
, handler_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type HandlerSpan = Handler SrcSpan
instance Span HandlerSpan where
getSpan = annot
instance Annotated Handler where
annot = handler_annot
-- | Exception clause.
data ExceptClause annot
= ExceptClause
-- NB: difference with version 3 (has NAME as target, but looks like bug in grammar)
{ except_clause :: Maybe (Expr annot, Maybe (Expr annot))
, except_clause_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ExceptClauseSpan = ExceptClause SrcSpan
instance Span ExceptClauseSpan where
getSpan = annot
instance Annotated ExceptClause where
annot = except_clause_annot
-- | Comprehension. In version 3.x this can be used for lists, sets, dictionaries and generators.
-- data Comprehension e annot
data Comprehension annot
= Comprehension
{ comprehension_expr :: ComprehensionExpr annot
, comprehension_for :: CompFor annot
, comprehension_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ComprehensionSpan = Comprehension SrcSpan
instance Span ComprehensionSpan where
getSpan = annot
instance Annotated Comprehension where
annot = comprehension_annot
data ComprehensionExpr annot
= ComprehensionExpr (Expr annot)
| ComprehensionDict (DictMappingPair annot)
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ComprehensionExprSpan = ComprehensionExpr SrcSpan
instance Span ComprehensionExprSpan where
getSpan (ComprehensionExpr e) = getSpan e
getSpan (ComprehensionDict d) = getSpan d
-- | Comprehension \'for\' component.
data CompFor annot =
CompFor
{ comp_for_exprs :: [Expr annot]
, comp_in_expr :: Expr annot
, comp_for_iter :: Maybe (CompIter annot)
, comp_for_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type CompForSpan = CompFor SrcSpan
instance Span CompForSpan where
getSpan = annot
instance Annotated CompFor where
annot = comp_for_annot
-- | Comprehension guard.
data CompIf annot =
CompIf
{ comp_if :: Expr annot
, comp_if_iter :: Maybe (CompIter annot)
, comp_if_annot :: annot
}
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type CompIfSpan = CompIf SrcSpan
instance Span CompIfSpan where
getSpan = annot
instance Annotated CompIf where
annot = comp_if_annot
-- | Comprehension iterator (either a \'for\' or an \'if\').
data CompIter annot
= IterFor { comp_iter_for :: CompFor annot, comp_iter_annot :: annot }
| IterIf { comp_iter_if :: CompIf annot, comp_iter_annot :: annot }
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type CompIterSpan = CompIter SrcSpan
instance Span CompIterSpan where
getSpan = annot
instance Annotated CompIter where
annot = comp_iter_annot
-- | Expressions.
--
-- * Version 2.6 <http://www.python.org/doc/2.6/reference/expressions.html>.
--
-- * Version 3.1 <http://www.python.org/doc/3.1/reference/expressions.html>.
--
data Expr annot
-- | Variable.
= Var { var_ident :: Ident annot, expr_annot :: annot }
-- | Literal integer.
| Int { int_value :: Integer, expr_literal :: String, expr_annot :: annot }
-- | Long literal integer. /Version 2 only/.
| LongInt { int_value :: Integer, expr_literal :: String, expr_annot :: annot }
-- | Literal floating point number.
| Float { float_value :: Double, expr_literal :: String, expr_annot :: annot }
-- | Literal imaginary number.
| Imaginary { imaginary_value :: Double, expr_literal :: String, expr_annot :: annot }
-- | Literal boolean.
| Bool { bool_value :: Bool, expr_annot :: annot }
-- | Literal \'None\' value.
| None { expr_annot :: annot }
-- | Ellipsis \'...\'.
| Ellipsis { expr_annot :: annot }
-- | Literal byte string.
| ByteStrings { byte_string_strings :: [String], expr_annot :: annot }
   -- | Literal strings (to be concatenated together).
| Strings { strings_strings :: [String], expr_annot :: annot }
   -- | Unicode literal strings (to be concatenated together). Version 2 only.
| UnicodeStrings { unicodestrings_strings :: [String], expr_annot :: annot }
-- | Function call.
| Call
{ call_fun :: Expr annot -- ^ Expression yielding a callable object (such as a function).
, call_args :: [Argument annot] -- ^ Call arguments.
, expr_annot :: annot
}
-- | Subscription, for example \'x [y]\'.
| Subscript { subscriptee :: Expr annot, subscript_expr :: Expr annot, expr_annot :: annot }
-- | Slicing, for example \'w [x:y:z]\'.
| SlicedExpr { slicee :: Expr annot, slices :: [Slice annot], expr_annot :: annot }
   -- | Conditional expression.
| CondExpr
{ ce_true_branch :: Expr annot -- ^ Expression to evaluate if condition is True.
, ce_condition :: Expr annot -- ^ Boolean condition.
, ce_false_branch :: Expr annot -- ^ Expression to evaluate if condition is False.
, expr_annot :: annot
}
-- | Binary operator application.
| BinaryOp { operator :: Op annot, left_op_arg :: Expr annot, right_op_arg :: Expr annot, expr_annot :: annot }
-- | Unary operator application.
| UnaryOp { operator :: Op annot, op_arg :: Expr annot, expr_annot :: annot }
   -- | Dot operator (attribute selection).
| Dot { dot_expr :: Expr annot, dot_attribute :: Ident annot, expr_annot :: annot }
-- | Anonymous function definition (lambda).
| Lambda { lambda_args :: [Parameter annot], lambda_body :: Expr annot, expr_annot :: annot }
-- | Tuple. Can be empty.
| Tuple { tuple_exprs :: [Expr annot], expr_annot :: annot }
-- | Generator yield.
| Yield
-- { yield_expr :: Maybe (Expr annot) -- ^ Optional expression to yield.
{ yield_arg :: Maybe (YieldArg annot) -- ^ Optional Yield argument.
, expr_annot :: annot
}
-- | Generator.
| Generator { gen_comprehension :: Comprehension annot, expr_annot :: annot }
-- | List comprehension.
| ListComp { list_comprehension :: Comprehension annot, expr_annot :: annot }
-- | List.
| List { list_exprs :: [Expr annot], expr_annot :: annot }
-- | Dictionary.
| Dictionary { dict_mappings :: [DictMappingPair annot], expr_annot :: annot }
-- | Dictionary comprehension. /Version 3 only/.
| DictComp { dict_comprehension :: Comprehension annot, expr_annot :: annot }
-- | Set.
| Set { set_exprs :: [Expr annot], expr_annot :: annot }
-- | Set comprehension. /Version 3 only/.
| SetComp { set_comprehension :: Comprehension annot, expr_annot :: annot }
-- | Starred expression. /Version 3 only/.
| Starred { starred_expr :: Expr annot, expr_annot :: annot }
-- | Parenthesised expression.
| Paren { paren_expr :: Expr annot, expr_annot :: annot }
-- | String conversion (backquoted expression). Version 2 only.
   | StringConversion { backquoted_expr :: Expr annot, expr_annot :: annot }
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type ExprSpan = Expr SrcSpan
instance Span ExprSpan where
getSpan = annot
data YieldArg annot
= YieldFrom (Expr annot) annot -- ^ Yield from a generator (Version 3 only)
| YieldExpr (Expr annot) -- ^ Yield value of an expression
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type YieldArgSpan = YieldArg SrcSpan
instance Span YieldArgSpan where
getSpan (YieldFrom _e span) = span
getSpan (YieldExpr e) = getSpan e
instance Annotated Expr where
annot = expr_annot
data DictMappingPair annot =
DictMappingPair (Expr annot) (Expr annot)
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type DictMappingPairSpan = DictMappingPair SrcSpan
instance Span DictMappingPairSpan where
getSpan (DictMappingPair e1 e2) = spanning e1 e2
-- | Slice compenent.
data Slice annot
= SliceProper
{ slice_lower :: Maybe (Expr annot)
, slice_upper :: Maybe (Expr annot)
, slice_stride :: Maybe (Maybe (Expr annot))
, slice_annot :: annot
}
| SliceExpr
{ slice_expr :: Expr annot
, slice_annot :: annot
}
| SliceEllipsis { slice_annot :: annot }
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type SliceSpan = Slice SrcSpan
instance Span SliceSpan where
getSpan = annot
instance Annotated Slice where
annot = slice_annot
-- | Operators.
data Op annot
= And { op_annot :: annot } -- ^ \'and\'
| Or { op_annot :: annot } -- ^ \'or\'
| Not { op_annot :: annot } -- ^ \'not\'
| Exponent { op_annot :: annot } -- ^ \'**\'
| LessThan { op_annot :: annot } -- ^ \'<\'
| GreaterThan { op_annot :: annot } -- ^ \'>\'
| Equality { op_annot :: annot } -- ^ \'==\'
| GreaterThanEquals { op_annot :: annot } -- ^ \'>=\'
| LessThanEquals { op_annot :: annot } -- ^ \'<=\'
| NotEquals { op_annot :: annot } -- ^ \'!=\'
| NotEqualsV2 { op_annot :: annot } -- ^ \'<>\'. Version 2 only.
| In { op_annot :: annot } -- ^ \'in\'
| Is { op_annot :: annot } -- ^ \'is\'
| IsNot { op_annot :: annot } -- ^ \'is not\'
| NotIn { op_annot :: annot } -- ^ \'not in\'
| BinaryOr { op_annot :: annot } -- ^ \'|\'
| Xor { op_annot :: annot } -- ^ \'^\'
| BinaryAnd { op_annot :: annot } -- ^ \'&\'
| ShiftLeft { op_annot :: annot } -- ^ \'<<\'
| ShiftRight { op_annot :: annot } -- ^ \'>>\'
| Multiply { op_annot :: annot } -- ^ \'*\'
| Plus { op_annot :: annot } -- ^ \'+\'
| Minus { op_annot :: annot } -- ^ \'-\'
| Divide { op_annot :: annot } -- ^ \'\/\'
| FloorDivide { op_annot :: annot } -- ^ \'\/\/\'
| Invert { op_annot :: annot } -- ^ \'~\' (bitwise inversion of its integer argument)
| Modulo { op_annot :: annot } -- ^ \'%\'
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type OpSpan = Op SrcSpan
instance Span OpSpan where
getSpan = annot
instance Annotated Op where
annot = op_annot
-- | Augmented assignment operators.
data AssignOp annot
= PlusAssign { assignOp_annot :: annot } -- ^ \'+=\'
| MinusAssign { assignOp_annot :: annot } -- ^ \'-=\'
| MultAssign { assignOp_annot :: annot } -- ^ \'*=\'
| DivAssign { assignOp_annot :: annot } -- ^ \'\/=\'
| ModAssign { assignOp_annot :: annot } -- ^ \'%=\'
   | PowAssign { assignOp_annot :: annot } -- ^ \'**=\'
| BinAndAssign { assignOp_annot :: annot } -- ^ \'&=\'
| BinOrAssign { assignOp_annot :: annot } -- ^ \'|=\'
| BinXorAssign { assignOp_annot :: annot } -- ^ \'^=\'
| LeftShiftAssign { assignOp_annot :: annot } -- ^ \'<<=\'
| RightShiftAssign { assignOp_annot :: annot } -- ^ \'>>=\'
| FloorDivAssign { assignOp_annot :: annot } -- ^ \'\/\/=\'
deriving (Eq,Ord,Show,Typeable,Data,Functor)
type AssignOpSpan = AssignOp SrcSpan
instance Span AssignOpSpan where
getSpan = annot
instance Annotated AssignOp where
annot = assignOp_annot
| TOSPIO/pyn | lib/Language/Python/Common/AST.hs | mit | 27,628 | 0 | 15 | 5,916 | 5,210 | 3,149 | 2,061 | 453 | 0 |
module ProjectEuler.Problem104
( problem
) where
import qualified Data.Set as S
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 104 Solved result
fibs :: [Integer]
fibs = 0:1:zipWith (+) (tail fibs) fibs
-- internal only, as we are only interested in some particular parts
-- of the "show" result of some Integer.
isPandigital' :: String -> Bool
isPandigital' xs = S.size charNum == 9
where
charNum = S.delete '0' . S.fromList $ xs
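-- For example, on nine-character inputs:
--
-- >>> isPandigital' "918273645"
-- True
-- >>> isPandigital' "112345678"
-- False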
fstPandigital :: Integer -> Bool
fstPandigital n = isPandigital' (take 9 (show n))
lstPandigital :: Integer -> Bool
lstPandigital n = isPandigital' (show $ n `mod` 1000000000)
result :: Integer
result =
fst $ head $ filter (\(_, x) -> lstPandigital x && fstPandigital x)
$ zip [0..] fibs
| Javran/Project-Euler | src/ProjectEuler/Problem104.hs | mit | 763 | 0 | 11 | 148 | 251 | 136 | 115 | 19 | 1 |
module RecursiveContents (getRecursiveContents) where
import Control.Monad (forM)
import System.Directory (doesDirectoryExist, getDirectoryContents)
import System.FilePath ((</>))
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topdir = do
names <- getDirectoryContents topdir
let properNames = filter (`notElem` [".", ".."]) names
paths <- forM properNames $ \name -> do
let path = topdir </> name
isDirectory <- doesDirectoryExist path
if isDirectory
then getRecursiveContents path
else return [path]
return (concat paths)
| mithril1992/HaskellPractice | RecursiveContents.hs | mit | 614 | 0 | 15 | 131 | 177 | 93 | 84 | 15 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Y2018.M02.D06.Solution where
{--
What were the articles uploaded or updated today? How do we find that out?
We ... can determine this, given our current upload process: we look in the
logs. This, however, is not desireable: we simply want to answer a question
with no manual (parsing) intervention. The logs are good for telling us what
happened, but they aren't good for simple, automatic answers to questions, like
the one above.
So, let's solve this specific problem by introducing a packet pivot table. For
each packet of articles inserted or updated, we join the article id with the
packet id. The packet ...
BUT WAIT! The packet does not have a timestamp of when it was inserted.
Thus the scope of today's Haskell exercise grows before our very eyes!
How often Haskell exercises imitate Life.*
*Life, n.: scope creep, also known as 'project (mis)management.'
--}
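{--
For reference, here is one possible shape of the schema changes that the
note-to-self comments below allude to. The column types and the referenced
table names are assumptions for illustration only, not the exercise's actual
DDL:

    ALTER TABLE packet ADD COLUMN time TIMESTAMP WITH TIME ZONE NOT NULL;

    CREATE TABLE article_packet (
       id         SERIAL  PRIMARY KEY,
       article_id INTEGER NOT NULL REFERENCES article(id),
       packet_id  INTEGER NOT NULL REFERENCES packet(id)
    );
--}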
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
-- below imports available via 1HaskellADay git repository
import Data.Time.Stamped (Stamped, stampIt)
import Store.SQL.Util.Indexed (IxValue, Index)
import Store.SQL.Util.Pivots (Pivot, joinValue)
import Y2017.M12.D20.Solution (Packet)
import Y2017.M12.D27.Solution (DatedArticle)
import Y2018.M01.D02.Solution (storeArticles)
import Y2018.M01.D16.Solution (readPacket)
import Y2018.M01.D19.Solution (etl)
-- note-to-self: ALTER TABLE packet ...
insertStampedPacketStmt :: Query
insertStampedPacketStmt =
[sql|INSERT INTO packet (time,view,prev,next,total,count)
VALUES (?,?,?,?,?,?)
returning id|]
-- now for the insert packet statement, is the exact time all that crucial?
-- I don't think so, so we can simply take unstamped packets as the argument
-- and enhance those values with timestamps on insert:
insertPackets :: Connection -> [Packet] -> IO [Index]
insertPackets conn packs =
mapM stampIt packs >>= returning conn insertStampedPacketStmt
-- we also need the indices to create the pivot between packets and articles.
-- Now, when you store the articles, add the pivot information to the database
-- see storeArticles
-- note-to-self: CREATE TABLE article_packet ...
insertPacketArtPvtStmt :: Query
insertPacketArtPvtStmt =
[sql|INSERT INTO article_packet (article_id,packet_id) VALUES (?,?)|]
pivotArtPacket :: [IxValue (DatedArticle a)] -> Index -> [Pivot]
pivotArtPacket arts = zipWith joinValue arts . repeat
-- hint: is there a function that does this in module Pivots?
-- with the pivotArtPacket function, you can use the pivot insert function
-- from Store.SQL.Util.Pivots
-- read a packet from the REST endpoint and insert the articles, as you have
-- done before. How many entries were made into the article_packet pivot table?
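{--
One possible way to wire everything together, sketched with assumed names:
readPacket and storeArticles come from the modules imported above (their real
signatures may differ), and insertPivots stands for whatever the pivot-insert
function in Store.SQL.Util.Pivots is actually called:

   etlPacket conn endpoint = do
      pack     <- readPacket endpoint
      [pktIdx] <- insertPackets conn [pack]
      arts     <- storeArticles conn pack
      insertPivots conn (pivotArtPacket arts pktIdx)
--}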
{--
$ select count(1) from article_packet;
132
woot.
--}
| geophf/1HaskellADay | exercises/HAD/Y2018/M02/D06/Solution.hs | mit | 2,924 | 0 | 9 | 456 | 275 | 177 | 98 | 25 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module AtnBot.UserTracker
( TrackerHandle
, newTrackerHandle
, trackUser
, getTrackedUsers
, lookupIMChannel
, channelIsIM
) where
import Control.Lens
import Control.Monad
import qualified Data.HashMap.Strict as HMS
import Data.IORef
import Data.Monoid
import qualified Data.Text.IO as T
import Web.Slack hiding (lines)
data TrackerHandle = TrackerHandle
{ trackedUsers :: IORef (HMS.HashMap UserId ChannelId)
, blacklist :: [UserId]
}
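-- | Create a handle seeded with the IM channels already present in the slack
-- session; blacklisted users are never tracked.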
newTrackerHandle :: SlackSession -> [UserId] -> IO TrackerHandle
newTrackerHandle session blacklist = do
trackedUsers <- newIORef HMS.empty
let h = TrackerHandle{..}
traverseOf_ (slackIms . traverse) (trackUser h) session
return h
trackUser :: TrackerHandle -> IM -> IO ()
trackUser TrackerHandle{..} im = do
let uid = im ^. imUser
let cid = im ^. imId . to imToChannel
unless (elem uid blacklist) $ do
T.putStrLn $ "Tracking user " <> _getId uid
modifyIORef trackedUsers $ HMS.insert uid cid
getTrackedUsers :: TrackerHandle -> IO [UserId]
getTrackedUsers TrackerHandle{..} = HMS.keys <$> readIORef trackedUsers
lookupIMChannel :: TrackerHandle -> UserId -> IO (Maybe ChannelId)
lookupIMChannel TrackerHandle{..} uid = HMS.lookup uid <$> readIORef trackedUsers
channelIsIM :: TrackerHandle -> ChannelId -> IO Bool
channelIsIM TrackerHandle{..} cid = elem cid <$> readIORef trackedUsers
| asayers/attendancebot | src/AtnBot/UserTracker.hs | mit | 1,544 | 0 | 12 | 288 | 448 | 232 | 216 | -1 | -1 |
module Runtime(
typeCast,
defaultVal,
toBool,
boolType,
true,
false,
isTrue,
isFalse,
voidType,
void,
intType,
makePtr
)where
import Grammar
import Parse
import Type
import Value
import Control.Applicative
-- Input: Target type, a RIGHT value
typeCast :: Type -> Value -> Value
typeCast t1 v@(RVal t2 x)
    | t1 == t2 = v
typeCast (Type "bool" 0) (RVal _ x) = if x == "0" then false else true
typeCast (Type "int" 0) (RVal Polymorphism x) = RVal intType $ show $ (read x :: Integer)
typeCast Polymorphism (RVal _ x) = RVal Polymorphism x
typeCast x v = error $ "Type Cast error: " ++ show x ++ " | " ++ show v
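-- Default (right) value of a type; only "bool" and "int" are handled here.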
defaultVal :: Type -> Value
defaultVal (Type "bool" 0) = false
defaultVal (Type "int" 0) = RVal intType "0"
toBool = typeCast boolType
boolType = Type "bool" 0
true = RVal boolType "true"
false = RVal boolType "false"
isTrue = (== true) . toBool
isFalse = not . isTrue
voidType = Type "void" 0
void = RVal voidType ""
intType = Type "int" 0
makeInt = RVal intType . (show :: Integer -> String)
makePtr = Type
| sqd/haskell-C89-interpreter | Runtime.hs | mit | 1,027 | 0 | 8 | 200 | 406 | 216 | 190 | 39 | 2 |
{-
HAAP: Haskell Automated Assessment Platform
This module runs the @GHC@ plugin with the @-Wall@ flag to give all kinds of warnings on code quality.
-}
{-# LANGUAGE EmptyDataDecls, TypeFamilies, FlexibleInstances, FlexibleContexts, UndecidableInstances, MultiParamTypeClasses, OverloadedStrings #-}
module HAAP.Code.GHC
( GHCWallArgs(..)
, runGHCWall
) where
import HAAP.Compiler.GHC
import HAAP.Core
import HAAP.IO
import HAAP.Web.Hakyll
import HAAP.Utils
import HAAP.Plugin
import HAAP.Shelly
import Data.Default
import Data.List
import Data.List.Split
import Data.Maybe
import qualified Data.Text as Text
import Data.Proxy
import Control.Monad.Reader as Reader
import System.FilePath
data GHCWallArgs = GHCWallArgs
{ ghcWallSandbox :: Sandbox
, ghcWallArgs :: [String]
, ghcWallPath :: FilePath -- path relative to the project where to execute the ghc plugin
    , ghcWallFiles :: [FilePath] -- relative to the path where ghc is executed
, ghcWallHtmlPath :: FilePath -- relative to the project path
}
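-- | Run @ghc -Wall@ (plus any extra arguments) over the configured files,
-- parse the emitted warnings and errors, and render them to the configured
-- HTML page, returning that page's routed path.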
runGHCWall :: (MonadIO m,HasPlugin Hakyll t m) => GHCWallArgs -> Haap t m FilePath
runGHCWall wall = do
hp <- getHakyllP
let homerrorpath = ghcWallHtmlPath wall
orErrorHakyllPage homerrorpath homerrorpath $ do
tmp <- getProjectTmpPath
let ioArgs = def { ioSandbox = mapSandboxCfg (dirToRoot (ghcWallPath wall) </>) (ghcWallSandbox wall) }
let extras = ghcWallArgs wall
let files = ghcWallFiles wall
let ghcargs = def { ghcArgs = "-Wall":extras, ghcIO = ioArgs }
res <- orErrorWritePage (tmp </> ghcWallHtmlPath wall) mempty $ runBaseSh $ do
shCd $ ghcWallPath wall
shGhcWith ghcargs files
let messages = parseMessages $ splitWhen isEmptyLine $ lines (Text.unpack $ resStdout res `Text.append` resStderr res)
hakyllFocus ["templates"] $ hakyllRules $ do
-- copy the generated documentation
create [fromFilePath $ ghcWallHtmlPath wall] $ do
route $ idRoute `composeRoutes` funRoute (hakyllRoute hp)
compile $ do
let msgCtx = field "class" (return . fst3 . itemBody)
`mappend` field "suggestion" (return . snd3 . itemBody)
`mappend` field "message" (return . thr3 . itemBody)
let homCtx = constField "projectpath" (fileToRoot $ hakyllRoute hp $ ghcWallHtmlPath wall)
`mappend` listField "messages" msgCtx (mapM makeItem messages)
makeItem "" >>= loadAndApplyHTMLTemplate "templates/ghcwall.html" homCtx >>= hakyllCompile hp
return (hakyllRoute hp $ ghcWallHtmlPath wall)
parseMessages = catMaybes . map parseMessage
parseMessage [] = Nothing
parseMessage (x:xs)
| isInfixOf "warning:" x = Just ("hspec-warning",x,unlines xs)
| isInfixOf "error:" x = Just ("hspec-failure",x,unlines xs)
| otherwise = Nothing --Just ("hspec-debug",x,unlines xs)
| hpacheco/HAAP | src/HAAP/Code/GHC.hs | mit | 3,039 | 0 | 27 | 749 | 751 | 388 | 363 | 56 | 1 |
{-# LANGUAGE GADTs, KindSignatures #-}
data List :: * -> * where
Nil :: List a
Cons :: a -> List a -> List a
concat' = \as bs -> case as of
[] -> bs
(:) a as -> (:) a (concat' as bs)
showPosInt = \n ->
let
showDigit = \d -> case d of
0 -> '0'
1 -> '1'
2 -> '2'
3 -> '3'
4 -> '4'
5 -> '5'
6 -> '6'
7 -> '7'
8 -> '8'
9 -> '9'
go = \n -> case n of
0 -> []
_ -> (:) (showDigit (mod n 10)) (go (div n 10))
reverse = \ls ->
let
go = \ls rs -> case ls of
[] -> rs
(:) a as -> go as ((:) a rs)
in
go ls []
in
reverse (go n)
showPosIntList = \ls ->
let
go = \as -> case as of
Nil -> (:) ']' []
Cons a as ->
concat' (showPosInt a) ( case as of
Nil -> (:) ']' []
Cons _ _ -> (:) ',' (go as)
)
in
(:) '[' (go ls)
zipWithList = \f ->
let
go = \as bs -> case as of
Nil -> Nil
Cons a as -> case bs of
Nil -> Nil
Cons b bs -> Cons (f a b) (go as bs)
in
go
tailList = \ls -> case ls of
Nil -> Nil
Cons a as -> as
headList = \ls -> case ls of
Nil -> 0
Cons a as -> a
takeList = \n ls -> case n of
0 -> Nil
_ -> case ls of
Nil -> Nil
Cons a as -> Cons a (takeList ((-) n 1) as)
fibs = Cons 1 (Cons 1 (zipWithList (+) fibs (tailList fibs)))
main = putStrLn (showPosIntList (takeList 30 fibs))
| CindyLinz/Haskell.js | trans/sample/fibs.hs | mit | 1,453 | 0 | 21 | 585 | 738 | 380 | 358 | 60 | 12 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
module SoOSiM.Components.Thread.Behaviour where
import Control.Concurrent.STM
import Control.Concurrent.STM.TQueue
import Control.Lens
import Control.Monad
import Data.Maybe
import SoOSiM
import SoOSiM.Components.Common
import SoOSiM.Components.MemoryManager
import SoOSiM.Components.Scheduler.Interface
import SoOSiM.Components.SoOSApplicationGraph (AppCommand(..))
import SoOSiM.Components.Thread.Interface
import SoOSiM.Components.Thread.Types
data TH_State
= TH_State
{ _actual_id :: ThreadId
, _sched_id :: ComponentId
, _thread_state :: Maybe (TVar Thread)
, _appName :: String
}
makeLenses ''TH_State
data TH_Cmd
= TH_Start
| TH_Stop
deriving (Typeable,Show)
data TH_Msg
= TH_Void
deriving Typeable
threadIState :: TH_State
threadIState = TH_State (-1) (-1) Nothing ""
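-- | Behaviour of a thread component: on 'TH_Start' (while 'Executing') it
-- reads the timestamps from its input ports, runs its program (or simply
-- burns its execution cycles), forwards timestamps to its output ports and
-- notifies the scheduler that it has completed; 'TH_Stop' stops the component.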
threadBehaviour ::
TH_State
-> Input TH_Cmd
-> Sim TH_State
threadBehaviour s@(TH_State _ _ Nothing _) _ = yield s
threadBehaviour s (Message _ TH_Start schedId) = do
let ts = maybe (error "fromJust: thread_state") id $ s ^. thread_state
t <- runSTM $ readTVar ts
case (t ^. execution_state) of
Executing -> do
      -- Read timestamps from input ports
timestamps <- runSTM $ mapM readTQueue (t ^. in_ports)
-- Execute computation
case (t ^. program) of
[] -> compute ((t ^. exec_cycles) - 1) ()
p -> mapM_ interp p
traceMsgTag "Finished" ("ThreadEnd " ++ (s ^. appName) ++ ".T" ++ show (t ^. threadId) ++ " Proc" ++ show (t ^. res_id))
-- Write to output ports
currentTime <- getTime
let newTime = minimum $ map fst timestamps
runSTM $ mapM_ (\(_,q) -> writeTQueue q (newTime,currentTime)) (t^.out_ports)
-- Signal scheduler that thread has completed
runSTM $ modifyTVar' ts (execution_state .~ Waiting)
threadCompleted (returnAddress schedId) (t ^. threadId)
yield s
-- Waiting to start
Waiting -> do
traceMsg "Waiting"
yield s
-- Finished one execution cycle
Blocked -> do
traceMsg "Stopping"
yield s
Killed -> do
traceMsg "Killed"
yield s
threadBehaviour s (Message _ TH_Stop _) = stop
threadBehaviour s _ = yield s
interp :: AppCommand -> Sim ()
interp (ReadCmd (addr,sz)) = traceMsg ("Reading: " ++ show (addr,sz)) >> readMem addr sz
interp (DelayCmd i) = traceMsg ("Computing for: " ++ show i) >> compute (i-1) ()
interp (WriteCmd (addr,sz)) = traceMsg ("Writing: " ++ show (addr,sz)) >> writeMem addr sz
| christiaanb/SoOSiM-components | src/SoOSiM/Components/Thread/Behaviour.hs | mit | 2,600 | 0 | 20 | 586 | 826 | 430 | 396 | 67 | 5 |
{-|
Module : Control.Arrow.Needle
Description : ASCII-fied arrow notation.
Copyright : (c) 2014 Josh Kirklin
License : MIT
Maintainer : jjvk2@cam.ac.uk
Needle is a domain specific language for ASCII-fied arrow notation. This module enables the use of needle within Haskell by making use of Template Haskell.
In needle, data travels along rails. There are three types of rail, and data travels in different directions on each:
[@=@] left to right
[@\\@] down
[@/@] up
Data enters a rail with @}@, and exits with @>@.
When rails are joined, their contents are concatenated. When rails are split, their contents are duplicated.
An external arrow can be embedded in a rail by enclosing it between a @{@ and a @}@.
Inputs and outputs of rails can be asigned labels with a @:@.
Rails can cross one another, if one of the rails has gone \'underground\' by entering a \'tunnel\'. A tunnel entrance is specified by a @)@, and a tunnel exit is specified by a @(@.
Most questions should be answered by a short example:
> import Control.Arrow.Needle
>
> nTest :: (Int, Int, Int) -> (Int, (Int, Int, Int))
> nTest = [nd|
> aLabel:==={div 2}===\
> }====\ \
> {uncurry (+)}==\=================) \ (==>
> }====/ \ \
> \ \=={nTriple}=>
> \
> }=={uncurry (-)}====:aLabel
> |]
>
> nTriple = [nd|
> }==\==>
> \==>
> \==>
> |]
>>> nTest (3,2,1)
(5,(-1,-1,-1))
-}
module Control.Arrow.Needle (
nd
, ndFile
-- * Reexported for your convenience
, module Control.Arrow
) where
import Control.Arrow
import Control.Arrow.Needle.TH (nd, ndFile)
| ScrambledEggsOnToast/needle | Control/Arrow/Needle.hs | mit | 1,796 | 0 | 5 | 537 | 43 | 29 | 14 | 6 | 0 |
-- generic functions can be defined
:t fst
:t head
| luisgepeto/HaskellLearning | 03 Types and Typeclasses/02_type_variables.hs | mit | 52 | 4 | 5 | 10 | 18 | 7 | 11 | -1 | -1 |
module Parser.Parser where
import Control.Monad (liftM)
import Numeric (readFloat)
import Text.ParserCombinators.Parsec hiding (spaces)
import Parser.Types.LispVal
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseQuasiQuoted
<|> parseQuoted
<|> parseUnquoted
<|> parseLists
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
return $ case first:rest of
"#t" -> Bool True
"#f" -> Bool False
atom -> Atom atom
parseCharacter :: Parser LispVal
parseCharacter = do
c <- letter
return $ Character c
parseDottedList :: Parser LispVal
parseDottedList = do
head' <- endBy parseExpr spaces
tail' <- char '.' >> spaces >> parseExpr
return $ DottedList head' tail'
parseFloat :: Parser LispVal
parseFloat = do
integral <- many1 digit
_ <- char '.'
fractional <- many1 digit
return $ (Float . fst . head . readFloat) $ integral ++ "." ++ fractional
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseLists :: Parser LispVal
parseLists = do
_ <- char '('
x <- try parseList <|> parseDottedList
_ <- char ')'
return x
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) (many1 digit)
parseQuasiQuoted :: Parser LispVal
parseQuasiQuoted = do
_ <- char '`'
x <- parseExpr
return $ List [Atom "quasiquote", x]
parseQuoted :: Parser LispVal
parseQuoted = do
_ <- char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseString :: Parser LispVal
parseString = do
_ <- char '"'
x <- many (letter
<|> space
<|> quotedString
<|> carriageReturn
<|> lineFeed
<|> tabStop
<|> backslash)
_ <- char '"'
return $ String x
parseUnquoted :: Parser LispVal
parseUnquoted = do
_ <- char ','
x <- parseExpr
return $ List [Atom "unquote", x]
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
Left err -> "No match: " ++ show err
Right _val -> "Found value"
carriageReturn :: Parser Char
carriageReturn = char '\\' >> char 'r' >> return '\r'
lineFeed :: Parser Char
lineFeed = char '\\' >> char 'n' >> return '\n'
tabStop :: Parser Char
tabStop = char '\\' >> char 't' >> return '\t'
backslash :: Parser Char
backslash = char '\\' >> char '\\'
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
spaces :: Parser ()
spaces = skipMany1 space
quotedString :: Parser Char
quotedString = char '\\' >> char '"'
| slogsdon/haskell-exercises | write-a-scheme/parsing/src/Parser/Parser.hs | mit | 2,561 | 0 | 15 | 627 | 866 | 416 | 450 | 91 | 3 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
-- | Module: Capnp.New.Accessors
-- Description: Functions for accessing parts of messages.
module Capnp.New.Accessors
( readField
, getField
, setField
, newField
, hasField
, encodeField
, parseField
, setVariant
, initVariant
, encodeVariant
, structWhich
, unionWhich
, structUnion
, unionStruct
) where
import qualified Capnp.Fields as F
import Capnp.Message (Mutability(..))
import qualified Capnp.New.Classes as C
import qualified Capnp.Repr as R
import Capnp.TraversalLimit (evalLimitT)
import qualified Capnp.Untyped as U
import Data.Bits
import Data.Maybe (fromJust, isJust)
import Data.Word
import GHC.Prim (coerce)
{-# INLINE readField #-}
-- | Read the value of a field of a struct.
readField
:: forall k a b mut m.
( R.IsStruct a
, U.ReadCtx m mut
)
=> F.Field k a b
-> R.Raw a mut
-> m (R.Raw b mut)
readField (F.Field field) (R.Raw struct) =
case field of
F.DataField F.DataFieldLoc{ shift, index, mask, defaultValue } -> do
word <- U.getData (fromIntegral index) struct
pure $ R.Raw $ C.fromWord $ ((word .&. mask) `shiftR` fromIntegral shift) `xor` defaultValue
F.PtrField index ->
U.getPtr (fromIntegral index) struct >>= readPtrField
F.GroupField ->
pure $ R.Raw struct
F.VoidField ->
pure $ R.Raw ()
where
-- This is broken out because the type checker needs some extra help:
readPtrField
:: forall pr.
( R.ReprFor b ~ 'R.Ptr pr
, R.IsPtrRepr pr
) => Maybe (U.Ptr mut) -> m (R.Raw b mut)
readPtrField ptr =
R.Raw <$> R.fromPtr @pr (U.message @U.Struct struct) ptr
-- | Return whether the specified field is present. Only applicable for pointer
-- fields.
hasField ::
( U.ReadCtx m mut
, R.IsStruct a
, R.IsPtr b
) => F.Field 'F.Slot a b -> R.Raw a mut -> m Bool
hasField (F.Field (F.PtrField index)) (R.Raw struct) =
isJust <$> U.getPtr (fromIntegral index) struct
{-# INLINE getField #-}
-- | Like 'readField', but:
--
-- * Doesn't need the monadic context; can be used in pure code.
-- * Only works for immutable values.
-- * Only works for fields in the struct's data section.
getField
:: ( R.IsStruct a
, R.ReprFor b ~ 'R.Data sz
, C.Parse b bp
)
=> F.Field 'F.Slot a b
-> R.Raw a 'Const
-> bp
getField field struct =
fromJust $ evalLimitT maxBound $
readField field struct >>= C.parse
{-# INLINE setField #-}
-- | Set a struct field to a value. Not usable for group fields.
setField ::
forall a b m s.
( R.IsStruct a
, U.RWCtx m s
) => F.Field 'F.Slot a b -> R.Raw b ('Mut s) -> R.Raw a ('Mut s) -> m ()
setField (F.Field field) (R.Raw value) (R.Raw struct) =
case field of
F.DataField fieldLoc ->
setDataField fieldLoc
F.PtrField index ->
setPtrField index value struct
F.VoidField ->
pure ()
where
-- This was originally broken out because the type checker needs some extra
-- help, but it's probably more readable this way anyway.
setPtrField
:: forall pr.
( R.ReprFor b ~ 'R.Ptr pr
, R.IsPtrRepr pr
) => Word16 -> U.Unwrapped (R.UntypedPtr pr ('Mut s)) -> U.Struct ('Mut s) -> m ()
setPtrField index value struct =
U.setPtr (R.toPtr @pr value) (fromIntegral index) struct
setDataField
:: forall sz.
( R.ReprFor b ~ 'R.Data sz
, C.IsWord (R.UntypedData sz)
) => F.DataFieldLoc sz -> m ()
setDataField F.DataFieldLoc{ shift, index, mask, defaultValue } = do
oldWord <- U.getData (fromIntegral index) struct
let valueWord = C.toWord value `xor` defaultValue
newWord = (oldWord .&. complement mask)
.|. (valueWord `shiftL` fromIntegral shift)
U.setData newWord (fromIntegral index) struct
-- | Allocate space for the value of a field, and return it.
newField ::
forall a b m s.
( R.IsStruct a
, C.Allocate b
, U.RWCtx m s
) => F.Field 'F.Slot a b -> C.AllocHint b -> R.Raw a ('Mut s) -> m (R.Raw b ('Mut s))
newField field hint parent = do
value <- C.new @b hint (U.message @(R.Raw a) parent)
setField field value parent
pure value
-- | Marshal a parsed value into a struct's field.
encodeField ::
forall a b m s bp.
( R.IsStruct a
, C.Parse b bp
, U.RWCtx m s
) => F.Field 'F.Slot a b -> bp -> R.Raw a ('Mut s) -> m ()
encodeField field parsed struct = do
encoded <- C.encode (U.message @(R.Raw a) struct) parsed
setField field encoded struct
-- | parse a struct's field and return its parsed form.
parseField ::
( R.IsStruct a
, C.Parse b bp
, U.ReadCtx m 'Const
) => F.Field k a b -> R.Raw a 'Const -> m bp
parseField field raw =
readField field raw >>= C.parse
-- | Set the struct's anonymous union to the given variant, with the
-- supplied value as its argument. Not applicable for variants whose
-- argument is a group; use 'initVariant' instead.
setVariant
:: forall a b m s.
( F.HasUnion a
, U.RWCtx m s
) => F.Variant 'F.Slot a b -> R.Raw a ('Mut s) -> R.Raw b ('Mut s) -> m ()
setVariant F.Variant{field, tagValue} struct value = do
setField (F.unionField @a) (R.Raw tagValue) struct
setField field value struct
-- | Set the struct's anonymous union to the given variant, marshalling
-- the supplied value into the message to be its argument. Not applicable
-- for variants whose argument is a group; use 'initVariant' instead.
encodeVariant
:: forall a b m s bp.
( F.HasUnion a
, C.Parse b bp
, U.RWCtx m s
) => F.Variant 'F.Slot a b -> bp -> R.Raw a ('Mut s) -> m ()
encodeVariant F.Variant{field, tagValue} value struct = do
setField (F.unionField @a) (R.Raw tagValue) struct
encodeField field value struct
-- | Set the struct's anonymous union to the given variant, returning
-- the variant's argument, which must be a group (for non-group fields,
-- use 'setVariant' or 'encodeVariant'.
initVariant
:: forall a b m s. (F.HasUnion a, U.RWCtx m s)
=> F.Variant 'F.Group a b -> R.Raw a ('Mut s) -> m (R.Raw b ('Mut s))
initVariant F.Variant{field, tagValue} struct = do
setField (F.unionField @a) (R.Raw tagValue) struct
readField field struct
-- | Get the anonymous union for a struct.
structUnion :: F.HasUnion a => R.Raw a mut -> R.Raw (F.Which a) mut
structUnion = coerce
-- | Get the struct enclosing an anonymous union.
unionStruct :: F.HasUnion a => R.Raw (F.Which a) mut -> R.Raw a mut
unionStruct = coerce
-- | Get a non-opaque view on the struct's anonymous union, which
-- can be used to pattern match on.
structWhich :: forall a mut m. (U.ReadCtx m mut, F.HasUnion a) => R.Raw a mut -> m (F.RawWhich a mut)
structWhich struct = do
R.Raw tagValue <- readField (F.unionField @a) struct
F.internalWhich tagValue struct
-- | Get a non-opaque view on the anonymous union, which can be
-- used to pattern match on.
unionWhich :: forall a mut m. (U.ReadCtx m mut, F.HasUnion a) => R.Raw (F.Which a) mut -> m (F.RawWhich a mut)
unionWhich = structWhich . unionStruct
| zenhack/haskell-capnp | lib/Capnp/New/Accessors.hs | mit | 7,708 | 0 | 16 | 2,080 | 2,396 | 1,236 | 1,160 | 169 | 4 |
-- xmobar configuration file
--
-- Mess with this file to change the appearance of the contents of the status
-- bar which are not directly controlled by xmonad. You can see your changes
-- immediately by recompiling xmonad using "super-q".
--
-- There's a lot you can do here. Refer to "man xmobar".
--
-- Author: David Brewer
-- Repository: https://github.com/davidbrewer/xmonad-ubuntu-conf
Config {
-- Set font and default foreground/background colors. Note that the height of
-- xmobar is controlled by the font you use.
font = "xft:Source Code Pro:size=12:antialias=true"
, additionalFonts = ["xft:FontAwesome-10"]
, bgColor = "black"
, fgColor = "grey"
-- Position is top left, taking up 95% of screen. You are likely to have to
-- tweak the width here based on the width of your screen to make it play
-- nicely with stalonetray, which we want to be taking up the remainder of the
-- space on the right side of your screen.
, position = TopW L 95
-- list of commands which gather information about your system for
-- presentation in the bar.
, commands = [ Run Cpu [ "-t" , "<fn=1></fn> <total>"
, "-H" , "50"
, "--high" , "red"] 10
-- Gather and format memory usage information
, Run Memory [ "-t", "<fn=1></fn> <usedratio>"] 10
-- Date formatting
, Run Date "%b %d %H:%M" "date" 10
-- Battery information. This is likely to require some
-- customization based upon your specific hardware. Or, for a
-- desktop you may want to just remove this section entirely.
-- TODO: figure out how to get the icons to show only when the battery is around that status?
-- http://fontawesome.io/icon/tasks/
-- https://r12a.github.io/apps/conversion/
, Run Battery [ "-t", "<fn=1></fn> <left>"
, "--"
, "-p", "orange"
, "-O", "<fn=1></fn>"
, "-i", "<fn=1></fn>"
, "-o", "Bat"
, "-h", "green"
, "-l", "red"] 10
-- To get volume information, we run a custom bash script. This
-- is because the built-in volume support in xmobar is disabled
-- in Debian and derivatives like Ubuntu.
, Run Com "/bin/bash" ["-c", "~/programming/dotfiles/scripts/get-volume"] "myvolume" 10
, Run Com "/bin/bash" ["-c", "~/programming/dotfiles/scripts/get-backlight"] "mybacklight" 10
, Run DynNetwork [ "-t", "<fn=1></fn> <rx> <fn=1></fn> <tx>"
, "-S", "True"] 10
-- 5.6.6 DynNetwork Args RefreshRate
-- This line tells xmobar to read input from stdin. That's how we
-- get the information that xmonad is sending it for display.
, Run StdinReader ]
-- Separator character used to wrap variables in the xmobar template
, sepChar = "%"
-- Alignment separator character used in the xmobar template. Everything before
-- this will be aligned left, everything after aligned right.
, alignSep = "}{"
-- Overall template for the layout of the xmobar contents. Note that space is
-- significant and can be used to add padding.
, template = "%StdinReader% }{ %dynnetwork% | %cpu% | %memory% | <fn=1></fn> %mybacklight% | %battery% | <fn=1></fn> %myvolume% | <fc=#e6744c>%date%</fc> "
}
| diminishedprime/dotfiles | .xmonad/xmobarrc.hs | mit | 3,657 | 0 | 9 | 1,157 | 284 | 184 | 100 | -1 | -1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzle.Helpers.PlayerPath
(
savePlayerPathEmpty,
savePlayerPath,
) where
import MyPrelude
import File.Binary
import Game
import Game.LevelPuzzle.LevelPuzzleWorld
savePlayerPathEmpty :: Player -> LevelPuzzleWorld -> MEnv' ()
savePlayerPathEmpty player lvl = io $ do
    path <- filePlayerPathEmpty $ takeFileName (levelpuzzleFile lvl) ++
            show (levelpuzzleLevelIx lvl) ++ ".plp"
    writeBinary' (wPlayerPath player lvl) path
savePlayerPath :: Player -> LevelPuzzleWorld -> MEnv' ()
savePlayerPath player lvl = io $ do
path <- filePlayerPath player $ takeFileName (levelpuzzleFile lvl) ++
show (levelpuzzleLevelIx lvl) ++ ".plp"
writeBinary' (wPlayerPath player lvl) path
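-- | Serialize a player's path through the current level: format version,
-- level file name, level index, player id and alias, and finally every
-- segment of the grid path.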
wPlayerPath :: Player -> LevelPuzzleWorld -> Writer
wPlayerPath player lvl = do
wWord8s version
wField fieldWorld
wCStringAlign 4 $ takeFileName (levelpuzzleFile lvl)
wField fieldLevel
wUInt32 $ levelpuzzleLevelIx lvl
wField fieldPlayerID
wCStringAlign 4 $ playerID player
wField fieldPlayerAlias
wCStringAlign 4 $ playerAlias player
    wField fieldSegments
let path = gridPath $ levelpuzzleGrid lvl
forM_ (rangeMod (pathArraySize path) (pathArrayBegin path) (pathArrayEnd path))
$ \ix -> case segmentarrayRead (pathArray path) ix of
seg -> wSegment seg
where
rangeMod size b e =
      if b == e then [] else b : rangeMod size ((b + 1) `mod` size) e
wSegment (Segment (Turn a0 a1 a2 _ _ _ _ _ _) (Node n0 n1 n2)) = do
wWord8s [255, fI a0, fI a1, fI a2] -- fixme: assert bit conversion!
wInt32 $ fI n0
wInt32 $ fI n1
wInt32 $ fI n2
--------------------------------------------------------------------------------
--
-- | s000
version :: [Word8]
version =
[0x73, 0x30, 0x30, 0x30]
fieldWorld :: Word32
fieldWorld = 0x00000001
fieldLevel :: Word32
fieldLevel = 0x00000002
fieldPlayerID :: Word32
fieldPlayerID = 0x00000003
fieldPlayerAlias :: Word32
fieldPlayerAlias = 0x00000004
fieldSegments :: Word32
fieldSegments = 0x00000005
wField :: Word32 -> Writer
wField field =
case word32asWord8s field of
(w0, w1, w2, w3) -> wWord8s [w0, w1, w2, w3]
| karamellpelle/grid | source/Game/LevelPuzzle/Helpers/PlayerPath.hs | gpl-3.0 | 3,046 | 0 | 14 | 726 | 706 | 362 | 344 | 58 | 2 |
module Lab2.Util.Ibans where
validIbans :: [String]
validIbans = [ "AL47212110090000000235698741",
"AD1200012030200359100100",
"AT611904300234573201",
"AZ21NABZ00000000137010001944",
"BH67BMAG00001299123456",
"BE62510007547061",
"BA391290079401028494",
"BG80BNBG96611020345678",
"HR1210010051863000160",
"CY17002001280000001200527600",
"CZ6508000000192000145399",
"DK5000400440116243",
"EE382200221020145685",
"FO9754320388899944",
"FI2112345600000785",
"FR1420041010050500013M02606",
"GE29NB0000000101904917",
"DE89370400440532013000",
"GI75NWBK000000007099453",
"GR1601101250000000012300695",
"GL5604449876543210",
"HU42117730161111101800000000",
"IS140159260076545510730339",
"IE29AIBK93115212345678",
"IL620108000000099999999",
"IT40S0542811101000000123456",
"JO94CBJO0010000000000131000302",
"KW81CBKU0000000000001234560101",
"LV80BANK0000435195001",
"LB62099900000001001901229114",
"LI21088100002324013AA",
"LT121000011101001000",
"LU280019400644750000",
"MK07250120000058984",
"MT84MALT011000012345MTLCAST001S",
"MU17BOMM0101101030300200000MUR",
"MD24AG000225100013104168",
"MC9320052222100112233M44555",
"ME25505000012345678951",
"NL39RABO0300065264",
"NO9386011117947",
"PK36SCBL0000001123456702",
"PL60102010260000042270201111",
"PT50000201231234567890154",
"QA58DOHB00001234567890ABCDEFG",
"RO49AAAA1B31007593840000",
"SM86U0322509800000000270100",
"SA0380000000608010167519",
"RS35260005601001611379",
"SK3112000000198742637541",
"SI56191000000123438",
"ES8023100001180000012345",
"SE3550000000054910000003",
"CH9300762011623852957",
"TN5910006035183598478831",
"TR330006100519786457841326",
"AE070331234567890123456",
"GB86RBOS60161331926819" ] | vdweegen/UvA-Software_Testing | Lab2/Util/Ibans.hs | gpl-3.0 | 2,513 | 0 | 5 | 923 | 194 | 129 | 65 | 60 | 1 |
module Main where
import System.Environment
import Data.Maybe
import Data.List
import Control.Monad
import Data.Char
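-- | Split a list on every occurrence of the separator, dropping the separators.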
split :: Eq a => a -> [a] -> [[a]]
split _ [] = []
split s w = takeWhile (/= s) w : (split s $ drop 1 $ dropWhile (/= s) w)
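-- | Keep only the trailing run of non-control characters of a fragment.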
processFragment :: String -> String
processFragment inp = reverse $ takeWhile (\a -> not (isControl a)) $ reverse inp
main = do
args <- getArgs
case args of
[a, b] -> do
contents <- readFile b
let minLength = read a :: Int
let strings = filter (\a -> length a >= minLength)
$ map processFragment
$ split '\0' contents
forM_ strings putStrLn
_ ->
putStrLn "Usage: strings [min-length: 4?] [binary]" | aauger/HaskellCoreUtils | HaskellCoreUtils/src/strings.hs | gpl-3.0 | 679 | 4 | 21 | 158 | 289 | 145 | 144 | 23 | 2 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.GridData.Plain.SoundPath
(
SoundPath (..),
loadSoundPath,
unloadSoundPath,
) where
import MyPrelude
import Game.Values
import File
import OpenAL
import OpenAL.Helpers
data SoundPath =
SoundPath
{
soundPathBuf :: !ALuint,
soundPathSrc :: !ALuint
}
loadSoundPath :: IO SoundPath
loadSoundPath = do
-- buffer
buf <- genBuf
fileStaticData "Grid/Output/path_newsegment.mp3" >>= loadBuf buf
-- src
src <- genSrc
-- cone
alSourcef src al_CONE_INNER_ANGLE valueSoundPathConeInnerAngle
alSourcef src al_CONE_OUTER_ANGLE valueSoundPathConeOuterAngle
alSourcef src al_CONE_OUTER_GAIN valueSoundPathConeOuterGain
-- distance properties
alSourcef src al_REFERENCE_DISTANCE valueSoundPathReferenceDistance
alSourcef src al_MAX_DISTANCE valueSoundPathMaxDistance
alSourcef src al_ROLLOFF_FACTOR valueSoundPathRolloffFactor
-- set default buffer
alSourcei src al_BUFFER $ fI buf
return SoundPath
{
soundPathBuf = buf,
soundPathSrc = src
}
unloadSoundPath :: SoundPath -> IO ()
unloadSoundPath sound = do
-- alStopSource?
delSrc $ soundPathSrc sound
delBuf $ soundPathBuf sound
| karamellpelle/grid | source/Game/Grid/GridData/Plain/SoundPath.hs | gpl-3.0 | 2,021 | 0 | 9 | 452 | 268 | 145 | 123 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates (replaces) workflow template. The updated template must contain
-- version that matches the current server version.
--
-- /See:/ <https://cloud.google.com/dataproc/ Cloud Dataproc API Reference> for @dataproc.projects.regions.workflowTemplates.update@.
module Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.Update
(
-- * REST Resource
ProjectsRegionsWorkflowTemplatesUpdateResource
-- * Creating a Request
, projectsRegionsWorkflowTemplatesUpdate
, ProjectsRegionsWorkflowTemplatesUpdate
-- * Request Lenses
, prwtuXgafv
, prwtuUploadProtocol
, prwtuAccessToken
, prwtuUploadType
, prwtuPayload
, prwtuName
, prwtuCallback
) where
import Network.Google.Dataproc.Types
import Network.Google.Prelude
-- | A resource alias for @dataproc.projects.regions.workflowTemplates.update@ method which the
-- 'ProjectsRegionsWorkflowTemplatesUpdate' request conforms to.
type ProjectsRegionsWorkflowTemplatesUpdateResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] WorkflowTemplate :>
Put '[JSON] WorkflowTemplate
-- | Updates (replaces) workflow template. The updated template must contain
-- version that matches the current server version.
--
-- /See:/ 'projectsRegionsWorkflowTemplatesUpdate' smart constructor.
data ProjectsRegionsWorkflowTemplatesUpdate =
ProjectsRegionsWorkflowTemplatesUpdate'
{ _prwtuXgafv :: !(Maybe Xgafv)
, _prwtuUploadProtocol :: !(Maybe Text)
, _prwtuAccessToken :: !(Maybe Text)
, _prwtuUploadType :: !(Maybe Text)
, _prwtuPayload :: !WorkflowTemplate
, _prwtuName :: !Text
, _prwtuCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsRegionsWorkflowTemplatesUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prwtuXgafv'
--
-- * 'prwtuUploadProtocol'
--
-- * 'prwtuAccessToken'
--
-- * 'prwtuUploadType'
--
-- * 'prwtuPayload'
--
-- * 'prwtuName'
--
-- * 'prwtuCallback'
projectsRegionsWorkflowTemplatesUpdate
:: WorkflowTemplate -- ^ 'prwtuPayload'
-> Text -- ^ 'prwtuName'
-> ProjectsRegionsWorkflowTemplatesUpdate
projectsRegionsWorkflowTemplatesUpdate pPrwtuPayload_ pPrwtuName_ =
ProjectsRegionsWorkflowTemplatesUpdate'
{ _prwtuXgafv = Nothing
, _prwtuUploadProtocol = Nothing
, _prwtuAccessToken = Nothing
, _prwtuUploadType = Nothing
, _prwtuPayload = pPrwtuPayload_
, _prwtuName = pPrwtuName_
, _prwtuCallback = Nothing
}
-- | V1 error format.
prwtuXgafv :: Lens' ProjectsRegionsWorkflowTemplatesUpdate (Maybe Xgafv)
prwtuXgafv
= lens _prwtuXgafv (\ s a -> s{_prwtuXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prwtuUploadProtocol :: Lens' ProjectsRegionsWorkflowTemplatesUpdate (Maybe Text)
prwtuUploadProtocol
= lens _prwtuUploadProtocol
(\ s a -> s{_prwtuUploadProtocol = a})
-- | OAuth access token.
prwtuAccessToken :: Lens' ProjectsRegionsWorkflowTemplatesUpdate (Maybe Text)
prwtuAccessToken
= lens _prwtuAccessToken
(\ s a -> s{_prwtuAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prwtuUploadType :: Lens' ProjectsRegionsWorkflowTemplatesUpdate (Maybe Text)
prwtuUploadType
= lens _prwtuUploadType
(\ s a -> s{_prwtuUploadType = a})
-- | Multipart request metadata.
prwtuPayload :: Lens' ProjectsRegionsWorkflowTemplatesUpdate WorkflowTemplate
prwtuPayload
= lens _prwtuPayload (\ s a -> s{_prwtuPayload = a})
-- | Output only. The resource name of the workflow template, as described in
-- https:\/\/cloud.google.com\/apis\/design\/resource_names. For
-- projects.regions.workflowTemplates, the resource name of the template
-- has the following format:
-- projects\/{project_id}\/regions\/{region}\/workflowTemplates\/{template_id}
-- For projects.locations.workflowTemplates, the resource name of the
-- template has the following format:
-- projects\/{project_id}\/locations\/{location}\/workflowTemplates\/{template_id}
prwtuName :: Lens' ProjectsRegionsWorkflowTemplatesUpdate Text
prwtuName
= lens _prwtuName (\ s a -> s{_prwtuName = a})
-- | JSONP
prwtuCallback :: Lens' ProjectsRegionsWorkflowTemplatesUpdate (Maybe Text)
prwtuCallback
= lens _prwtuCallback
(\ s a -> s{_prwtuCallback = a})
instance GoogleRequest
ProjectsRegionsWorkflowTemplatesUpdate
where
type Rs ProjectsRegionsWorkflowTemplatesUpdate =
WorkflowTemplate
type Scopes ProjectsRegionsWorkflowTemplatesUpdate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsRegionsWorkflowTemplatesUpdate'{..}
= go _prwtuName _prwtuXgafv _prwtuUploadProtocol
_prwtuAccessToken
_prwtuUploadType
_prwtuCallback
(Just AltJSON)
_prwtuPayload
dataprocService
where go
= buildClient
(Proxy ::
Proxy ProjectsRegionsWorkflowTemplatesUpdateResource)
mempty
| brendanhay/gogol | gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/WorkflowTemplates/Update.hs | mpl-2.0 | 6,321 | 0 | 16 | 1,287 | 785 | 462 | 323 | 119 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.URLShortener.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.URLShortener.Types
(
-- * Service Configuration
uRLShortenerService
-- * OAuth Scopes
, urlShortenerScope
-- * URL
, URL
, url
, uStatus
, uKind
, uCreated
, uAnalytics
, uLongURL
, uId
-- * StringCount
, StringCount
, stringCount
, scCount
, scId
-- * URLListProjection
, URLListProjection (..)
-- * AnalyticsSnapshot
, AnalyticsSnapshot
, analyticsSnapshot
, asPlatforms
, asShortURLClicks
, asReferrers
, asCountries
, asLongURLClicks
, asBrowsers
-- * AnalyticsSummary
, AnalyticsSummary
, analyticsSummary
, asWeek
, asAllTime
, asDay
, asTwoHours
, asMonth
-- * URLGetProjection
, URLGetProjection (..)
-- * URLHistory
, URLHistory
, urlHistory
, uhTotalItems
, uhNextPageToken
, uhItemsPerPage
, uhKind
, uhItems
) where
import Network.Google.Prelude
import Network.Google.URLShortener.Types.Product
import Network.Google.URLShortener.Types.Sum
-- | Default request referring to version 'v1' of the URL Shortener API. This contains the host and root path used as a starting point for constructing service requests.
uRLShortenerService :: ServiceConfig
uRLShortenerService
= defaultService (ServiceId "urlshortener:v1")
"www.googleapis.com"
-- | Manage your goo.gl short URLs
urlShortenerScope :: Proxy '["https://www.googleapis.com/auth/urlshortener"]
urlShortenerScope = Proxy;
| rueshyna/gogol | gogol-urlshortener/gen/Network/Google/URLShortener/Types.hs | mpl-2.0 | 2,067 | 0 | 7 | 510 | 219 | 152 | 67 | 55 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudKMS.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns metadata for a given CryptoKeyVersion.
--
-- /See:/ <https://cloud.google.com/kms/ Cloud Key Management Service (KMS) API Reference> for @cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get@.
module Network.Google.Resource.CloudKMS.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.Get
(
-- * REST Resource
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGetResource
-- * Creating a Request
, projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
, ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
-- * Request Lenses
, plkrckckvgXgafv
, plkrckckvgUploadProtocol
, plkrckckvgAccessToken
, plkrckckvgUploadType
, plkrckckvgName
, plkrckckvgCallback
) where
import Network.Google.CloudKMS.Types
import Network.Google.Prelude
-- | A resource alias for @cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get@ method which the
-- 'ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet' request conforms to.
type ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGetResource
=
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] CryptoKeyVersion
-- | Returns metadata for a given CryptoKeyVersion.
--
-- /See:/ 'projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet' smart constructor.
data ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet =
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet'
{ _plkrckckvgXgafv :: !(Maybe Xgafv)
, _plkrckckvgUploadProtocol :: !(Maybe Text)
, _plkrckckvgAccessToken :: !(Maybe Text)
, _plkrckckvgUploadType :: !(Maybe Text)
, _plkrckckvgName :: !Text
, _plkrckckvgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plkrckckvgXgafv'
--
-- * 'plkrckckvgUploadProtocol'
--
-- * 'plkrckckvgAccessToken'
--
-- * 'plkrckckvgUploadType'
--
-- * 'plkrckckvgName'
--
-- * 'plkrckckvgCallback'
projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
:: Text -- ^ 'plkrckckvgName'
-> ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet pPlkrckckvgName_ =
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet'
{ _plkrckckvgXgafv = Nothing
, _plkrckckvgUploadProtocol = Nothing
, _plkrckckvgAccessToken = Nothing
, _plkrckckvgUploadType = Nothing
, _plkrckckvgName = pPlkrckckvgName_
, _plkrckckvgCallback = Nothing
}
-- | V1 error format.
plkrckckvgXgafv :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet (Maybe Xgafv)
plkrckckvgXgafv
= lens _plkrckckvgXgafv
(\ s a -> s{_plkrckckvgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plkrckckvgUploadProtocol :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet (Maybe Text)
plkrckckvgUploadProtocol
= lens _plkrckckvgUploadProtocol
(\ s a -> s{_plkrckckvgUploadProtocol = a})
-- | OAuth access token.
plkrckckvgAccessToken :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet (Maybe Text)
plkrckckvgAccessToken
= lens _plkrckckvgAccessToken
(\ s a -> s{_plkrckckvgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plkrckckvgUploadType :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet (Maybe Text)
plkrckckvgUploadType
= lens _plkrckckvgUploadType
(\ s a -> s{_plkrckckvgUploadType = a})
-- | Required. The name of the CryptoKeyVersion to get.
plkrckckvgName :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet Text
plkrckckvgName
= lens _plkrckckvgName
(\ s a -> s{_plkrckckvgName = a})
-- | JSONP
plkrckckvgCallback :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet (Maybe Text)
plkrckckvgCallback
= lens _plkrckckvgCallback
(\ s a -> s{_plkrckckvgCallback = a})
instance GoogleRequest
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
where
type Rs
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
= CryptoKeyVersion
type Scopes
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloudkms"]
requestClient
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGet'{..}
= go _plkrckckvgName _plkrckckvgXgafv
_plkrckckvgUploadProtocol
_plkrckckvgAccessToken
_plkrckckvgUploadType
_plkrckckvgCallback
(Just AltJSON)
cloudKMSService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGetResource)
mempty
| brendanhay/gogol | gogol-cloudkms/gen/Network/Google/Resource/CloudKMS/Projects/Locations/KeyRings/CryptoKeys/CryptoKeyVersions/Get.hs | mpl-2.0 | 6,206 | 0 | 15 | 1,245 | 701 | 411 | 290 | 115 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ContainerAnalysis.Projects.Occurrences.BatchCreate
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates new occurrences in batch.
--
-- /See:/ <https://cloud.google.com/container-analysis/api/reference/rest/ Container Analysis API Reference> for @containeranalysis.projects.occurrences.batchCreate@.
module Network.Google.Resource.ContainerAnalysis.Projects.Occurrences.BatchCreate
(
-- * REST Resource
ProjectsOccurrencesBatchCreateResource
-- * Creating a Request
, projectsOccurrencesBatchCreate
, ProjectsOccurrencesBatchCreate
-- * Request Lenses
, pobcParent
, pobcXgafv
, pobcUploadProtocol
, pobcAccessToken
, pobcUploadType
, pobcPayload
, pobcCallback
) where
import Network.Google.ContainerAnalysis.Types
import Network.Google.Prelude
-- | A resource alias for @containeranalysis.projects.occurrences.batchCreate@ method which the
-- 'ProjectsOccurrencesBatchCreate' request conforms to.
type ProjectsOccurrencesBatchCreateResource =
"v1beta1" :>
Capture "parent" Text :>
"occurrences:batchCreate" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] BatchCreateOccurrencesRequest :>
Post '[JSON] BatchCreateOccurrencesResponse
-- | Creates new occurrences in batch.
--
-- /See:/ 'projectsOccurrencesBatchCreate' smart constructor.
data ProjectsOccurrencesBatchCreate =
ProjectsOccurrencesBatchCreate'
{ _pobcParent :: !Text
, _pobcXgafv :: !(Maybe Xgafv)
, _pobcUploadProtocol :: !(Maybe Text)
, _pobcAccessToken :: !(Maybe Text)
, _pobcUploadType :: !(Maybe Text)
, _pobcPayload :: !BatchCreateOccurrencesRequest
, _pobcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsOccurrencesBatchCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pobcParent'
--
-- * 'pobcXgafv'
--
-- * 'pobcUploadProtocol'
--
-- * 'pobcAccessToken'
--
-- * 'pobcUploadType'
--
-- * 'pobcPayload'
--
-- * 'pobcCallback'
projectsOccurrencesBatchCreate
:: Text -- ^ 'pobcParent'
-> BatchCreateOccurrencesRequest -- ^ 'pobcPayload'
-> ProjectsOccurrencesBatchCreate
projectsOccurrencesBatchCreate pPobcParent_ pPobcPayload_ =
ProjectsOccurrencesBatchCreate'
{ _pobcParent = pPobcParent_
, _pobcXgafv = Nothing
, _pobcUploadProtocol = Nothing
, _pobcAccessToken = Nothing
, _pobcUploadType = Nothing
, _pobcPayload = pPobcPayload_
, _pobcCallback = Nothing
}
-- | Required. The name of the project in the form of
-- \`projects\/[PROJECT_ID]\`, under which the occurrences are to be
-- created.
pobcParent :: Lens' ProjectsOccurrencesBatchCreate Text
pobcParent
= lens _pobcParent (\ s a -> s{_pobcParent = a})
-- | V1 error format.
pobcXgafv :: Lens' ProjectsOccurrencesBatchCreate (Maybe Xgafv)
pobcXgafv
= lens _pobcXgafv (\ s a -> s{_pobcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pobcUploadProtocol :: Lens' ProjectsOccurrencesBatchCreate (Maybe Text)
pobcUploadProtocol
= lens _pobcUploadProtocol
(\ s a -> s{_pobcUploadProtocol = a})
-- | OAuth access token.
pobcAccessToken :: Lens' ProjectsOccurrencesBatchCreate (Maybe Text)
pobcAccessToken
= lens _pobcAccessToken
(\ s a -> s{_pobcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pobcUploadType :: Lens' ProjectsOccurrencesBatchCreate (Maybe Text)
pobcUploadType
= lens _pobcUploadType
(\ s a -> s{_pobcUploadType = a})
-- | Multipart request metadata.
pobcPayload :: Lens' ProjectsOccurrencesBatchCreate BatchCreateOccurrencesRequest
pobcPayload
= lens _pobcPayload (\ s a -> s{_pobcPayload = a})
-- | JSONP
pobcCallback :: Lens' ProjectsOccurrencesBatchCreate (Maybe Text)
pobcCallback
= lens _pobcCallback (\ s a -> s{_pobcCallback = a})
instance GoogleRequest ProjectsOccurrencesBatchCreate
where
type Rs ProjectsOccurrencesBatchCreate =
BatchCreateOccurrencesResponse
type Scopes ProjectsOccurrencesBatchCreate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsOccurrencesBatchCreate'{..}
= go _pobcParent _pobcXgafv _pobcUploadProtocol
_pobcAccessToken
_pobcUploadType
_pobcCallback
(Just AltJSON)
_pobcPayload
containerAnalysisService
where go
= buildClient
(Proxy ::
Proxy ProjectsOccurrencesBatchCreateResource)
mempty
| brendanhay/gogol | gogol-containeranalysis/gen/Network/Google/Resource/ContainerAnalysis/Projects/Occurrences/BatchCreate.hs | mpl-2.0 | 5,682 | 0 | 17 | 1,238 | 781 | 456 | 325 | 117 | 1 |
module Network.Haskoin.Wallet.Block where
import Control.Exception (throw)
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Monad.Trans (MonadIO)
import Data.Maybe (fromMaybe)
import Database.Persist.Sql (SqlPersistT)
import Network.Haskoin.Block
import Network.Haskoin.Node.HeaderTree
import Network.Haskoin.Wallet.Model
import Network.Haskoin.Wallet.Transaction
import Network.Haskoin.Wallet.Types
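-- | List blocks on the wallet's best chain above the point where it forks
-- from the given block (specified by height or hash), paginated according to
-- the offset, limit and direction of the 'ListRequest'.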
mainChain :: (MonadIO m, MonadThrow m)
=> Either BlockHeight BlockHash
-> ListRequest
-> SqlPersistT m (ListResult NodeBlock)
mainChain blockE ListRequest{..} = do
bestHash <- fst <$> walletBestBlock
bestM <- getBlockByHash bestHash
best <- maybe (throwM $ WalletException "Could not find wallet best block")
return bestM
remoteNode <- case blockE of
Right h -> do
remoteNodeM <- getBlockByHash h
            maybe (throwM $ WalletException "Could not get remote node")
return remoteNodeM
Left h -> do
heightNodeM <- getBlockByHeight best h
            maybe (throwM $ WalletException "Could not find block height")
return heightNodeM
frst <- (+1) . nodeBlockHeight <$> splitBlock best remoteNode
if nodeBlockHeight best < frst
then return $ ListResult [] 0
else do
let cnt = nodeBlockHeight best - frst
limit = min listLimit (cnt - listOffset)
offset =
if listReverse
then cnt - listOffset - limit
else listOffset
nodes <- getBlocksFromHeight best limit (frst + offset)
return $ ListResult nodes cnt
blockTxs :: [NodeBlock] -> [WalletTx] -> [(NodeBlock, [WalletTx])]
blockTxs blocks transactions = reverse $ go [] blocks transactions
where
go bs [] _ = bs
go bs (n:ns) [] = go ((n,[]):bs) ns []
go [] (n:ns) xs = go [(n,[])] ns xs
go (b:bs) (n:ns) (x:xs)
| nodeHash (fst b) == blockHashOf x =
go ((fst b, x : snd b) : bs) (n:ns) xs
| nodeHash n == blockHashOf x =
go ((n, [x]) : b : bs) ns xs
| otherwise = go ((n, []) : b : bs) ns (x:xs)
blockHashOf t = fromMaybe
(throw $ WalletException "Unexpected unconfirmed transaction")
(walletTxConfirmedBy t)
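-- An illustrative (hypothetical) call of 'blockTxs': with blocks
-- @[b1, b2, b3]@ and transactions @[t1, t2, t3]@, where @t1@ and @t2@ were
-- confirmed in @b1@ and @t3@ in @b3@, and both lists given in block order,
-- the result is @[(b1, [t2, t1]), (b2, []), (b3, [t3])]@; note that the
-- transactions of a single block come out in reverse input order.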
| plaprade/haskoin | haskoin-wallet/src/Network/Haskoin/Wallet/Block.hs | unlicense | 2,513 | 0 | 16 | 856 | 797 | 412 | 385 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
-- |
-- Module : DNA.Logging
-- Copyright : (C) 2014-2015 Braam Research, LLC.
-- License : Apache-2.0
--
-- Logging and profiling facilities. Log messages are written to GHC's
-- eventlog in the following format:
--
-- > TAG [ATTR]* message
--
-- The tag is a sequence of alphanumeric characters, usually in all
-- caps. The tag can be:
--
-- * MSG: for simple log messages
-- * PANIC: internal error. Should only be triggered by implementation bug
-- * FATAL: fatal error from which actor could not recover
-- * ERROR: ordinary error
-- * DEBUG: debug messages
-- * SYNC: for synchronising time between nodes
-- * START x/END x: sample of a performance metric (for profiling)
--
-- Attributes can be used to add (possibly optional) extra data to the
-- message. They are enclosed in square brackets and must precede the
-- message. Possible attributes are
--
-- * [pid=PID]: for the Cloud Haskell process ID. For messages
-- concerning the whole program, this may not be set.
--
-- * [SAMPLER:METRIC=VAL/TIME]: records the value of a performance
-- metric. When a time is given, it is the time that the counter
-- has been running.
--
-- * [hint:METRIC=VAL]: A performance hint by the program, such as
-- expected number of floating point operations.
module DNA.Logging (
-- * Options and basic API
MonadLog(..)
, LoggerOpt(..)
, DebugPrint(..)
, initLogging
, processAttributes
-- * Logging API
, taggedMessage
, eventMessage
, message
-- ** Actor operations
, logSpawn
, logConnect
-- ** Error logging
, panicMsg
, fatalMsg
, errorMsg
, warningMsg
, debugMsg
-- * Profiling API
, synchronizationPoint
, logDuration
, logProfile
-- * Profiling hints
, ProfileHint(..)
, floatHint, memHint, ioHint, haskellHint, cudaHint
) where
import Control.Applicative
import Control.Concurrent
import Control.Distributed.Process (getSelfPid,Process,ProcessId)
import Control.Exception (evaluate)
import Control.Monad (when,unless,liftM,forM_)
import Control.Monad.IO.Class
import Control.Monad.Except (ExceptT)
import Control.Monad.Trans.Class
import Data.Time
import Data.Maybe (fromMaybe)
import Data.IORef
import Data.Tuple (swap)
import Data.Typeable (Typeable)
import Data.List (unfoldr)
import Data.Binary (Binary)
import qualified Data.Map.Strict as Map
import Data.Word (Word64)
import GHC.Stats
import GHC.Generics (Generic)
import Debug.Trace (traceEventIO)
#ifdef USE_CUDA
import Profiling.CUDA.Activity
import Profiling.CUDA.Metrics
#endif
import Profiling.Linux.Perf.Stat
import System.IO
import System.IO.Unsafe (unsafePerformIO)
import System.Locale (defaultTimeLocale)
import System.Mem (performGC)
import DNA.Types (AID)
----------------------------------------------------------------
-- Basic logging API
----------------------------------------------------------------
-- | Type class for monads from which we can write messages to the
--   log. This API only covers getting current settings and doesn't
--   describe how to change them. Note that not all monads support
--   changing settings.
class MonadIO m => MonadLog m where
-- | Who created log message. It could be process ID, actor name etc.
logSource :: m String
logSource = return ""
-- | Logger options
--
-- Verbosity of logging:
--
-- * -2 - only fatal errors logged
-- * -1 - error and more severe events logged
-- * 0 - warnings and more severe events logged
-- * 1 - everything is logged
logLoggerOpt :: m LoggerOpt
logLoggerOpt = return $ LoggerOpt 0 NoDebugPrint ""
instance MonadLog IO
instance MonadLog Process where
logSource = show <$> getSelfPid
instance MonadLog m => MonadLog (ExceptT e m) where
logSource = lift logSource
logLoggerOpt = lift logLoggerOpt
-- | Is debug printing enabled
data DebugPrint
= NoDebugPrint
-- ^ Debug printing disabled (default).
| DebugPrintEnabled
-- ^ Debug printing enabled but will NOT be inherited by child processes
| DebugPrintInherited
-- ^ Debug printing enabled but will be inherited by child processes
deriving (Show,Eq,Typeable,Generic)
instance Binary DebugPrint
-- | Modes of operation for the logger. Most additional attributes
-- cost performance, therefore default state is to run without any of
-- these modifiers.
data LoggerOpt = LoggerOpt
{ logOptVerbose :: Int
-- ^ The higher, the more additional information we output about
-- what we are doing.
, logOptDebugPrint :: DebugPrint
-- ^ Whether debug printing is enabled
, logOptMeasure :: String
-- ^ Gather detailed statistics about the given group of
-- performance metrics, at the possible expense of performance.
}
deriving (Show,Typeable,Generic)
instance Binary LoggerOpt
-- Sequence number for splitting messages when writing them to the
-- eventlog. We need a global per-program supply of unique numbers.
loggerMsgId :: IORef Int
loggerMsgId = unsafePerformIO $ newIORef 0
{-# NOINLINE loggerMsgId #-}
loggerFloatCounters :: IORef (Map.Map ThreadId PerfStatGroup)
loggerFloatCounters = unsafePerformIO $ newIORef Map.empty
{-# NOINLINE loggerFloatCounters #-}
loggerCacheCounters :: IORef (Map.Map ThreadId PerfStatGroup)
loggerCacheCounters = unsafePerformIO $ newIORef Map.empty
{-# NOINLINE loggerCacheCounters #-}
#ifdef USE_CUDA
-- Whether CUDA CUPTI is enabled
loggerCuptiEnabled :: MVar Bool
loggerCuptiEnabled = unsafePerformIO $ newMVar False
{-# NOINLINE loggerCuptiEnabled #-}
#endif
-- | Initialise logging facilities. This must be called once at
-- program start, before the first messages are created.
initLogging :: LoggerOpt -> IO ()
initLogging opt = do
-- Initialise profiling sub-modules
#ifdef USE_CUDA
cudaInit opt
#endif
-- Set console output to be line-buffered. We want it for
-- diagnostics, no reason to buffer.
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering -- Should be default
----------------------------------------------------------------
-- Primitives for logging
----------------------------------------------------------------
type Attr = (String, String)
-- | Generate the specified eventlog message
rawMessage :: String -- ^ Message tag
-> [Attr] -- ^ Message attributes
-> String -- ^ Message body
-> Bool -- ^ If True then message is written to stdout too
-> IO ()
rawMessage tag attrs msg logStdout = do
-- Make message text
let formatAttr (attr, val) = ' ':'[':attr ++ '=': val ++ "]"
text = concat (tag : map formatAttr attrs) ++ ' ':msg
-- Check whether it's too long for the RTS to output in one
-- piece. This is rare, but we don't want to lose information.
when logStdout $
putStrLn text
let splitThreshold = 512
if length text < splitThreshold then traceEventIO text
else do
-- Determine marker. We need this because the message chunks
-- might get split up.
msgId <- atomicModifyIORef' loggerMsgId (\x -> (x+1,x))
let mark = "[[" ++ show msgId ++ "]]"
-- Now split message up and output it. Every message but the
-- last gets the continuation marker at the end, and every
-- message but the first is a continuation.
let split "" = Nothing
split str = Just $ splitAt (splitThreshold - 20) str
pieces = unfoldr split text
start = head pieces; (mid, end:_) = splitAt (length pieces-2) (tail pieces)
traceEventIO (start ++ mark)
forM_ mid $ \m -> traceEventIO (mark ++ m ++ mark)
traceEventIO (mark ++ end)
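-- To illustrate the splitting protocol above (with a hypothetical message
-- id of 7 and chunks c1, c2, c3), a long message is emitted as three
-- events that a log consumer can stitch back together:
--
-- > c1[[7]]
-- > [[7]]c2[[7]]
-- > [[7]]c3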
-- | Generate identification attributes. It uses the 'logSource' method
--   for getting the name of the PID.
processAttributes :: MonadLog m => m [Attr]
processAttributes = do
pid <- logSource
case pid of
"" -> return []
_ -> return [("pid", pid)]
----------------------------------------------------------------
-- Logging API
----------------------------------------------------------------
-- | Output a custom-tag process message into the eventlog.
taggedMessage :: MonadLog m
=> String -- ^ Message tag
-> String -- ^ Message
-> m ()
taggedMessage tag msg = do
attrs <- processAttributes
liftIO $ rawMessage tag attrs msg True
-- | Put a global message into eventlog, (verbosity = 0).
eventMessage :: MonadLog m => String -> m ()
eventMessage = message 0
-- | Put a message at the given verbosity level
message :: MonadLog m => Int -> String -> m ()
message v msg = do
verbosity <- logOptVerbose `liftM` logLoggerOpt
attrs <- processAttributes
when (verbosity >= v) $ liftIO $
rawMessage "MSG" (("v", show v) : attrs) msg True
-- | Log the fact that an actor with the given PID was spawned.
logSpawn :: MonadLog m => ProcessId -> AID -> m ()
logSpawn pid aid = do
attrs <- processAttributes
liftIO $ rawMessage "SPAWN" attrs (show pid ++ " " ++ show aid) False
-- | Log that a connection between actors was established. N.B. it means
-- that the actor now knows where to send data.
logConnect :: MonadLog m => Maybe AID -> Maybe AID -> m ()
logConnect aidSrc aidDst = do
attrs <- processAttributes
liftIO $ rawMessage "CONNECT" attrs (render aidSrc ++ " -> " ++ render aidDst) False
where
render = maybe "-" show
----------------------------------------------------------------
-- API for logging
----------------------------------------------------------------
-- | Put a message into the event log stating that a panic occurred.
panicMsg :: MonadLog m => String -> m ()
panicMsg msg = do
attrs <- processAttributes
liftIO $ rawMessage "PANIC" attrs msg True
-- | Put a message into the log about a fatal error.
fatalMsg :: MonadLog m => String -> m ()
fatalMsg msg = do
verbosity <- logOptVerbose `liftM` logLoggerOpt
when (verbosity >= -2) $ do
attrs <- processAttributes
liftIO $ rawMessage "FATAL" attrs msg True
-- | Put a message into the log about an ordinary error.
errorMsg :: MonadLog m => String -> m ()
errorMsg msg = do
  verbosity <- logOptVerbose `liftM` logLoggerOpt
  when (verbosity >= -1) $ do
    attrs <- processAttributes
    liftIO $ rawMessage "ERROR" attrs msg True
-- | Put a warning message. Warnings have a verbosity of 0.
warningMsg :: MonadLog m => String -> m ()
warningMsg msg = do
  verbosity <- logOptVerbose `liftM` logLoggerOpt
  when (verbosity >= 0) $ do
    attrs <- processAttributes
    liftIO $ rawMessage "WARNING" attrs msg True
-- | Put a debug message
debugMsg :: MonadLog m => String -> m ()
debugMsg msg = do
debugEnabled <- logOptDebugPrint `liftM` logLoggerOpt
case debugEnabled of
NoDebugPrint -> return ()
_ -> do
attrs <- processAttributes
liftIO $ rawMessage "DEBUG" attrs msg True
-- | Synchronize timings - put into eventlog an event with current wall time.
synchronizationPoint :: MonadIO m => String -> m ()
synchronizationPoint msg = liftIO $ do
utcTime <- getCurrentTime
-- we are formatting time to number of seconds in POSIX epoch and
-- fractional part in picoseconds.
let timeString = formatTime defaultTimeLocale "%s.%q" utcTime
humanReadable = formatTime defaultTimeLocale "%F %X" utcTime
rawMessage "SYNC" [("time", timeString)] msg False
rawMessage "MSG" [] ("started at " ++ humanReadable) False
----------------------------------------------------------------
-- Profiling basics
----------------------------------------------------------------
data SamplePoint = StartSample | EndSample
-- | Put measurements about execution time of monadic action into
-- eventlog. Result of action is evaluated to WHNF.
measurement :: MonadIO m
=> (SamplePoint -> m [Attr])
-- ^ Measurements, might add extra attributes
-> String -- ^ Message
-> [Attr] -- ^ Attributes
-> m a -- ^ DNA action to profile
-> m a
measurement sample msg attrs dna = do
-- Get start sample
sample0 <- sample StartSample
liftIO $ rawMessage "START" (attrs ++ sample0) msg False
-- Perform action
r <- liftIO . evaluate =<< dna
-- Get end sample, return
sample1 <- sample EndSample
liftIO $ rawMessage "END" (attrs ++ sample1) msg False
return r
-- | Put measurements about execution time of monadic action into
-- eventlog. Result of action is evaluated to WHNF.
logDuration :: MonadLog m => String -> m a -> m a
logDuration msg dna = do
attrs <- processAttributes
let sample _ = return [] -- measurement is implicit from START/END timestamp
measurement sample msg attrs dna
----------------------------------------------------------------
-- Profiling
----------------------------------------------------------------
-- | A program annotation providing additional information about how
-- much work we expect the program to be doing in a certain phase. The
-- purpose of this hint is that we can set up measurements to match
-- these numbers to the program's real performance. Note that the
-- hint must only be a best-effort estimate. As a rule of thumb, it is
-- better to use a more conservative estimate, as this will generally
-- result in lower performance estimates.
data ProfileHint
= FloatHint { hintFloatOps :: !Int
, hintDoubleOps :: !Int
}
-- ^ Estimate for how many floating point operations the code is
-- executing. Profiling will use @perf_event@ in order to take
-- measurements. Keep in mind that this has double-counting
-- issues (20%-40% are not uncommon for SSE or AVX code).
| MemHint { hintMemoryReadBytes :: !Int
}
-- ^ Estimate for the amount of data that will have to be read
-- from RAM over the course of the kernel calculation.
| IOHint { hintReadBytes :: !Int
, hintWriteBytes :: !Int
}
-- ^ Estimate for how much data the program is reading or
-- writing from/to external sources.
| HaskellHint { hintAllocation :: !Int
}
-- ^ Rough estimate for how much Haskell work we are doing.
| CUDAHint { hintCopyBytesHost :: !Int
, hintCopyBytesDevice :: !Int
, hintCudaFloatOps :: !Int
, hintCudaDoubleOps :: !Int
}
      -- ^ CUDA statistics. The values are hints about how much data
      -- transfer we expect to be targeting the host and the device,
      -- respectively.
--
-- The FLOP hints will only be checked if logging is running in
-- either @"float-ops"@ or @"double-ops"@ mode,
-- respectively. Note that this requires instrumentation, which
-- will reduce overall performance!
floatHint :: ProfileHint
floatHint = FloatHint 0 0
memHint :: ProfileHint
memHint = MemHint 0
ioHint :: ProfileHint
ioHint = IOHint 0 0
haskellHint :: ProfileHint
haskellHint = HaskellHint 0
cudaHint :: ProfileHint
cudaHint = CUDAHint 0 0 0 0
-- | Main profiling function. The concrete information output to the
-- event log depends on the hints about the code's actions.
--
-- Generally, the more hints we have about the code's actions, the
-- better. However, also note that more hints generally mean that we
-- are going to save more information, so keep in mind that every hint
-- means a certain (constant) profiling overhead.
logProfile :: String -- ^ Message. Will be used in profile view
-- to identify costs, so short and
-- recognisable names are preferred.
-> [ProfileHint] -- ^ Hints about the code's complexity.
-> [Attr] -- ^ Extra attributes to add to profile messages
-> IO a -- ^ The code to profile
-> IO a
logProfile msg hints attrs = measurement (liftIO . sample) msg attrs
where sample pt = concat `liftM` mapM (hintToSample pt) hints
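-- A usage sketch (the names @n@, @input@ and @runKernel@ are placeholders,
-- not part of this module):
--
-- > logProfile "fft" [ floatHint { hintDoubleOps = 8 * n }
-- >                  , memHint { hintMemoryReadBytes = 16 * n } ]
-- >            [] (runKernel input)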
-- | Takes a sample according to the given hint
hintToSample :: SamplePoint -> ProfileHint -> IO [Attr]
hintToSample pt fh@FloatHint{}
= consHint pt "hint:float-ops" (hintFloatOps fh)
. consHint pt "hint:double-ops" (hintDoubleOps fh)
<$> floatCounterAttrs pt
hintToSample pt fh@MemHint{}
= consHint pt "hint:mem-read-bytes" (hintMemoryReadBytes fh)
<$> cacheCounterAttrs pt
hintToSample pt ioh@IOHint{}
= consHint pt "hint:read-bytes" (hintReadBytes ioh)
. consHint pt "hint:write-bytes" (hintWriteBytes ioh)
<$> ioAttrs
hintToSample pt hh@HaskellHint{}
= consHint pt "hint:haskell-alloc" (hintAllocation hh)
<$> haskellAttrs
hintToSample pt ch@CUDAHint{}
= consHint pt "hint:memcpy-bytes-host" (hintCopyBytesHost ch)
. consHint pt "hint:memcpy-bytes-device" (hintCopyBytesDevice ch)
. consHint pt "hint:gpu-float-ops" (hintCudaFloatOps ch)
. consHint pt "hint:gpu-double-ops" (hintCudaDoubleOps ch)
<$> cudaAttrs pt
-- | Prepend an attribute if this is the start point, and it is non-zero
consHint :: (Eq a, Num a, Show a)
=> SamplePoint -> String -> a -> [Attr] -> [Attr]
consHint EndSample _ _ = id
consHint _ _ 0 = id
consHint StartSample n v = ((n, show v):)
-- | Prepend an attribute if it is non-zero
consAttrNZ :: (Eq a, Num a, Show a)
=> String -> a -> [Attr] -> [Attr]
consAttrNZ _ 0 = id
consAttrNZ n v = ((n, show v):)
-- | As @consAttrNZ@, but with reference time
consAttrNZT :: (Eq a, Num a, Show a)
=> String -> a -> a -> [Attr] -> [Attr]
consAttrNZT _ 0 _ = id
consAttrNZT n v t = ((n, show v ++ "/" ++ show t):)
----------------------------------------------------------------
-- perf_events sampling
----------------------------------------------------------------
-- | De/initialise perf_events. We re-initialise them for every single
-- kernel call for now - this is probably wasteful, as the RTS
-- ought to be reusing bound threads at some level. Still, I can't
--   think of a good solution to make sure that perf_event handles
--   don't leak, so let's do this for now.
samplePerfEvents :: IORef (Map.Map ThreadId PerfStatGroup) -> [PerfStatDesc]
-> SamplePoint
-> IO [PerfStatCount]
samplePerfEvents countersVar countersDesc StartSample = do
-- Check that the thread is actually bound
isBound <- isCurrentThreadBound
when (not isBound) $ warningMsg "perf_events not running in bound thread!"
-- Open the counters
counters <- perfEventOpen countersDesc
-- Store them as thread-local state
tid <- myThreadId
atomicModifyIORef' countersVar $ flip (,) () . Map.insert tid counters
-- Read values, then enable
vals <- perfEventRead counters
perfEventEnable counters
return vals
samplePerfEvents countersVar _ EndSample = do
tid <- myThreadId
Just counters <- atomicModifyIORef' countersVar $
swap . Map.updateLookupWithKey (\_ _ -> Nothing) tid
-- Take end sample, close counters
vals <- perfEventRead counters
perfEventDisable counters
perfEventClose counters
return vals
-- | Format perf_events counter value for output. We "normalise" the
-- counter values if they have not been running for the full time they
-- have been enabled. This happens due to the kernel multiplexing the
-- counters.
--
-- Note that we lose some accuracy in the process, at some point it
-- might be worthwhile to document this in the profile as well.
formatPerfStat :: Word64 -> PerfStatCount -> String
formatPerfStat multi (PerfStatCount _ _ _ 0) = ""
formatPerfStat multi (PerfStatCount _ val enabled running) =
let f = 4096 -- overflow-save up to about 1250 hours
normalised = val * (enabled * f `div` running) `div` f
in show (multi * normalised) ++ "/" ++ show enabled
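-- A worked (made-up) example of the normalisation: with @multi = 1@, a
-- counter reporting @val = 300@ that was scheduled for only @running = 500@
-- out of @enabled = 1000@ time units is scaled to
-- @300 * (1000 * 4096 `div` 500) `div` 4096 = 600@ and rendered as "600/1000".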
-- | The floating point counters, with associated names
floatCounterDescs :: [(String, PerfStatDesc)]
floatCounterDescs
= [ ("cpu-cycles", PerfDesc $ PERF_TYPE_HARDWARE PERF_COUNT_HW_CPU_CYCLES)
, ("cpu-instructions", PerfDesc $ PERF_TYPE_HARDWARE PERF_COUNT_HW_INSTRUCTIONS)
, ("x87-ops", PfmDesc "FP_COMP_OPS_EXE:X87")
, ("scalar-float-ops", PfmDesc "FP_COMP_OPS_EXE:SSE_FP_SCALAR_SINGLE")
, ("scalar-double-ops", PfmDesc "FP_COMP_OPS_EXE:SSE_SCALAR_DOUBLE")
, ("sse-float-ops", PfmDesc "FP_COMP_OPS_EXE:SSE_PACKED_SINGLE")
, ("sse-double-ops", PfmDesc "FP_COMP_OPS_EXE:SSE_FP_PACKED_DOUBLE")
, ("avx-float-ops", PfmDesc "SIMD_FP_256:PACKED_SINGLE")
, ("avx-double-ops", PfmDesc "SIMD_FP_256:PACKED_DOUBLE")
]
-- | Generate message attributes from current floating point counter values
floatCounterAttrs :: SamplePoint -> IO [Attr]
floatCounterAttrs pt = do
-- Get counters from perf_event
vals <- samplePerfEvents loggerFloatCounters (map snd floatCounterDescs) pt
-- Generate attributes
let fmtName (name, _) = "perf:" ++ name
return $ filter (not . null . snd)
$ zip (map fmtName floatCounterDescs) (map (formatPerfStat 1) vals)
-- | The cache/memory counters, with associated names
cacheCounterDescs :: [(String, PerfStatDesc)]
cacheCounterDescs
= [ ("mem-read-bytes", PfmDesc "OFFCORE_RESPONSE_0:ANY_DATA:LLC_MISS_LOCAL")
]
-- | Generate message attributes from current cache performance counter values
cacheCounterAttrs :: SamplePoint -> IO [Attr]
cacheCounterAttrs pt = do
-- Get counters from perf_event
vals <- samplePerfEvents loggerCacheCounters (map snd cacheCounterDescs) pt
-- Constant enough to hard-code it, I think.
let cacheLine = 32
-- Generate attributes
let fmtName (name, _) = "perf:" ++ name
return $ filter (not . null . snd)
$ zip (map fmtName cacheCounterDescs) (map (formatPerfStat cacheLine) vals)
----------------------------------------------------------------
-- I/O data sampling
----------------------------------------------------------------
-- | Generate message attributes for process I/O statistics
ioAttrs :: IO [Attr]
ioAttrs = do
-- Read /proc/self/io - not the full story by any means, especially
  -- when considering mmap I/O (TODO!), but it's easy.
ios <- map (break (==':')) . lines <$> readFile "/proc/self/io"
let io name = drop 2 $ fromMaybe "" $ lookup name ios
return [ ("proc:read-bytes", io "read_bytes")
, ("proc:write-bytes", io "write_bytes")
]
----------------------------------------------------------------
-- Haskell RTS sampling
----------------------------------------------------------------
-- | Generate message attributes for Haskell RTS statistics
haskellAttrs :: IO [Attr]
haskellAttrs = do
-- This might be slightly controversial: This forces a GC so we get
-- statistics about the *true* memory residency.
performGC
-- Now get statistics
available <- getGCStatsEnabled
if not available then return [] else do
stats <- getGCStats
return [ ("rts:haskell-alloc", show $ bytesAllocated stats)
, ("rts:gc-bytes-copied", show $ bytesCopied stats)
, ("rts:mut-time", show $ mutatorCpuSeconds stats)
, ("rts:gc-time", show $ gcCpuSeconds stats)
, ("rts:heap-size", show $ currentBytesUsed stats)
]
----------------------------------------------------------------
-- CUDA statistics sampling
----------------------------------------------------------------
#ifdef USE_CUDA
-- | CUPTI metrics to use depending on configuration. Returns a table
-- relating metrics to output attribute names.
cudaMetricNames :: LoggerOpt -> [(String, String)]
cudaMetricNames opt = case logOptMeasure opt of
  "fp-inst"    -> [ ("cuda:gpu-float-instrs", "inst_fp_32")
                  , ("cuda:gpu-double-instrs", "inst_fp_64")
                  ]
  "float-ops"  -> [ ("cuda:gpu-float-ops", "flop_count_sp")
                  , ("cuda:gpu-float-ops-add", "flop_count_sp_add")
                  , ("cuda:gpu-float-ops-mul", "flop_count_sp_mul")
                  , ("cuda:gpu-float-ops-fma", "flop_count_sp_fma")
                  ]
  "double-ops" -> [ ("cuda:gpu-double-ops", "flop_count_dp")
                  , ("cuda:gpu-double-ops-add", "flop_count_dp_add")
                  , ("cuda:gpu-double-ops-mul", "flop_count_dp_mul")
                  , ("cuda:gpu-double-ops-fma", "flop_count_dp_fma")
                  ]
  _other       -> [ ]
cudaInit :: LoggerOpt -> IO ()
cudaInit opt = do
when (logOptMeasure opt `elem` ["help", "list"]) $
putStrLn "Supported metric groups: fp-inst, float-ops, double-ops"
cuptiMetricsInit $ map snd $ cudaMetricNames opt
cudaAttrs :: MonadLog m => SamplePoint -> m [Attr]
cudaAttrs pt = do
-- Get metrics
state <- logLoggerOpt
let metricNames = map fst $ cudaMetricNames state
-- Enable CUPTI if required
case pt of
StartSample -> liftIO $ modifyMVar_ loggerCuptiEnabled $ \f -> do
unless f $ do
cuptiEnable
when (not $ null metricNames) cuptiMetricsEnable
return True
EndSample -> liftIO $ modifyMVar_ loggerCuptiEnabled $ \f -> do
when f $ do
cuptiDisable
when (not $ null metricNames) cuptiMetricsDisable
return False
liftIO $ do
-- Flush, so statistics are current
cuptiFlush
-- Then read stats
memsetTime <- cuptiGetMemsetTime
kernelTime <- cuptiGetKernelTime
overheadTime <- cuptiGetOverheadTime
memsetBytes <- cuptiGetMemsetBytes
memcpyTimeH <- cuptiGetMemcpyTimeTo CUptiHost
memcpyTimeD <- (+) <$> cuptiGetMemcpyTimeTo CUptiDevice
<*> cuptiGetMemcpyTimeTo CUptiArray
memcpyBytesH <- cuptiGetMemcpyBytesTo CUptiHost
memcpyBytesD <- (+) <$> cuptiGetMemcpyBytesTo CUptiDevice
<*> cuptiGetMemcpyBytesTo CUptiArray
-- Read metrics
metrics <- cuptiGetMetrics
let formatMetric m = show m ++ "/" ++ show kernelTime
metricAttrs = zipWith (,) metricNames (map formatMetric metrics)
-- Generate attributes
return $ consAttrNZ "cuda:kernel-time" kernelTime
$ consAttrNZ "cuda:overhead-time" overheadTime
$ consAttrNZT "cuda:memset-bytes" memsetBytes memsetTime
$ consAttrNZT "cuda:memcpy-bytes-host" memcpyBytesH memcpyTimeH
$ consAttrNZT "cuda:memcpy-bytes-device" memcpyBytesD memcpyTimeD
$ metricAttrs
#else
cudaAttrs :: SamplePoint -> IO [Attr]
cudaAttrs _ = return []
#endif
| SKA-ScienceDataProcessor/RC | MS3/lib/DNA/Logging.hs | apache-2.0 | 27,011 | 0 | 20 | 6,381 | 4,905 | 2,610 | 2,295 | 349 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module CloudTests (cloudTests) where
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteArray as BA
import System.Directory
import System.Directory.Tree
import System.FilePath
import System.FilePath.Glob
import System.IO
import System.IO.Temp
#ifndef WINBUILD
import System.Posix.Files
#endif
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.ByteString.Lazy as BL
import PrivateCloud.Cloud.Crypto
import PrivateCloud.Cloud.DirTree
import PrivateCloud.Cloud.LocalDb
import PrivateCloud.Cloud.Monad
import PrivateCloud.Cloud.Sync
import PrivateCloud.Provider.Types
import Provider
cloudTests :: TestTree
cloudTests = testGroup "PrivateCloud tests"
[ testGroup "Helper function tests"
[ testCase "zipLists3" testZipLists3
, testCase "path2entry" testPath2Entry
, testCase "entry2path" testEntry2Path
, testCase "entryFile" testEntryFile
]
, testGroup "DirTree tests"
[ testCase "makeTree" testMakeTree
, testCase "unrollTreeFiles" testUnrollTreeFiles
]
, testGroup "Crypto tests"
[ testCase "File HMAC correct" testFileHMAC
]
, testGroup "Local database tests"
[ testCase "Add and read works" testDbAddRead
, testCase "Update works" testDbUpdate
, testCase "Remove works" testDbDelete
]
, testGroup "Sync tests"
[ testCase "getFileChanges" testGetFileChanges
]
]
dbPattern :: Pattern
dbPattern = compile dbName
normalizeTree :: DirTree (Maybe LocalFileInfo) -> DirTree (Maybe LocalFileInfo)
normalizeTree = fmap (fmap fixModTime)
where
fixModTime f = f{lfModTime = Timestamp 42}
sampleTree :: DirTree (Maybe LocalFileInfo)
sampleTree = Dir { name = "root", contents =
[ Dir { name = "a", contents =
[ Dir { name = "b", contents =
[ Dir { name = "c", contents =
[ Dir { name = "d", contents = [] }
, File
{ name = "foo"
, file = Just LocalFileInfo
{ lfLength = 3
, lfModTime = Timestamp 42
}
}
#ifndef WINBUILD
, File { name = "pipe", file = Nothing }
#endif
]}
, Dir { name = "e", contents =
[ Dir { name = "f", contents =
[ File
{ name = "foo"
, file = Just LocalFileInfo
{ lfLength = 4
, lfModTime = Timestamp 18
}
}
]}
]}
]}
]}
]}
testMakeTree :: Assertion
testMakeTree = withSystemTempDirectory "privatecloud.test" $ \tmpdir -> do
let tmpname = last $ splitDirectories tmpdir
createDirectoryIfMissing True (tmpdir </> "a" </> "b" </> "c" </> "d")
createDirectoryIfMissing True (tmpdir </> "a" </> "b" </> "e" </> "f")
#ifndef WINBUILD
createNamedPipe (tmpdir </> "a" </> "b" </> "c" </> "pipe") ownerReadMode
#endif
writeFile (tmpdir </> "a" </> "b" </> "c" </> "foo") "foo"
writeFile (tmpdir </> "a" </> "b" </> "e" </> "f" </> "foo") "barr"
tree <- makeTree tmpdir
assertEqual "Incorrect tree read"
Dir { name = tmpname, contents =
[ Dir { name = "a", contents =
[ Dir { name = "b", contents =
[ Dir { name = "c", contents =
[ Dir { name = "d", contents = [] }
, File
{ name = "foo"
, file = Just LocalFileInfo { lfLength = 3, lfModTime = Timestamp 42 }
}
#ifndef WINBUILD
, File { name = "pipe", file = Nothing }
#endif
]}
, Dir { name = "e", contents =
[ Dir { name = "f", contents =
[ File
{ name = "foo"
, file = Just LocalFileInfo { lfLength = 4, lfModTime = Timestamp 42 }
}
]}
]}
]}
]}
]}
(normalizeTree tree)
testUnrollTreeFiles :: Assertion
testUnrollTreeFiles = do
let files = unrollTreeFiles sampleTree
assertEqual "Incorrect files extracted"
[ (EntryName "a/b/c/foo", LocalFileInfo { lfLength = 3, lfModTime = Timestamp 42 })
, (EntryName "a/b/e/f/foo", LocalFileInfo { lfLength = 4, lfModTime = Timestamp 18 })
]
files
testFileHMAC :: Assertion
testFileHMAC = withSystemTempFile "hmactest.dat" $ \filename h -> do
BL.hPut h $ BL.take (1024 * 1024 * 3 + 150) $ BL.iterate (+ 1) 0
hClose h
hmac <- makeFileHash filename
assertEqual "HMAC BASE64 mismatch" (Hash "xDyiz7u/CYHtdXw9ouvCYngPMUYMT0pEZBOwqvh355M=") hmac
testDbAddRead :: Assertion
testDbAddRead = withSystemTempDirectory "sqlite.test" $ \tmpdir -> do
let srchash = Hash "12345"
let srcsize = 123
let srcts = Timestamp 9876
_ <- setupTestCloud tmpdir "foobar"
[(fname, info)] <- runTestCloud tmpdir [] (const $ pure $ Just BA.empty) $ do
putFileInfo (EntryName "foo") DbFileInfo
{ dfHash = srchash
, dfLength = srcsize
, dfModTime = srcts
}
getFileList
assertEqual "invalid filename read" (EntryName "foo") fname
assertEqual "invalid hash read" srchash (dfHash info)
assertEqual "invalid size read" srcsize (dfLength info)
assertEqual "invalid modtime read" srcts (dfModTime info)
testDbUpdate :: Assertion
testDbUpdate = withSystemTempDirectory "sqlite.test" $ \tmpdir -> do
let srchash = Hash "12345"
let srcsize = 123
let srcts = Timestamp 9876
let secondHash = Hash "78901"
let secondSize = 1024
let secondts = Timestamp 5436
_ <- setupTestCloud tmpdir "foobar"
runTestCloud tmpdir [] (const $ pure $ Just BA.empty) $ do
putFileInfo (EntryName "foo") DbFileInfo
{ dfHash = srchash
, dfLength = srcsize
, dfModTime = srcts
}
putFileInfo (EntryName "foo") DbFileInfo
{ dfHash = secondHash
, dfLength = secondSize
, dfModTime = secondts
}
[(fname, info)] <- runTestCloud tmpdir [] (const $ pure $ Just BA.empty) getFileList
assertEqual "invalid filename read" (EntryName "foo") fname
assertEqual "invalid hash read" secondHash (dfHash info)
assertEqual "invalid size read" secondSize (dfLength info)
assertEqual "invalid modtime read" secondts (dfModTime info)
testDbDelete :: Assertion
testDbDelete = withSystemTempDirectory "sqlite.test" $ \tmpdir -> do
let srchash = Hash "12345"
let srcsize = 123
let srcts = Timestamp 9876
_ <- setupTestCloud tmpdir "foobar"
runTestCloud tmpdir [] (const $ pure $ Just BA.empty) $ do
v <- getFileList
liftIO $ assertEqual "unexpected data found" [] v
putFileInfo (EntryName "foo") DbFileInfo
{ dfHash = srchash
, dfLength = srcsize
, dfModTime = srcts
}
[(fname, info)] <- runTestCloud tmpdir [] (const $ pure $ Just BA.empty) $ do
v <- getFileList
deleteFileInfo (EntryName "foo")
pure v
assertEqual "invalid filename read" (EntryName "foo") fname
assertEqual "invalid hash read" srchash (dfHash info)
assertEqual "invalid size read" srcsize (dfLength info)
assertEqual "invalid modtime read" srcts (dfModTime info)
v <- runTestCloud tmpdir [] (const $ pure $ Just BA.empty) getFileList
assertEqual "data found after delete" [] v
testGetFileChanges :: Assertion
testGetFileChanges = withSystemTempDirectory "privatecloud.test" $ \root -> do
_ <- setupTestCloud root "foobar"
createDirectoryIfMissing True (root </> "a" </> "b" </> "c" </> "d")
createDirectoryIfMissing True (root </> "a" </> "b" </> "e" </> "f")
#ifndef WINBUILD
createNamedPipe (root </> "a" </> "b" </> "c" </> "pipe") ownerReadMode
#endif
writeFile (root </> "a" </> "b" </> "c" </> "foo") "foo"
writeFile (root </> "a" </> "b" </> "e" </> "f" </> "foo") "barr"
let getChanges' func serverFiles = do
localFiles <- map func . unrollTreeFiles . normalizeTree <$> makeTree root
runTestCloud root [dbPattern] (const $ pure $ Just BA.empty) $ do
dbFiles <- getFileList
getAllFileChanges localFiles dbFiles serverFiles
let getChanges = getChanges' id
let cloud2db CloudFileInfo{..} = DbFileInfo
{ dfLength = cfLength
, dfModTime = cfModTime
, dfHash = cfHash
}
let local2db filename LocalFileInfo{..} = do
hash <- liftIO $ makeFileHash (root </> entry2path filename)
pure DbFileInfo
{ dfHash = hash
, dfLength = lfLength
, dfModTime = lfModTime
}
let updateDb changes =
runTestCloud root [dbPattern] (const $ pure $ Just BA.empty) $ forM_ changes $ \case
UpdateLocalFile{..} -> putFileInfo faFilename (cloud2db faCloudInfo)
UpdateLocalMetadata{..} -> putFileInfo faFilename (cloud2db faCloudInfo)
DeleteLocalFile{..} -> deleteFileInfo faFilename
UpdateCloudFile{..} -> do
info <- local2db faFilename faLocalInfo
putFileInfo faFilename info
UpdateCloudMetadata{..} -> do
info <- local2db faFilename faLocalInfo
putFileInfo faFilename info
DeleteCloudFile{..} -> deleteFileInfo faFilename
_ -> pure ()
let check msg server golden = do
diff <- getChanges server
assertEqual msg golden diff
updateDb diff
check "incorrect change list on initial add" []
[ UpdateCloudFile
{ faFilename = EntryName "a/b/c/foo"
, faLocalInfo = LocalFileInfo
{ lfLength = 3
, lfModTime = Timestamp 42
}
}
, UpdateCloudFile
{ faFilename = EntryName "a/b/e/f/foo"
, faLocalInfo = LocalFileInfo
{ lfLength = 4
, lfModTime = Timestamp 42
}
}
]
check "can't detect absense of changes"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "uP6ff2JVpvoI9mirYyqNCBrYeYPHfNJ05IzkUPCzSf0="
, cfLength = 3
, cfModTime = Timestamp 42
, cfStorageId = StorageId "100"
}
)
, ( EntryName "a/b/e/f/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "101"
}
)
]
[]
writeFile (root </> "a" </> "b" </> "c" </> "foo") "fooo"
check "can't detect file write"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "uP6ff2JVpvoI9mirYyqNCBrYeYPHfNJ05IzkUPCzSf0="
, cfLength = 3
, cfModTime = Timestamp 42
, cfStorageId = StorageId "100"
}
)
, ( EntryName "a/b/e/f/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "101"
}
)
]
[ UpdateCloudFile
{ faFilename = EntryName "a/b/c/foo"
, faLocalInfo = LocalFileInfo
{ lfLength = 4
, lfModTime = Timestamp 42
}
}
]
writeFile (root </> "a" </> "b" </> "c" </> "foo") "foo1"
diff3 <- getChanges'
( \(f, i) -> if f == EntryName "a/b/c/foo"
then (f, i { lfModTime = Timestamp 1 })
else (f, i)
)
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "gr1/Vw2+cz8R9FBnSoLbUsHMtU0ZIyNVbTfKfFKlOQ8="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "105"
}
)
, ( EntryName "a/b/e/f/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "101"
}
)
]
assertEqual "can't detect file write without len change"
[ UpdateCloudFile
{ faFilename = EntryName "a/b/c/foo"
, faLocalInfo = LocalFileInfo
{ lfLength = 4
, lfModTime = Timestamp 1
}
}
]
diff3
updateDb diff3
check "can't detect timestamp only update"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 1
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/f/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "101"
}
)
]
[ UpdateCloudMetadata
{ faFilename = EntryName "a/b/c/foo"
, faLocalInfo = LocalFileInfo
{ lfLength = 4
, lfModTime = Timestamp 42
}
, faExpectedHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
}
]
removeFile (root </> "a" </> "b" </> "e" </> "f" </> "foo")
check "can't detect file removal"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/f/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "101"
}
)
]
[ DeleteCloudFile
{ faFilename = EntryName "a/b/e/f/foo"
}
]
check "can't detect server add"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/buzz"
, CloudFile CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "1"
}
)
]
[ UpdateLocalFile
{ faFilename = EntryName "a/b/e/buzz"
, faCloudInfo = CloudFileInfo
{ cfHash = Hash "fX09aHuHgTdR56bXs4MJrPLp4ejuE9TFmVirwomIXgg="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "1"
}
}
]
writeFile (root </> "a" </> "b" </> "e" </> "buzz") "barr"
check "can't detect server edit"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/buzz"
, CloudFile CloudFileInfo
{ cfHash = Hash "gr1/Vw2+cz8R9FBnSoLbUsHMtU0ZIyNVbTfKfFKlOQ8="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "2"
}
)
]
[ UpdateLocalFile
{ faFilename = EntryName "a/b/e/buzz"
, faCloudInfo = CloudFileInfo
{ cfHash = Hash "gr1/Vw2+cz8R9FBnSoLbUsHMtU0ZIyNVbTfKfFKlOQ8="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "2"
}
}
]
writeFile (root </> "a" </> "b" </> "e" </> "buzz") "fooo"
check "can't detect server metadata change"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/buzz"
, CloudFile CloudFileInfo
{ cfHash = Hash "gr1/Vw2+cz8R9FBnSoLbUsHMtU0ZIyNVbTfKfFKlOQ8="
, cfLength = 4
, cfModTime = Timestamp 50
, cfStorageId = StorageId "2"
}
)
]
[ UpdateLocalMetadata
{ faFilename = EntryName "a/b/e/buzz"
, faCloudInfo = CloudFileInfo
{ cfHash = Hash "gr1/Vw2+cz8R9FBnSoLbUsHMtU0ZIyNVbTfKfFKlOQ8="
, cfLength = 4
, cfModTime = Timestamp 50
, cfStorageId = StorageId "2"
}
}
]
check "can't detect server delete with marker"
[ ( EntryName "a/b/c/foo"
, CloudFile CloudFileInfo
{ cfHash = Hash "jQLp51ZKQgMBfxkw7OA4t1mvD7Jwq8ek/x2NL6jh+7o="
, cfLength = 4
, cfModTime = Timestamp 42
, cfStorageId = StorageId "108"
}
)
, ( EntryName "a/b/e/buzz"
, CloudDeleteMarker
)
]
[ DeleteLocalFile
{ faFilename = EntryName "a/b/e/buzz"
}
]
removeFile (root </> "a" </> "b" </> "e" </> "buzz")
check "can't detect server delete with missing record"
[ ( EntryName "a/b/e/buzz"
, CloudDeleteMarker
)
]
[ DeleteLocalFile
{ faFilename = EntryName "a/b/c/foo"
}
]
removeFile (root </> "a" </> "b" </> "c" </> "foo")
testZipLists3 :: Assertion
testZipLists3 =
assertEqual "zipLists"
[ (1, Just "1a", Just "1b", Nothing)
, (2, Just "2a", Nothing, Nothing)
, (3, Nothing, Just "3b", Nothing)
, (4, Nothing, Nothing, Just "4c")
, (5, Just "5a", Just "5b", Just "5c")
, (6, Just "6a", Nothing, Just "6c")
, (8, Nothing, Just "8b", Just "8c")
, (9, Nothing, Nothing, Just "9c")
]
(zipLists3
[ (1 :: Int, "1a" :: String)
, (2, "2a")
, (5, "5a")
, (6, "6a")
]
[ (1, "1b" :: String)
, (3, "3b")
, (5, "5b")
, (8, "8b")
]
[ (4, "4c" :: String)
, (5, "5c")
, (6, "6c")
, (8, "8c")
, (9, "9c")
]
)
testPath2Entry :: Assertion
testPath2Entry = do
assertEqual "single name" (EntryName "foo") $ path2entry "foo"
assertEqual "path with directory"
(EntryName "foo/bar.dat/buzz.txt")
(path2entry $ joinPath ["foo", "bar.dat", "buzz.txt"])
assertEqual "multi-slash name"
(EntryName "foo/bar/buzz.txt")
(path2entry $ "foo" ++ pathSeparator : pathSeparator : "bar" </> "buzz.txt")
testEntry2Path :: Assertion
testEntry2Path = do
assertEqual "single name" "foo" (entry2path $ EntryName "foo")
assertEqual "path with directory"
(joinPath ["foo", "bar.dat", "buzz.txt"])
(entry2path $ EntryName "foo/bar.dat/buzz.txt")
testEntryFile :: Assertion
testEntryFile = do
assertEqual "single name" "foo" (entryFile $ EntryName "foo")
assertEqual "path with directory"
"buzz.txt"
(entryFile $ EntryName "foo/bar.dat/buzz.txt")
| rblaze/private-cloud | test/CloudTests.hs | apache-2.0 | 20,596 | 0 | 30 | 7,502 | 4,854 | 2,578 | 2,276 | 438 | 8 |
import Control.Concurrent
import Control.Exception
import System.Posix
main = do
pid <- forkProcess $ do
handle (\UserInterrupt{} -> putStrLn "caught")
$ threadDelay 2000000
signalProcess sigINT pid
threadDelay 2000000
| mettekou/ghc | testsuite/tests/rts/T12903.hs | bsd-3-clause | 255 | 0 | 15 | 63 | 74 | 35 | 39 | 9 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Duckling.Distance.Types where
import Control.DeepSeq
import Data.Aeson
import Data.Hashable
import Data.Text (Text)
import GHC.Generics
import Prelude
import qualified Data.HashMap.Strict as H
import qualified Data.Text as Text
import Duckling.Resolve (Resolve(..))
data Unit
= Centimetre
| Foot
| Inch
| Kilometre
| M -- ambiguous between Mile and Metre
| Metre
| Mile
| Millimetre
| Yard
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance ToJSON Unit where
toJSON = String . Text.toLower . Text.pack . show
data DistanceData = DistanceData
{ unit :: Maybe Unit
, value :: Maybe Double
, minValue :: Maybe Double
, maxValue :: Maybe Double
}
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance Resolve DistanceData where
type ResolvedValue DistanceData = DistanceValue
resolve _ _ DistanceData {unit = Just unit, value = Just val} =
Just (simple unit val, False)
resolve _ _ DistanceData {unit = Just unit, value = Nothing
, minValue = Just from, maxValue = Just to} =
Just (between unit (from, to), False)
resolve _ _ DistanceData {unit = Just unit, value = Nothing
, minValue = Just from, maxValue = Nothing} =
Just (above unit from, False)
resolve _ _ DistanceData {unit = Just unit, value = Nothing
, minValue = Nothing, maxValue = Just to} =
Just (under unit to, False)
resolve _ _ _ = Nothing
data IntervalDirection = Above | Under
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
data SingleValue = SingleValue
{ vUnit :: Unit
, vValue :: Double
}
deriving (Eq, Ord, Show)
instance ToJSON SingleValue where
toJSON (SingleValue unit value) = object
[ "value" .= value
, "unit" .= unit
]
data DistanceValue
= SimpleValue SingleValue
| IntervalValue (SingleValue, SingleValue)
| OpenIntervalValue (SingleValue, IntervalDirection)
deriving (Eq, Ord, Show)
instance ToJSON DistanceValue where
toJSON (SimpleValue value) = case toJSON value of
Object o -> Object $ H.insert "type" (toJSON ("value" :: Text)) o
_ -> Object H.empty
toJSON (IntervalValue (from, to)) = object
[ "type" .= ("interval" :: Text)
, "from" .= toJSON from
, "to" .= toJSON to
]
toJSON (OpenIntervalValue (from, Above)) = object
[ "type" .= ("interval" :: Text)
, "from" .= toJSON from
]
toJSON (OpenIntervalValue (to, Under)) = object
[ "type" .= ("interval" :: Text)
, "to" .= toJSON to
]
-- -----------------------------------------------------------------
-- Value helpers
simple :: Unit -> Double -> DistanceValue
simple u v = SimpleValue $ single u v
between :: Unit -> (Double, Double) -> DistanceValue
between u (from, to) = IntervalValue (single u from, single u to)
above :: Unit -> Double -> DistanceValue
above = openInterval Above
under :: Unit -> Double -> DistanceValue
under = openInterval Under
openInterval :: IntervalDirection -> Unit -> Double -> DistanceValue
openInterval direction u v = OpenIntervalValue (single u v, direction)
single :: Unit -> Double -> SingleValue
single u v = SingleValue {vUnit = u, vValue = v}
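-- As an informal illustration of how these helpers serialise (approximate
-- JSON, field order not significant):
--
-- > toJSON (simple Kilometre 3) -- {"type":"value","value":3,"unit":"kilometre"}
-- > toJSON (between Mile (3,5)) -- {"type":"interval","from":{"value":3,"unit":"mile"},"to":{"value":5,"unit":"mile"}}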
| facebookincubator/duckling | Duckling/Distance/Types.hs | bsd-3-clause | 3,600 | 0 | 13 | 783 | 1,096 | 604 | 492 | 90 | 1 |
{-# LANGUAGE TupleSections #-}
module Value (
Infix(..),
Value(..),
Pattern(..),
patVars,
match,
Env,
Var(V),
initialize,
setVars,
setPat,
setPats,
getVal,
getVars,
mapEnv
) where
import Prelude hiding (showList)
import Env (Var(V), getVars, mapEnv)
import qualified Env (Env, initialize, setVars, setPat, setPats, getVal)
import Control.Monad (liftM, zipWithM)
--------------------------------------------------------------------------------
data Infix = Op String Value Infix
| Value Value
deriving Show
data Value =
Nil |
Empty |
Integer Integer |
Char Char |
Fun (Value -> Value) |
IOAction (IO Value) |
Var String Int |
Con String [Value] |
App Value Value |
Lambda [Pattern] Value |
Closure Env [Pattern] Value |
Case Value [(Pattern, Value)] |
Letin [(Pattern, Value)] Value |
Module [(Pattern, Value)] |
Let [(Pattern, Value)] |
Infix Infix |
Err String
data Pattern =
PatNil |
PatEmpty |
PatInteger Integer |
PatVar String Int |
PatUScore |
PatCon String [Pattern]
deriving (Eq, Show)
instance Show Value where
show Nil = "()"
show Empty = "[]"
show (Integer n) = show n
show (Char c) = show c
show (Fun _) = "<function>"
show (IOAction _) = "<IO>"
show (Var i 0) = i
show (Var i n) = i ++ "~" ++ show n
show v@(Con ":" [Char _, _])
= "\"" ++ showStr v ++ "\""
show v@(Con ":" _) = "[" ++ showList v ++ "]"
show (Con n []) = n
show (Con n mems) = "(" ++ n ++ " " ++ unwordsMap show mems ++ ")"
show (App f a) = "(" ++ show f ++ " " ++ show a ++ ")"
show (Lambda ps ex) = showLambda ps ex
show (Closure _ _ _) = "<closure>"
show (Case key alts) = showCase key alts
show (Letin defs ex) = "let " ++ showDefs defs ++ " in " ++ show ex
show (Module defs) = "module " ++ showDefs defs
show (Let defs) = "let " ++ showDefs defs
show (Infix _) = "<infix>"
show (Err msg) = "Error: " ++ msg
showStr :: Value -> String
showStr Empty = ""
showStr (Con ":" [Char '\\', s]) = '\\' : '\\' : showStr s
showStr (Con ":" [Char '\n', s]) = '\\' : 'n' : showStr s
showStr (Con ":" [Char c, s]) = c : showStr s
showStr _ = "Error: bad String"
showList :: Value -> String
showList (Con ":" [v, Empty]) = show v
showList (Con ":" [v, lst]) = show v ++ "," ++ showList lst
showList _ = "Error: bad List"
showLambda :: [Pattern] -> Value -> String
showLambda ps ex = "(\\" ++ unwordsMap showPat ps ++ " -> " ++ show ex ++ ")"
showCase :: Value -> [(Pattern, Value)] -> String
showCase key alts = "case " ++ show key ++ " of { " ++
unwordsMap (\(t, ex) -> showPat t ++ " -> " ++ show ex ++ "; ") alts
++ " }"
showDefs :: [(Pattern, Value)] -> String
showDefs = unwordsMap (\(p, v) -> showPat p ++ " = " ++ show v ++ ";")
showPat :: Pattern -> String
showPat (PatVar var 0) = var
showPat (PatVar var n) = var ++ "~" ++ show n
showPat p = show p
unwordsMap :: (a -> String) -> [a] -> String
unwordsMap = (.) unwords . map
match :: Value -> Pattern -> Maybe [(Var, Value)]
match val (PatVar var n) = Just [(V var n, val)]
match _ PatUScore = Just []
match (Integer i1) (PatInteger i0)
| i1 == i0 = Just []
| otherwise = Nothing
match (Con name1 vals) (PatCon name0 pats)
| name1 == name0 = liftM concat $ zipWithM match vals pats
| otherwise = Nothing
match Empty PatEmpty = Just []
match _ _ = Nothing
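-- A couple of illustrative evaluations of 'match' (made-up values):
--
-- > match (Con "Just" [Integer 5]) (PatCon "Just" [PatVar "x" 0])
-- > -- Just [(V "x" 0, Integer 5)]
-- > match (Integer 3) (PatInteger 4)
-- > -- Nothing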
patVars :: Pattern -> [String]
patVars = map (\(V x _) -> x) . patToVars
patToVars :: Pattern -> [Var]
patToVars (PatCon _ pats) = patToVars `concatMap` pats
patToVars (PatVar var n) = [V var n]
patToVars _ = []
--------------------------------------------------------------------------------
type Env = Env.Env Pattern Value
initialize :: [(String, Value)] -> Env
initialize = Env.initialize (flip PatVar 0)
setVars :: Env -> [(Var, Value)] -> Env
setVars = Env.setVars (\(V x n) -> PatVar x n)
setPat :: Env -> Pattern -> Value -> Env
setPat = Env.setPat patToVars
setPats :: Env -> [(Pattern, Value)] -> Env
setPats = Env.setPats patToVars
getVal :: (Value -> Value) -> Env -> Var -> Maybe Value
getVal = Env.getVal match
| YoshikuniJujo/toyhaskell_haskell | src/Value.hs | bsd-3-clause | 4,069 | 168 | 13 | 902 | 1,998 | 1,056 | 942 | 127 | 1 |
-- |
-- Module : Data.Git.Pack
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unix
--
module Data.Git.Pack
( PackedObjectInfo(..)
, PackedObjectRaw
-- * Enumerators of packs
, packEnumerate
-- * Helpers to process packs
, packOpen
, packClose
-- * Command for the content of a pack
, packReadHeader
, packReadMapAtOffset
, packReadAtOffset
, packReadRawAtOffset
, packEnumerateObjects
    -- * turn a packed object into a GitObject
{-, packedObjectToObject-}
, packObjectFromRaw
) where
import Control.Applicative ((<$>))
import Control.Arrow (second)
import Control.Monad
import System.IO
import System.FilePath
import System.Directory
import Data.Bits
import Data.List
import qualified Data.ByteString.Lazy as L
import Data.Attoparsec (anyWord8)
import qualified Data.Attoparsec as A
import qualified Data.Attoparsec.Lazy as AL
import Data.Git.Internal
import Data.Git.Object
{-import Data.Git.Delta-}
import Data.Git.Ref
import Data.Git.FileReader
import Data.Git.Path
import Data.Word
type PackedObjectRaw = (PackedObjectInfo, L.ByteString)
data PackedObjectInfo = PackedObjectInfo
{ poiType :: ObjectType
, poiOffset :: Word64
, poiSize :: Word64
, poiActualSize :: Word64
, poiExtra :: Maybe ObjectPtr
} deriving (Show,Eq)
-- | Enumerate the pack refs available in this repository.
packEnumerate :: FilePath -> IO [Ref]
packEnumerate repoPath = map onlyHash . filter isPackFile <$> getDirectoryContents (repoPath </> "objects" </> "pack")
where
isPackFile x = ".pack" `isSuffixOf` x
onlyHash = fromHexString . takebut 5 . drop 5
takebut n l = take (length l - n) l
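    -- For instance a (shortened, hypothetical) file name "pack-2c3fab9.pack"
    -- yields the ref "2c3fab9": 'drop 5' strips the "pack-" prefix and
    -- 'takebut 5' strips the ".pack" suffix.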
-- | open a pack
packOpen :: FilePath -> Ref -> IO FileReader
packOpen repoPath packRef = openFile (packPath repoPath packRef) ReadMode >>= fileReaderNew False
-- | close a pack
packClose :: FileReader -> IO ()
packClose = fileReaderClose
-- | return the number of entries in this pack
packReadHeader :: FilePath -> Ref -> IO Word32
packReadHeader repoPath packRef =
withFileReader (packPath repoPath packRef) $ \filereader ->
fileReaderParse filereader parseHeader
where parseHeader = do
packMagic <- be32 <$> A.take 4
when (packMagic /= 0x5041434b) $ error "not a git packfile"
ver <- be32 <$> A.take 4
when (ver /= 2) $ error ("pack file version not supported: " ++ show ver)
be32 <$> A.take 4
-- | read an object at a specific position using a map function on the objectData
packReadMapAtOffset :: FileReader -> Word64 -> (L.ByteString -> L.ByteString)
-> IO (Maybe GitObject)
packReadMapAtOffset fr offset mapData = fileReaderSeek fr offset >> getNextObject fr mapData
-- | read an object at a specific position
packReadAtOffset :: FileReader -> Word64 -> IO (Maybe GitObject)
packReadAtOffset fr offset = packReadMapAtOffset fr offset id
-- | read a raw representation at a specific position
packReadRawAtOffset :: FileReader -> Word64 -> IO PackedObjectRaw
packReadRawAtOffset fr offset = fileReaderSeek fr offset >> getNextObjectRaw fr
-- | enumerate all objects in this pack and call back @f@ for each raw object
packEnumerateObjects :: FilePath -> Ref -> Int -> (PackedObjectRaw -> IO a) -> IO ()
packEnumerateObjects repoPath packRef entries f =
withFileReader (packPath repoPath packRef) $ \filebuffer -> do
fileReaderSeek filebuffer 12
parseNext filebuffer entries
where
parseNext :: FileReader -> Int -> IO ()
parseNext _ 0 = return ()
parseNext fr ents = getNextObjectRaw fr >>= f >> parseNext fr (ents-1)
getNextObject :: FileReader -> (L.ByteString -> L.ByteString) -> IO (Maybe GitObject)
getNextObject fr mapData =
packedObjectToObject . second mapData <$> getNextObjectRaw fr
packedObjectToObject :: (PackedObjectInfo, L.ByteString) -> Maybe GitObject
packedObjectToObject (PackedObjectInfo { poiType = ty, poiExtra = extra }, objData) =
packObjectFromRaw (ty, extra, objData)
packObjectFromRaw :: (ObjectType, Maybe ObjectPtr, L.ByteString)
-> Maybe GitObject
packObjectFromRaw (TypeCommit, Nothing, objData) =
AL.maybeResult $ AL.parse commitParse objData
packObjectFromRaw (TypeTree, Nothing, objData) =
AL.maybeResult $ AL.parse treeParse objData
packObjectFromRaw (TypeBlob, Nothing, objData) =
AL.maybeResult $ AL.parse blobParse objData
packObjectFromRaw (TypeTag, Nothing, objData) =
AL.maybeResult $ AL.parse tagParse objData
packObjectFromRaw _ = Nothing
getNextObjectRaw :: FileReader -> IO PackedObjectRaw
getNextObjectRaw fr = do
sobj <- fileReaderGetPos fr
(ty, size) <- fileReaderParse fr parseObjectHeader
extra <- case ty of
TypeDeltaRef -> Just . PtrRef . fromBinary <$> fileReaderGetBS 20 fr
TypeDeltaOff -> Just . PtrOfs . deltaOffFromList <$> fileReaderGetVLF fr
_ -> return Nothing
objData <- fileReaderInflateToSize fr size
eobj <- fileReaderGetPos fr
return (PackedObjectInfo ty sobj (eobj - sobj) size extra, objData)
where
parseObjectHeader = do
(m, ty, sz) <- splitFirst <$> anyWord8
size <- if m then (sz +) <$> getNextSize 4 else return sz
return (ty, size)
where
getNextSize n = do
(c, sz) <- splitOther n <$> anyWord8
if c then (sz +) <$> getNextSize (n+7) else return sz
splitFirst :: Word8 -> (Bool, ObjectType, Word64)
splitFirst w = (w `testBit` 7, toEnum $ fromIntegral ((w `shiftR` 4) .&. 0x7), fromIntegral (w .&. 0xf))
splitOther n w = (w `testBit` 7, fromIntegral (w .&. 0x7f) `shiftL` n)
deltaOffFromList (x:xs) = foldl' acc (fromIntegral (x `clearBit` 7)) xs
where acc a w = ((a+1) `shiftL` 7) + fromIntegral (w `clearBit` 7)
deltaOffFromList [] = error "cannot happen"
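-- To make the header decoding above concrete (made-up bytes): the first
-- byte carries the continuation flag (bit 7), the object type (bits 6-4)
-- and the low 4 bits of the size; each following byte contributes 7 more
-- size bits. For example the byte sequence 0xe5 0x8a 0x05 decodes to a
-- size of 0x5 + (0x0a `shiftL` 4) + (0x05 `shiftL` 11) = 5 + 160 + 10240
-- = 10405.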
| Twinside/hit-simple | Data/Git/Pack.hs | bsd-3-clause | 5,767 | 228 | 9 | 1,097 | 1,566 | 896 | 670 | 113 | 6 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
-- | Dealing with Cabal.
module Stack.Package
(readPackage
,readPackageBS
,readPackageDescriptionDir
,readDotBuildinfo
,readPackageUnresolved
,readPackageUnresolvedBS
,resolvePackage
,packageFromPackageDescription
,findOrGenerateCabalFile
,hpack
,Package(..)
,PackageDescriptionPair(..)
,GetPackageFiles(..)
,GetPackageOpts(..)
,PackageConfig(..)
,buildLogPath
,PackageException (..)
,resolvePackageDescription
,packageDescTools
,packageDependencies
,autogenDir
,checkCabalFileName
,printCabalFileWarning
,cabalFilePackageId
,rawParseGPD)
where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C8
import Data.List (isSuffixOf, partition, isPrefixOf)
import Data.List.Extra (nubOrd)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8, decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Compiler
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as Cabal
import qualified Distribution.Package as D
import Distribution.Package hiding (Package,PackageName,packageName,packageVersion,PackageIdentifier)
import qualified Distribution.PackageDescription as D
import Distribution.PackageDescription hiding (FlagName)
import Distribution.PackageDescription.Parse
import qualified Distribution.PackageDescription.Parse as D
import Distribution.ParseUtils
import Distribution.Simple.Utils
import Distribution.System (OS (..), Arch, Platform (..))
import qualified Distribution.Text as D
import qualified Distribution.Types.CondTree as Cabal
import qualified Distribution.Types.ExeDependency as Cabal
import Distribution.Types.ForeignLib
import qualified Distribution.Types.LegacyExeDependency as Cabal
import qualified Distribution.Types.UnqualComponentName as Cabal
import qualified Distribution.Verbosity as D
import Distribution.Version (showVersion)
import Lens.Micro (lens)
import qualified Hpack
import qualified Hpack.Config as Hpack
import Path as FL
import Path.Extra
import Path.Find
import Path.IO hiding (findFiles)
import Stack.Build.Installed
import Stack.Constants
import Stack.Constants.Config
import Stack.Prelude
import Stack.PrettyPrint
import Stack.Types.Build
import Stack.Types.BuildPlan (PackageLocationIndex (..), PackageLocation (..), ExeName (..))
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Runner
import Stack.Types.Version
import qualified System.Directory as D
import System.FilePath (splitExtensions, replaceExtension)
import qualified System.FilePath as FilePath
import System.IO.Error
import System.Process.Run (runCmd, Cmd(..))
data Ctx = Ctx { ctxFile :: !(Path Abs File)
, ctxDir :: !(Path Abs Dir)
, ctxEnvConfig :: !EnvConfig
}
instance HasPlatform Ctx
instance HasGHCVariant Ctx
instance HasLogFunc Ctx where
logFuncL = configL.logFuncL
instance HasRunner Ctx where
runnerL = configL.runnerL
instance HasConfig Ctx
instance HasBuildConfig Ctx
instance HasEnvConfig Ctx where
envConfigL = lens ctxEnvConfig (\x y -> x { ctxEnvConfig = y })
-- | Read the raw, unresolved package information.
readPackageUnresolved :: (MonadIO m, MonadThrow m)
=> Path Abs File
-> m ([PWarning],GenericPackageDescription)
readPackageUnresolved cabalfp =
liftIO (BS.readFile (FL.toFilePath cabalfp))
>>= readPackageUnresolvedBS (PLOther $ PLFilePath $ toFilePath cabalfp)
-- | Read the raw, unresolved package information from a ByteString.
readPackageUnresolvedBS :: (MonadThrow m)
=> PackageLocationIndex FilePath
-> BS.ByteString
-> m ([PWarning],GenericPackageDescription)
readPackageUnresolvedBS source bs =
case rawParseGPD bs of
Left per ->
throwM (PackageInvalidCabalFile source per)
Right x -> return x
-- | A helper function that performs the basic character decoding (lenient
-- UTF-8 with BOM stripping) necessary before parsing.
rawParseGPD :: BS.ByteString
-> Either PError ([PWarning], GenericPackageDescription)
rawParseGPD bs =
case parseGenericPackageDescription chars of
ParseFailed per -> Left per
ParseOk warnings gpkg -> Right (warnings,gpkg)
where
chars = T.unpack (dropBOM (decodeUtf8With lenientDecode bs))
-- https://github.com/haskell/hackage-server/issues/351
dropBOM t = fromMaybe t $ T.stripPrefix "\xFEFF" t
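-- An illustrative sketch (not part of the original module) of driving this
-- helper directly; "some.cabal" is a hypothetical file name:
--
-- > do bs <- BS.readFile "some.cabal"
-- >    case rawParseGPD bs of
-- >      Left err         -> print err
-- >      Right (warns, _) -> mapM_ print warns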
-- | Reads and exposes the package information
readPackage :: (MonadLogger m, MonadIO m)
=> PackageConfig
-> Path Abs File
-> m ([PWarning],Package)
readPackage packageConfig cabalfp =
do (warnings,gpkg) <- liftIO $ readPackageUnresolved cabalfp
return (warnings,resolvePackage packageConfig gpkg)
-- | Reads and exposes the package information, from a ByteString
readPackageBS :: (MonadThrow m)
=> PackageConfig
-> PackageLocationIndex FilePath
-> BS.ByteString
-> m ([PWarning],Package)
readPackageBS packageConfig loc bs =
do (warnings,gpkg) <- readPackageUnresolvedBS loc bs
return (warnings,resolvePackage packageConfig gpkg)
-- | Get 'GenericPackageDescription' and 'PackageDescription' reading info
-- from given directory.
readPackageDescriptionDir
:: (MonadLogger m, MonadIO m, MonadUnliftIO m, MonadThrow m, HasRunner env, HasConfig env,
MonadReader env m)
=> PackageConfig
-> Path Abs Dir
-> m (GenericPackageDescription, PackageDescriptionPair)
readPackageDescriptionDir config pkgDir = do
cabalfp <- findOrGenerateCabalFile pkgDir
gdesc <- liftM snd (readPackageUnresolved cabalfp)
return (gdesc, resolvePackageDescription config gdesc)
-- | Read @<package>.buildinfo@ ancillary files produced by some Setup.hs hooks.
-- The file includes Cabal file syntax to be merged into the package description
-- derived from the package's .cabal file.
--
-- NOTE: not to be confused with BuildInfo, a Stack-internal datatype.
readDotBuildinfo :: MonadIO m
=> Path Abs File
-> m HookedBuildInfo
readDotBuildinfo buildinfofp =
liftIO $ readHookedBuildInfo D.silent (toFilePath buildinfofp)
-- | Print cabal file warnings.
printCabalFileWarning
:: (MonadLogger m, HasRunner env, MonadReader env m)
=> Path Abs File -> PWarning -> m ()
printCabalFileWarning cabalfp =
\case
(PWarning x) ->
prettyWarnL
[ flow "Cabal file warning in"
, display cabalfp <> ":"
, flow x
]
(UTFWarning ln msg) ->
prettyWarnL
[ flow "Cabal file warning in"
, display cabalfp <> ":" <> fromString (show ln) <> ":"
, flow msg
]
-- | Check if the given name in the @Package@ matches the name of the .cabal file
checkCabalFileName :: MonadThrow m => PackageName -> Path Abs File -> m ()
checkCabalFileName name cabalfp = do
    -- Previously, we just used parsePackageNameFromFilePath. However, that can
-- lead to confusing error messages. See:
-- https://github.com/commercialhaskell/stack/issues/895
let expected = packageNameString name ++ ".cabal"
when (expected /= toFilePath (filename cabalfp))
$ throwM $ MismatchedCabalName cabalfp name
-- | Resolve a parsed cabal file into a 'Package', which contains all of
-- the info needed for stack to build the 'Package' given the current
-- configuration.
resolvePackage :: PackageConfig
-> GenericPackageDescription
-> Package
resolvePackage packageConfig gpkg =
packageFromPackageDescription
packageConfig
(genPackageFlags gpkg)
(resolvePackageDescription packageConfig gpkg)
packageFromPackageDescription :: PackageConfig
-> [D.Flag]
-> PackageDescriptionPair
-> Package
packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkgNoMod pkg) =
Package
{ packageName = name
, packageVersion = fromCabalVersion (pkgVersion pkgId)
, packageLicense = license pkg
, packageDeps = deps
, packageFiles = pkgFiles
, packageTools = packageDescTools pkg
, packageGhcOptions = packageConfigGhcOptions packageConfig
, packageFlags = packageConfigFlags packageConfig
, packageDefaultFlags = M.fromList
[(fromCabalFlagName (flagName flag), flagDefault flag) | flag <- pkgFlags]
, packageAllDeps = S.fromList (M.keys deps)
, packageLibraries =
let mlib = do
lib <- library pkg
guard $ buildable $ libBuildInfo lib
Just lib
in
case mlib of
Nothing
| null extraLibNames -> NoLibraries
| otherwise -> error "Package has buildable sublibraries but no buildable libraries, I'm giving up"
Just _ -> HasLibraries foreignLibNames
, packageTests = M.fromList
[(T.pack (Cabal.unUnqualComponentName $ testName t), testInterface t)
| t <- testSuites pkgNoMod
, buildable (testBuildInfo t)
]
, packageBenchmarks = S.fromList
[T.pack (Cabal.unUnqualComponentName $ benchmarkName b)
| b <- benchmarks pkgNoMod
, buildable (benchmarkBuildInfo b)
]
    -- As with tests and benchmarks above, only buildable executables are included.
, packageExes = S.fromList
[T.pack (Cabal.unUnqualComponentName $ exeName biBuildInfo)
| biBuildInfo <- executables pkg
, buildable (buildInfo biBuildInfo)]
-- This is an action used to collect info needed for "stack ghci".
-- This info isn't usually needed, so computation of it is deferred.
, packageOpts = GetPackageOpts $
\sourceMap installedMap omitPkgs addPkgs cabalfp ->
do (componentsModules,componentFiles,_,_) <- getPackageFiles pkgFiles cabalfp
componentsOpts <-
generatePkgDescOpts sourceMap installedMap omitPkgs addPkgs cabalfp pkg componentFiles
return (componentsModules,componentFiles,componentsOpts)
, packageHasExposedModules = maybe
False
(not . null . exposedModules)
(library pkg)
, packageBuildType = buildType pkg
, packageSetupDeps = msetupDeps
}
where
extraLibNames = S.union subLibNames foreignLibNames
subLibNames
= S.fromList
$ map (T.pack . Cabal.unUnqualComponentName)
$ mapMaybe libName -- this is a design bug in the Cabal API: this should statically be known to exist
$ filter (buildable . libBuildInfo)
$ subLibraries pkg
foreignLibNames
= S.fromList
$ map (T.pack . Cabal.unUnqualComponentName . foreignLibName)
$ filter (buildable . foreignLibBuildInfo)
$ foreignLibs pkg
-- Gets all of the modules, files, build files, and data files that
-- constitute the package. This is primarily used for dirtiness
-- checking during build, as well as use by "stack ghci"
pkgFiles = GetPackageFiles $
\cabalfp -> debugBracket ("getPackageFiles" <+> display cabalfp) $ do
let pkgDir = parent cabalfp
distDir <- distDirFromDir pkgDir
env <- view envConfigL
(componentModules,componentFiles,dataFiles',warnings) <-
runReaderT
(packageDescModulesAndFiles pkg)
(Ctx cabalfp (buildDir distDir) env)
setupFiles <-
if buildType pkg `elem` [Nothing, Just Custom]
then do
let setupHsPath = pkgDir </> $(mkRelFile "Setup.hs")
setupLhsPath = pkgDir </> $(mkRelFile "Setup.lhs")
setupHsExists <- doesFileExist setupHsPath
if setupHsExists then return (S.singleton setupHsPath) else do
setupLhsExists <- doesFileExist setupLhsPath
if setupLhsExists then return (S.singleton setupLhsPath) else return S.empty
else return S.empty
buildFiles <- liftM (S.insert cabalfp . S.union setupFiles) $ do
let hpackPath = pkgDir </> $(mkRelFile Hpack.packageConfig)
hpackExists <- doesFileExist hpackPath
return $ if hpackExists then S.singleton hpackPath else S.empty
return (componentModules, componentFiles, buildFiles <> dataFiles', warnings)
pkgId = package pkg
name = fromCabalPackageName (pkgName pkgId)
deps = M.filterWithKey (const . not . isMe) (M.union
(packageDependencies pkg)
-- We include all custom-setup deps - if present - in the
-- package deps themselves. Stack always works with the
-- invariant that there will be a single installed package
-- relating to a package name, and this applies at the setup
-- dependency level as well.
(fromMaybe M.empty msetupDeps))
msetupDeps = fmap
(M.fromList . map (depName &&& depRange) . setupDepends)
(setupBuildInfo pkg)
-- Is the package dependency mentioned here me: either the package
-- name itself, or the name of one of the sub libraries
isMe name' = name' == name || packageNameText name' `S.member` extraLibNames
-- | Generate GHC options for the package's components, and a list of
-- options which apply generally to the package, not one specific
-- component.
generatePkgDescOpts
:: (HasEnvConfig env, MonadThrow m, MonadReader env m, MonadIO m)
=> SourceMap
-> InstalledMap
-> [PackageName] -- ^ Packages to omit from the "-package" / "-package-id" flags
-> [PackageName] -- ^ Packages to add to the "-package" flags
-> Path Abs File
-> PackageDescription
-> Map NamedComponent (Set DotCabalPath)
-> m (Map NamedComponent BuildInfoOpts)
generatePkgDescOpts sourceMap installedMap omitPkgs addPkgs cabalfp pkg componentPaths = do
config <- view configL
distDir <- distDirFromDir cabalDir
let cabalMacros = autogenDir distDir </> $(mkRelFile "cabal_macros.h")
exists <- doesFileExist cabalMacros
let mcabalMacros =
if exists
then Just cabalMacros
else Nothing
let generate namedComponent binfo =
( namedComponent
, generateBuildInfoOpts BioInput
{ biSourceMap = sourceMap
, biInstalledMap = installedMap
, biCabalMacros = mcabalMacros
, biCabalDir = cabalDir
, biDistDir = distDir
, biOmitPackages = omitPkgs
, biAddPackages = addPkgs
, biBuildInfo = binfo
, biDotCabalPaths = fromMaybe mempty (M.lookup namedComponent componentPaths)
, biConfigLibDirs = configExtraLibDirs config
, biConfigIncludeDirs = configExtraIncludeDirs config
, biComponentName = namedComponent
}
)
return
( M.fromList
(concat
[ maybe
[]
(return . generate CLib . libBuildInfo)
(library pkg)
, fmap
(\exe ->
generate
(CExe (T.pack (Cabal.unUnqualComponentName (exeName exe))))
(buildInfo exe))
(executables pkg)
, fmap
(\bench ->
generate
(CBench (T.pack (Cabal.unUnqualComponentName (benchmarkName bench))))
(benchmarkBuildInfo bench))
(benchmarks pkg)
, fmap
(\test ->
generate
(CTest (T.pack (Cabal.unUnqualComponentName (testName test))))
(testBuildInfo test))
(testSuites pkg)]))
where
cabalDir = parent cabalfp
-- | Input to 'generateBuildInfoOpts'
data BioInput = BioInput
{ biSourceMap :: !SourceMap
, biInstalledMap :: !InstalledMap
, biCabalMacros :: !(Maybe (Path Abs File))
, biCabalDir :: !(Path Abs Dir)
, biDistDir :: !(Path Abs Dir)
, biOmitPackages :: ![PackageName]
, biAddPackages :: ![PackageName]
, biBuildInfo :: !BuildInfo
, biDotCabalPaths :: !(Set DotCabalPath)
, biConfigLibDirs :: !(Set FilePath)
, biConfigIncludeDirs :: !(Set FilePath)
, biComponentName :: !NamedComponent
}
-- | Generate GHC options for the target. Since Cabal also figures out
-- these options, currently this is only used for invoking GHCI (via
-- stack ghci).
generateBuildInfoOpts :: BioInput -> BuildInfoOpts
generateBuildInfoOpts BioInput {..} =
BuildInfoOpts
{ bioOpts = ghcOpts ++ cppOptions biBuildInfo
-- NOTE for future changes: Due to this use of nubOrd (and other uses
-- downstream), these generated options must not rely on multiple
      -- argument sequences. For example, ["--main-is", "Foo.hs",
      -- "--main-is", "Bar.hs"] would potentially break due to the duplicate
-- "--main-is" being removed.
--
-- See https://github.com/commercialhaskell/stack/issues/1255
, bioOneWordOpts = nubOrd $ concat
[extOpts, srcOpts, includeOpts, libOpts, fworks, cObjectFiles]
, bioPackageFlags = deps
, bioCabalMacros = biCabalMacros
}
where
cObjectFiles =
mapMaybe (fmap toFilePath .
makeObjectFilePathFromC biCabalDir biComponentName biDistDir)
cfiles
cfiles = mapMaybe dotCabalCFilePath (S.toList biDotCabalPaths)
-- Generates: -package=base -package=base16-bytestring-0.1.1.6 ...
deps =
concat
[ case M.lookup name biInstalledMap of
Just (_, Stack.Types.Package.Library _ident ipid) -> ["-package-id=" <> ghcPkgIdString ipid]
_ -> ["-package=" <> packageNameString name <>
maybe "" -- This empty case applies to e.g. base.
((("-" <>) . versionString) . piiVersion)
(M.lookup name biSourceMap)]
| name <- pkgs]
pkgs =
biAddPackages ++
[ name
| Dependency cname _ <- targetBuildDepends biBuildInfo
, let name = fromCabalPackageName cname
, name `notElem` biOmitPackages]
ghcOpts = concatMap snd . filter (isGhc . fst) $ options biBuildInfo
where
isGhc GHC = True
isGhc _ = False
extOpts = map (("-X" ++) . D.display) (usedExtensions biBuildInfo)
srcOpts =
map
(("-i" <>) . toFilePathNoTrailingSep)
([biCabalDir | null (hsSourceDirs biBuildInfo)] <>
mapMaybe toIncludeDir (hsSourceDirs biBuildInfo) <>
[autogenDir biDistDir,buildDir biDistDir] <>
[makeGenDir (buildDir biDistDir)
| Just makeGenDir <- [fileGenDirFromComponentName biComponentName]]) ++
["-stubdir=" ++ toFilePathNoTrailingSep (buildDir biDistDir)]
toIncludeDir "." = Just biCabalDir
toIncludeDir relDir = concatAndColapseAbsDir biCabalDir relDir
includeOpts =
map ("-I" <>) (configExtraIncludeDirs <> pkgIncludeOpts)
configExtraIncludeDirs = S.toList biConfigIncludeDirs
pkgIncludeOpts =
[ toFilePathNoTrailingSep absDir
| dir <- includeDirs biBuildInfo
, absDir <- handleDir dir
]
libOpts =
map ("-l" <>) (extraLibs biBuildInfo) <>
map ("-L" <>) (configExtraLibDirs <> pkgLibDirs)
configExtraLibDirs = S.toList biConfigLibDirs
pkgLibDirs =
[ toFilePathNoTrailingSep absDir
| dir <- extraLibDirs biBuildInfo
, absDir <- handleDir dir
]
handleDir dir = case (parseAbsDir dir, parseRelDir dir) of
(Just ab, _ ) -> [ab]
(_ , Just rel) -> [biCabalDir </> rel]
(Nothing, Nothing ) -> []
fworks = map (\fwk -> "-framework=" <> fwk) (frameworks biBuildInfo)
-- | Make the .o path from the .c file path for a component. Example:
--
-- @
-- executable FOO
-- c-sources: cbits/text_search.c
-- @
--
-- Produces
--
-- <dist-dir>/build/FOO/FOO-tmp/cbits/text_search.o
--
-- Example:
--
-- λ> makeObjectFilePathFromC
-- $(mkAbsDir "/Users/chris/Repos/hoogle")
-- CLib
-- $(mkAbsDir "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist")
-- $(mkAbsFile "/Users/chris/Repos/hoogle/cbits/text_search.c")
-- Just "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist/build/cbits/text_search.o"
-- λ> makeObjectFilePathFromC
-- $(mkAbsDir "/Users/chris/Repos/hoogle")
-- (CExe "hoogle")
-- $(mkAbsDir "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist")
-- $(mkAbsFile "/Users/chris/Repos/hoogle/cbits/text_search.c")
-- Just "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist/build/hoogle/hoogle-tmp/cbits/text_search.o"
-- λ>
makeObjectFilePathFromC
:: MonadThrow m
=> Path Abs Dir -- ^ The cabal directory.
-> NamedComponent -- ^ The name of the component.
-> Path Abs Dir -- ^ Dist directory.
-> Path Abs File -- ^ The path to the .c file.
-> m (Path Abs File) -- ^ The path to the .o file for the component.
makeObjectFilePathFromC cabalDir namedComponent distDir cFilePath = do
relCFilePath <- stripProperPrefix cabalDir cFilePath
relOFilePath <-
parseRelFile (replaceExtension (toFilePath relCFilePath) "o")
addComponentPrefix <- fileGenDirFromComponentName namedComponent
return (addComponentPrefix (buildDir distDir) </> relOFilePath)
-- | The directory where generated files are put like .o or .hs (from .x files).
fileGenDirFromComponentName
:: MonadThrow m
=> NamedComponent -> m (Path b Dir -> Path b Dir)
fileGenDirFromComponentName namedComponent =
case namedComponent of
CLib -> return id
CExe name -> makeTmp name
CTest name -> makeTmp name
CBench name -> makeTmp name
where makeTmp name = do
prefix <- parseRelDir (T.unpack name <> "/" <> T.unpack name <> "-tmp")
return (</> prefix)
-- | Make the autogen dir.
autogenDir :: Path Abs Dir -> Path Abs Dir
autogenDir distDir = buildDir distDir </> $(mkRelDir "autogen")
-- | Make the build dir.
buildDir :: Path Abs Dir -> Path Abs Dir
buildDir distDir = distDir </> $(mkRelDir "build")
-- | Make the component-specific subdirectory of the build directory.
getBuildComponentDir :: Maybe String -> Maybe (Path Rel Dir)
getBuildComponentDir Nothing = Nothing
getBuildComponentDir (Just name) = parseRelDir (name FilePath.</> (name ++ "-tmp"))
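-- For example, a component named foo yields @Just@ the relative directory
-- @foo/foo-tmp@, mirroring where Cabal places per-component generated files.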
-- | Get all dependencies of the package (buildable targets only).
packageDependencies :: PackageDescription -> Map PackageName VersionRange
packageDependencies pkg =
M.fromListWith intersectVersionRanges $
map (depName &&& depRange) $
concatMap targetBuildDepends (allBuildInfo' pkg) ++
maybe [] setupDepends (setupBuildInfo pkg)
-- | Get all dependencies of the package (buildable targets only).
--
-- This uses both the new 'buildToolDepends' and old 'buildTools'
-- information.
packageDescTools :: PackageDescription -> Map ExeName VersionRange
packageDescTools =
M.fromList . concatMap tools . allBuildInfo'
where
tools bi = map go1 (buildTools bi) ++ map go2 (buildToolDepends bi)
go1 :: Cabal.LegacyExeDependency -> (ExeName, VersionRange)
go1 (Cabal.LegacyExeDependency name range) = (ExeName $ T.pack name, range)
go2 :: Cabal.ExeDependency -> (ExeName, VersionRange)
go2 (Cabal.ExeDependency _pkg name range) = (ExeName $ T.pack $ Cabal.unUnqualComponentName name, range)
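-- Illustrative mapping (not taken from the original source): a stanza with
--
-- > build-tools:        alex
-- > build-tool-depends: happy:happy >=1.19
--
-- contributes map entries keyed by the executable names "alex" and "happy",
-- coming from the legacy and the new dependency forms respectively.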
-- | Variant of 'allBuildInfo' from Cabal that includes foreign
-- libraries; see <https://github.com/haskell/cabal/issues/4763>
allBuildInfo' :: PackageDescription -> [BuildInfo]
allBuildInfo' pkg = allBuildInfo pkg ++
[ bi | flib <- foreignLibs pkg
, let bi = foreignLibBuildInfo flib
, buildable bi
]
-- | Get all files referenced by the package.
packageDescModulesAndFiles
:: (MonadLogger m, MonadUnliftIO m, MonadReader Ctx m, MonadThrow m)
=> PackageDescription
-> m (Map NamedComponent (Set ModuleName), Map NamedComponent (Set DotCabalPath), Set (Path Abs File), [PackageWarning])
packageDescModulesAndFiles pkg = do
(libraryMods,libDotCabalFiles,libWarnings) <- -- FIXME add in sub libraries
maybe
(return (M.empty, M.empty, []))
(asModuleAndFileMap libComponent libraryFiles)
(library pkg)
(executableMods,exeDotCabalFiles,exeWarnings) <-
liftM
foldTuples
(mapM
(asModuleAndFileMap exeComponent executableFiles)
(executables pkg))
(testMods,testDotCabalFiles,testWarnings) <-
liftM
foldTuples
(mapM (asModuleAndFileMap testComponent testFiles) (testSuites pkg))
(benchModules,benchDotCabalPaths,benchWarnings) <-
liftM
foldTuples
(mapM
(asModuleAndFileMap benchComponent benchmarkFiles)
(benchmarks pkg))
dfiles <- resolveGlobFiles
(extraSrcFiles pkg
++ map (dataDir pkg FilePath.</>) (dataFiles pkg))
let modules = libraryMods <> executableMods <> testMods <> benchModules
files =
libDotCabalFiles <> exeDotCabalFiles <> testDotCabalFiles <>
benchDotCabalPaths
warnings = libWarnings <> exeWarnings <> testWarnings <> benchWarnings
return (modules, files, dfiles, warnings)
where
libComponent = const CLib
exeComponent = CExe . T.pack . Cabal.unUnqualComponentName . exeName
testComponent = CTest . T.pack . Cabal.unUnqualComponentName . testName
benchComponent = CBench . T.pack . Cabal.unUnqualComponentName . benchmarkName
asModuleAndFileMap label f lib = do
(a,b,c) <- f lib
return (M.singleton (label lib) a, M.singleton (label lib) b, c)
foldTuples = foldl' (<>) (M.empty, M.empty, [])
-- | Resolve globbing of files (e.g. data files) to absolute paths.
resolveGlobFiles :: (MonadLogger m,MonadUnliftIO m,MonadReader Ctx m)
=> [String] -> m (Set (Path Abs File))
resolveGlobFiles =
liftM (S.fromList . catMaybes . concat) .
mapM resolve
where
resolve name =
if '*' `elem` name
then explode name
else liftM return (resolveFileOrWarn name)
explode name = do
dir <- asks (parent . ctxFile)
names <-
matchDirFileGlob'
(FL.toFilePath dir)
name
mapM resolveFileOrWarn names
matchDirFileGlob' dir glob =
catch
(matchDirFileGlob_ dir glob)
(\(e :: IOException) ->
if isUserError e
then do
prettyWarnL
[ flow "Wildcard does not match any files:"
, styleFile $ fromString glob
, line <> flow "in directory:"
, styleDir $ fromString dir
]
return []
else throwIO e)
-- | This is a copy/paste of the Cabal library function, but with
--
-- @ext == ext'@
--
-- Changed to
--
-- @isSuffixOf ext ext'@
--
-- So that this will work:
--
-- @
-- λ> matchDirFileGlob_ "." "test/package-dump/*.txt"
-- ["test/package-dump/ghc-7.8.txt","test/package-dump/ghc-7.10.txt"]
-- @
--
matchDirFileGlob_ :: (MonadLogger m, MonadIO m, HasRunner env, MonadReader env m) => String -> String -> m [String]
matchDirFileGlob_ dir filepath = case parseFileGlob filepath of
Nothing -> liftIO $ throwString $
"invalid file glob '" ++ filepath
++ "'. Wildcards '*' are only allowed in place of the file"
++ " name, not in the directory name or file extension."
      ++ " If a wildcard is used it must be with a file extension."
Just (NoGlob filepath') -> return [filepath']
Just (FileGlob dir' ext) -> do
efiles <- liftIO $ try $ D.getDirectoryContents (dir FilePath.</> dir')
let matches =
case efiles of
Left (_ :: IOException) -> []
Right files ->
[ dir' FilePath.</> file
| file <- files
, let (name, ext') = splitExtensions file
, not (null name) && isSuffixOf ext ext'
]
when (null matches) $
prettyWarnL
[ flow "filepath wildcard"
, "'" <> styleFile (fromString filepath) <> "'"
, flow "does not match any files."
]
return matches
-- | Get all files referenced by the benchmark.
benchmarkFiles
:: (MonadLogger m, MonadIO m, MonadReader Ctx m, MonadThrow m)
=> Benchmark -> m (Set ModuleName, Set DotCabalPath, [PackageWarning])
benchmarkFiles bench = do
dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks (parent . ctxFile)
(modules,files,warnings) <-
resolveFilesAndDeps
(Just $ Cabal.unUnqualComponentName $ benchmarkName bench)
(dirs ++ [dir])
(bnames <> exposed)
haskellModuleExts
cfiles <- buildOtherSources build
return (modules, files <> cfiles, warnings)
where
exposed =
case benchmarkInterface bench of
BenchmarkExeV10 _ fp -> [DotCabalMain fp]
BenchmarkUnsupported _ -> []
bnames = map DotCabalModule (otherModules build)
build = benchmarkBuildInfo bench
-- | Get all files referenced by the test.
testFiles
:: (MonadLogger m, MonadIO m, MonadReader Ctx m, MonadThrow m)
=> TestSuite
-> m (Set ModuleName, Set DotCabalPath, [PackageWarning])
testFiles test = do
dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks (parent . ctxFile)
(modules,files,warnings) <-
resolveFilesAndDeps
(Just $ Cabal.unUnqualComponentName $ testName test)
(dirs ++ [dir])
(bnames <> exposed)
haskellModuleExts
cfiles <- buildOtherSources build
return (modules, files <> cfiles, warnings)
where
exposed =
case testInterface test of
TestSuiteExeV10 _ fp -> [DotCabalMain fp]
TestSuiteLibV09 _ mn -> [DotCabalModule mn]
TestSuiteUnsupported _ -> []
bnames = map DotCabalModule (otherModules build)
build = testBuildInfo test
-- | Get all files referenced by the executable.
executableFiles
:: (MonadLogger m, MonadIO m, MonadReader Ctx m, MonadThrow m)
=> Executable
-> m (Set ModuleName, Set DotCabalPath, [PackageWarning])
executableFiles exe = do
dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks (parent . ctxFile)
(modules,files,warnings) <-
resolveFilesAndDeps
(Just $ Cabal.unUnqualComponentName $ exeName exe)
(dirs ++ [dir])
(map DotCabalModule (otherModules build) ++
[DotCabalMain (modulePath exe)])
haskellModuleExts
cfiles <- buildOtherSources build
return (modules, files <> cfiles, warnings)
where
build = buildInfo exe
-- | Get all files referenced by the library.
libraryFiles
:: (MonadLogger m, MonadIO m, MonadReader Ctx m, MonadThrow m)
=> Library -> m (Set ModuleName, Set DotCabalPath, [PackageWarning])
libraryFiles lib = do
dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks (parent . ctxFile)
(modules,files,warnings) <-
resolveFilesAndDeps
Nothing
(dirs ++ [dir])
names
haskellModuleExts
cfiles <- buildOtherSources build
return (modules, files <> cfiles, warnings)
where
names = bnames ++ exposed
exposed = map DotCabalModule (exposedModules lib)
bnames = map DotCabalModule (otherModules build)
build = libBuildInfo lib
-- | Get all C sources and extra source files in a build.
buildOtherSources :: (MonadLogger m,MonadIO m,MonadReader Ctx m)
=> BuildInfo -> m (Set DotCabalPath)
buildOtherSources build =
do csources <- liftM
(S.map DotCabalCFilePath . S.fromList)
(mapMaybeM resolveFileOrWarn (cSources build))
jsources <- liftM
(S.map DotCabalFilePath . S.fromList)
(mapMaybeM resolveFileOrWarn (targetJsSources build))
return (csources <> jsources)
-- | Get the target's JS sources.
targetJsSources :: BuildInfo -> [FilePath]
targetJsSources = jsSources
-- | A pair of package descriptions: one which modified the buildable
-- values of test suites and benchmarks depending on whether they are
-- enabled, and one which does not.
--
-- Fields are intentionally lazy; we may only need one or the other
-- value.
--
-- MSS 2017-08-29: The very presence of this data type is terribly
-- ugly, it represents the fact that the Cabal 2.0 upgrade did _not_
-- go well. Specifically, we used to have a field to indicate whether
-- a component was enabled in addition to buildable, but that's gone
-- now, and this is an ugly proxy. We should at some point clean up
-- the mess of Package, LocalPackage, etc, and probably pull in the
-- definition of PackageDescription from Cabal with our additionally
-- needed metadata. But this is a good enough hack for the
-- moment. Odds are, you're reading this in the year 2024 and thinking
-- "wtf?"
data PackageDescriptionPair = PackageDescriptionPair
{ pdpOrigBuildable :: PackageDescription
, pdpModifiedBuildable :: PackageDescription
}
-- | Evaluates the conditions of a 'GenericPackageDescription', yielding
-- a resolved 'PackageDescription'.
resolvePackageDescription :: PackageConfig
-> GenericPackageDescription
-> PackageDescriptionPair
resolvePackageDescription packageConfig (GenericPackageDescription desc defaultFlags mlib subLibs foreignLibs' exes tests benches) =
PackageDescriptionPair
{ pdpOrigBuildable = go False
, pdpModifiedBuildable = go True
}
where
go modBuildable =
desc {library =
fmap (resolveConditions rc updateLibDeps) mlib
,subLibraries =
map (\(n, v) -> (resolveConditions rc updateLibDeps v){libName=Just n})
subLibs
,foreignLibs =
map (\(n, v) -> (resolveConditions rc updateForeignLibDeps v){foreignLibName=n})
foreignLibs'
,executables =
map (\(n, v) -> (resolveConditions rc updateExeDeps v){exeName=n})
exes
,testSuites =
map (\(n,v) -> (resolveConditions rc (updateTestDeps modBuildable) v){testName=n})
tests
,benchmarks =
map (\(n,v) -> (resolveConditions rc (updateBenchmarkDeps modBuildable) v){benchmarkName=n})
benches}
flags =
M.union (packageConfigFlags packageConfig)
(flagMap defaultFlags)
rc = mkResolveConditions
(packageConfigCompilerVersion packageConfig)
(packageConfigPlatform packageConfig)
flags
updateLibDeps lib deps =
lib {libBuildInfo =
(libBuildInfo lib) {targetBuildDepends = deps}}
updateForeignLibDeps lib deps =
lib {foreignLibBuildInfo =
(foreignLibBuildInfo lib) {targetBuildDepends = deps}}
updateExeDeps exe deps =
exe {buildInfo =
(buildInfo exe) {targetBuildDepends = deps}}
-- Note that, prior to moving to Cabal 2.0, we would set
-- testEnabled/benchmarkEnabled here. These fields no longer
-- exist, so we modify buildable instead here. The only
-- wrinkle in the Cabal 2.0 story is
-- https://github.com/haskell/cabal/issues/1725, where older
-- versions of Cabal (which may be used for actually building
-- code) don't properly exclude build-depends for
-- non-buildable components. Testing indicates that everything
-- is working fine, and that this comment can be completely
-- ignored. I'm leaving the comment anyway in case something
-- breaks and you, poor reader, are investigating.
updateTestDeps modBuildable test deps =
let bi = testBuildInfo test
bi' = bi
{ targetBuildDepends = deps
, buildable = buildable bi && (if modBuildable then packageConfigEnableTests packageConfig else True)
}
in test { testBuildInfo = bi' }
updateBenchmarkDeps modBuildable benchmark deps =
let bi = benchmarkBuildInfo benchmark
bi' = bi
{ targetBuildDepends = deps
, buildable = buildable bi && (if modBuildable then packageConfigEnableBenchmarks packageConfig else True)
}
in benchmark { benchmarkBuildInfo = bi' }
-- | Make a map from a list of flag specifications.
--
-- What is @flagManual@ for?
flagMap :: [Flag] -> Map FlagName Bool
flagMap = M.fromList . map pair
where pair :: Flag -> (FlagName, Bool)
pair (MkFlag (fromCabalFlagName -> name) _desc def _manual) = (name,def)
data ResolveConditions = ResolveConditions
{ rcFlags :: Map FlagName Bool
, rcCompilerVersion :: CompilerVersion 'CVActual
, rcOS :: OS
, rcArch :: Arch
}
-- | Generate a @ResolveConditions@ using sensible defaults.
mkResolveConditions :: CompilerVersion 'CVActual -- ^ Compiler version
-> Platform -- ^ installation target platform
-> Map FlagName Bool -- ^ enabled flags
-> ResolveConditions
mkResolveConditions compilerVersion (Platform arch os) flags = ResolveConditions
{ rcFlags = flags
, rcCompilerVersion = compilerVersion
, rcOS = os
, rcArch = arch
}
-- | Resolve the condition tree for the library.
resolveConditions :: (Monoid target,Show target)
=> ResolveConditions
-> (target -> cs -> target)
-> CondTree ConfVar cs target
-> target
resolveConditions rc addDeps (CondNode lib deps cs) = basic <> children
where basic = addDeps lib deps
children = mconcat (map apply cs)
where apply (Cabal.CondBranch cond node mcs) =
if condSatisfied cond
then resolveConditions rc addDeps node
else maybe mempty (resolveConditions rc addDeps) mcs
condSatisfied c =
case c of
Var v -> varSatisifed v
Lit b -> b
CNot c' ->
not (condSatisfied c')
COr cx cy ->
condSatisfied cx || condSatisfied cy
CAnd cx cy ->
condSatisfied cx && condSatisfied cy
varSatisifed v =
case v of
OS os -> os == rcOS rc
Arch arch -> arch == rcArch rc
Flag flag ->
fromMaybe False $ M.lookup (fromCabalFlagName flag) (rcFlags rc)
-- NOTE: ^^^^^ This should never happen, as all flags
-- which are used must be declared. Defaulting to
-- False.
Impl flavor range ->
case (flavor, rcCompilerVersion rc) of
(GHC, GhcVersion vghc) -> vghc `withinRange` range
(GHC, GhcjsVersion _ vghc) -> vghc `withinRange` range
(GHCJS, GhcjsVersion vghcjs _) ->
vghcjs `withinRange` range
_ -> False
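-- Illustrative example (not from the original source): with a flag map
-- {threaded -> True}, OS Linux and Arch X86_64, a cabal fragment such as
--
-- > if flag(threaded) && !os(windows)
-- >   build-depends: unix
--
-- has its condition evaluate to True, so the branch's contents are merged
-- (via 'mappend') into the parent target by 'resolveConditions'.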
-- | Get the name of a dependency.
depName :: Dependency -> PackageName
depName (Dependency n _) = fromCabalPackageName n
-- | Get the version range of a dependency.
depRange :: Dependency -> VersionRange
depRange (Dependency _ r) = r
-- | Try to resolve the list of base names in the given directory by
-- looking for unique instances of base names applied with the given
-- extensions, plus find any of their module and TemplateHaskell
-- dependencies.
resolveFilesAndDeps
:: (MonadIO m, MonadLogger m, MonadReader Ctx m, MonadThrow m)
=> Maybe String -- ^ Package component name
-> [Path Abs Dir] -- ^ Directories to look in.
-> [DotCabalDescriptor] -- ^ Base names.
-> [Text] -- ^ Extensions.
-> m (Set ModuleName,Set DotCabalPath,[PackageWarning])
resolveFilesAndDeps component dirs names0 exts = do
(dotCabalPaths, foundModules, missingModules) <- loop names0 S.empty
warnings <- liftM2 (++) (warnUnlisted foundModules) (warnMissing missingModules)
return (foundModules, dotCabalPaths, warnings)
where
loop [] _ = return (S.empty, S.empty, [])
loop names doneModules0 = do
resolved <- resolveFiles dirs names exts
let foundFiles = mapMaybe snd resolved
(foundModules', missingModules') = partition (isJust . snd) resolved
foundModules = mapMaybe (dotCabalModule . fst) foundModules'
missingModules = mapMaybe (dotCabalModule . fst) missingModules'
pairs <- mapM (getDependencies component) foundFiles
let doneModules =
S.union
doneModules0
(S.fromList (mapMaybe dotCabalModule names))
moduleDeps = S.unions (map fst pairs)
thDepFiles = concatMap snd pairs
modulesRemaining = S.difference moduleDeps doneModules
-- Ignore missing modules discovered as dependencies - they may
-- have been deleted.
(resolvedFiles, resolvedModules, _) <-
loop (map DotCabalModule (S.toList modulesRemaining)) doneModules
return
( S.union
(S.fromList
(foundFiles <> map DotCabalFilePath thDepFiles))
resolvedFiles
, S.union
(S.fromList foundModules)
resolvedModules
, missingModules)
warnUnlisted foundModules = do
let unlistedModules =
foundModules `S.difference`
S.fromList (mapMaybe dotCabalModule names0)
return $
if S.null unlistedModules
then []
else [ UnlistedModulesWarning
component
(S.toList unlistedModules)]
warnMissing _missingModules = do
return []
-- TODO: bring this back - see
-- https://github.com/commercialhaskell/stack/issues/2649
{-
cabalfp <- asks ctxFile
return $
if null missingModules
then []
else [ MissingModulesWarning
cabalfp
component
missingModules]
-}
-- | Get the dependencies of a Haskell module file.
getDependencies
:: (MonadReader Ctx m, MonadIO m, MonadLogger m)
=> Maybe String -> DotCabalPath -> m (Set ModuleName, [Path Abs File])
getDependencies component dotCabalPath =
case dotCabalPath of
DotCabalModulePath resolvedFile -> readResolvedHi resolvedFile
DotCabalMainPath resolvedFile -> readResolvedHi resolvedFile
DotCabalFilePath{} -> return (S.empty, [])
DotCabalCFilePath{} -> return (S.empty, [])
where
readResolvedHi resolvedFile = do
dumpHIDir <- getDumpHIDir
dir <- asks (parent . ctxFile)
case stripProperPrefix dir resolvedFile of
Nothing -> return (S.empty, [])
Just fileRel -> do
let dumpHIPath =
FilePath.replaceExtension
(toFilePath (dumpHIDir </> fileRel))
".dump-hi"
dumpHIExists <- liftIO $ D.doesFileExist dumpHIPath
if dumpHIExists
then parseDumpHI dumpHIPath
else return (S.empty, [])
getDumpHIDir = do
bld <- asks ctxDir
return $ maybe bld (bld </>) (getBuildComponentDir component)
-- | Parse a .dump-hi file into a set of modules and files.
parseDumpHI
:: (MonadReader Ctx m, MonadIO m, MonadLogger m)
=> FilePath -> m (Set ModuleName, [Path Abs File])
parseDumpHI dumpHIPath = do
dir <- asks (parent . ctxFile)
dumpHI <- liftIO $ fmap C8.lines (C8.readFile dumpHIPath)
let startModuleDeps =
dropWhile (not . ("module dependencies:" `C8.isPrefixOf`)) dumpHI
moduleDeps =
S.fromList $
mapMaybe (D.simpleParse . T.unpack . decodeUtf8) $
C8.words $
C8.concat $
C8.dropWhile (/= ' ') (fromMaybe "" $ listToMaybe startModuleDeps) :
takeWhile (" " `C8.isPrefixOf`) (drop 1 startModuleDeps)
thDeps =
-- The dependent file path is surrounded by quotes but is not escaped.
-- It can be an absolute or relative path.
mapMaybe
(fmap T.unpack .
(T.stripSuffix "\"" <=< T.stripPrefix "\"") .
T.dropWhileEnd (== '\r') . decodeUtf8 . C8.dropWhile (/= '"')) $
filter ("addDependentFile \"" `C8.isPrefixOf`) dumpHI
thDepsResolved <- liftM catMaybes $ forM thDeps $ \x -> do
mresolved <- liftIO (forgivingAbsence (resolveFile dir x)) >>= rejectMissingFile
when (isNothing mresolved) $
prettyWarnL
[ flow "addDependentFile path (Template Haskell) listed in"
, styleFile $ fromString dumpHIPath
, flow "does not exist:"
, styleFile $ fromString x
]
return mresolved
return (moduleDeps, thDepsResolved)
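-- The .dump-hi files read above are produced when GHC is run with
-- -ddump-hi -ddump-to-file; the parser picks out the "module dependencies:"
-- section and any addDependentFile "..." lines recorded by Template Haskell
-- splices.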
-- | Try to resolve the list of base names in the given directory by
-- looking for unique instances of base names applied with the given
-- extensions.
resolveFiles
:: (MonadIO m, MonadLogger m, MonadThrow m, MonadReader Ctx m)
=> [Path Abs Dir] -- ^ Directories to look in.
-> [DotCabalDescriptor] -- ^ Base names.
-> [Text] -- ^ Extensions.
-> m [(DotCabalDescriptor, Maybe DotCabalPath)]
resolveFiles dirs names exts =
forM names (\name -> liftM (name, ) (findCandidate dirs exts name))
-- | Find a candidate for the given module-or-filename from the list
-- of directories and given extensions.
findCandidate
:: (MonadIO m, MonadLogger m, MonadThrow m, MonadReader Ctx m)
=> [Path Abs Dir]
-> [Text]
-> DotCabalDescriptor
-> m (Maybe DotCabalPath)
findCandidate dirs exts name = do
pkg <- asks ctxFile >>= parsePackageNameFromFilePath
candidates <- liftIO makeNameCandidates
case candidates of
[candidate] -> return (Just (cons candidate))
[] -> do
case name of
DotCabalModule mn
| D.display mn /= paths_pkg pkg -> logPossibilities dirs mn
_ -> return ()
return Nothing
(candidate:rest) -> do
warnMultiple name candidate rest
return (Just (cons candidate))
where
cons =
case name of
DotCabalModule{} -> DotCabalModulePath
DotCabalMain{} -> DotCabalMainPath
DotCabalFile{} -> DotCabalFilePath
DotCabalCFile{} -> DotCabalCFilePath
paths_pkg pkg = "Paths_" ++ packageNameString pkg
makeNameCandidates =
liftM (nubOrd . concat) (mapM makeDirCandidates dirs)
makeDirCandidates :: Path Abs Dir
-> IO [Path Abs File]
makeDirCandidates dir =
case name of
DotCabalMain fp -> resolveCandidate dir fp
DotCabalFile fp -> resolveCandidate dir fp
DotCabalCFile fp -> resolveCandidate dir fp
DotCabalModule mn ->
liftM concat
$ mapM
((\ ext ->
resolveCandidate dir (Cabal.toFilePath mn ++ "." ++ ext))
. T.unpack)
exts
resolveCandidate
:: (MonadIO m, MonadThrow m)
=> Path Abs Dir -> FilePath.FilePath -> m [Path Abs File]
resolveCandidate x y = do
-- The standard canonicalizePath does not work for this case
p <- parseCollapsedAbsFile (toFilePath x FilePath.</> y)
exists <- doesFileExist p
return $ if exists then [p] else []
-- | Warn the user that multiple candidates are available for an
-- entry, but that we picked one anyway and continued.
warnMultiple
:: (MonadLogger m, HasRunner env, MonadReader env m)
=> DotCabalDescriptor -> Path b t -> [Path b t] -> m ()
warnMultiple name candidate rest =
-- TODO: figure out how to style 'name' and the dispOne stuff
prettyWarnL
[ flow "There were multiple candidates for the Cabal entry \""
, fromString . showName $ name
, line <> bulletedList (map dispOne rest)
, line <> flow "picking:"
, dispOne candidate
]
where showName (DotCabalModule name') = D.display name'
showName (DotCabalMain fp) = fp
showName (DotCabalFile fp) = fp
showName (DotCabalCFile fp) = fp
dispOne = fromString . toFilePath
-- TODO: figure out why dispOne can't be just `display`
-- (remove the .hlint.yaml exception if it can be)
-- | Log that we couldn't find a candidate, but there are
-- possibilities for custom preprocessor extensions.
--
-- For example: .erb for a Ruby file might exist in one of the
-- directories.
logPossibilities
:: (MonadIO m, MonadThrow m, MonadLogger m, HasRunner env,
MonadReader env m)
=> [Path Abs Dir] -> ModuleName -> m ()
logPossibilities dirs mn = do
possibilities <- liftM concat (makePossibilities mn)
unless (null possibilities) $ prettyWarnL
[ flow "Unable to find a known candidate for the Cabal entry"
, (styleModule . fromString $ D.display mn) <> ","
, flow "but did find:"
, line <> bulletedList (map display possibilities)
, flow "If you are using a custom preprocessor for this module"
, flow "with its own file extension, consider adding the file(s)"
, flow "to your .cabal under extra-source-files."
]
where
makePossibilities name =
mapM
(\dir ->
do (_,files) <- listDir dir
return
(map
filename
(filter
(isPrefixOf (D.display name) .
toFilePath . filename)
files)))
dirs
-- | Get the filename for the cabal file in the given directory.
--
-- If no .cabal file is present, or more than one is present, an exception is
-- thrown via 'throwM'.
--
-- If the directory contains a file named package.yaml, hpack is used to
-- generate a .cabal file from it.
findOrGenerateCabalFile
:: forall m env.
(MonadIO m, MonadUnliftIO m, MonadLogger m, HasRunner env, HasConfig env, MonadReader env m)
=> Path Abs Dir -- ^ package directory
-> m (Path Abs File)
findOrGenerateCabalFile pkgDir = do
hpack pkgDir
findCabalFile
where
findCabalFile :: m (Path Abs File)
findCabalFile = findCabalFile' >>= either throwIO return
findCabalFile' :: m (Either PackageException (Path Abs File))
findCabalFile' = do
files <- liftIO $ findFiles
pkgDir
(flip hasExtension "cabal" . FL.toFilePath)
(const False)
return $ case files of
[] -> Left $ PackageNoCabalFileFound pkgDir
[x] -> Right x
-- If there are multiple files, ignore files that start with
          -- ".". On Unix-like environments these are hidden, and this
-- character is not valid in package names. The main goal is
-- to ignore emacs lock files - see
-- https://github.com/commercialhaskell/stack/issues/1897.
(filter (not . ("." `isPrefixOf`) . toFilePath . filename) -> [x]) -> Right x
_:_ -> Left $ PackageMultipleCabalFilesFound pkgDir files
where hasExtension fp x = FilePath.takeExtension fp == "." ++ x
-- | Generate .cabal file from package.yaml, if necessary.
hpack :: (MonadIO m, MonadUnliftIO m, MonadLogger m, HasRunner env, HasConfig env, MonadReader env m)
=> Path Abs Dir -> m ()
hpack pkgDir = do
let hpackFile = pkgDir </> $(mkRelFile Hpack.packageConfig)
exists <- liftIO $ doesFileExist hpackFile
when exists $ do
prettyDebugL [flow "Running hpack on", display hpackFile]
config <- view configL
case configOverrideHpack config of
HpackBundled -> do
#if MIN_VERSION_hpack(0,18,0)
r <- liftIO $ Hpack.hpackResult (Just $ toFilePath pkgDir)
#else
r <- liftIO $ Hpack.hpackResult (toFilePath pkgDir)
#endif
forM_ (Hpack.resultWarnings r) prettyWarnS
let cabalFile = styleFile . fromString . Hpack.resultCabalFile $ r
case Hpack.resultStatus r of
Hpack.Generated -> prettyDebugL
[flow "hpack generated a modified version of", cabalFile]
Hpack.OutputUnchanged -> prettyDebugL
[flow "hpack output unchanged in", cabalFile]
Hpack.AlreadyGeneratedByNewerHpack -> prettyWarnL
[ cabalFile
, flow "was generated with a newer version of hpack,"
, flow "please upgrade and try again."
]
HpackCommand command -> do
envOverride <- getMinimalEnvOverride
let cmd = Cmd (Just pkgDir) command envOverride []
runCmd cmd Nothing
-- | Path for the package's build log.
buildLogPath :: (MonadReader env m, HasBuildConfig env, MonadThrow m)
=> Package -> Maybe String -> m (Path Abs File)
buildLogPath package' msuffix = do
env <- ask
let stack = getProjectWorkDir env
fp <- parseRelFile $ concat $
packageIdentifierString (packageIdentifier package') :
maybe id (\suffix -> ("-" :) . (suffix :)) msuffix [".log"]
return $ stack </> $(mkRelDir "logs") </> fp
-- Internal helper to define resolveFileOrWarn and resolveDirOrWarn
resolveOrWarn :: (MonadLogger m, MonadIO m, MonadReader Ctx m)
=> Text
-> (Path Abs Dir -> String -> m (Maybe a))
-> FilePath.FilePath
-> m (Maybe a)
resolveOrWarn subject resolver path =
do cwd <- liftIO getCurrentDir
file <- asks ctxFile
dir <- asks (parent . ctxFile)
result <- resolver dir path
when (isNothing result) $
prettyWarnL
[ fromString . T.unpack $ subject -- TODO: needs style?
, flow "listed in"
, maybe (display file) display (stripProperPrefix cwd file)
, flow "file does not exist:"
, styleDir . fromString $ path
]
return result
-- | Resolve the file, if it can't be resolved, warn for the user
-- (purely to be helpful).
resolveFileOrWarn :: (MonadIO m,MonadLogger m,MonadReader Ctx m)
=> FilePath.FilePath
-> m (Maybe (Path Abs File))
resolveFileOrWarn = resolveOrWarn "File" f
where f p x = liftIO (forgivingAbsence (resolveFile p x)) >>= rejectMissingFile
-- | Resolve the directory, if it can't be resolved, warn for the user
-- (purely to be helpful).
resolveDirOrWarn :: (MonadIO m,MonadLogger m,MonadReader Ctx m)
=> FilePath.FilePath
-> m (Maybe (Path Abs Dir))
resolveDirOrWarn = resolveOrWarn "Directory" f
where f p x = liftIO (forgivingAbsence (resolveDir p x)) >>= rejectMissingDir
-- | Extract the @PackageIdentifier@ given an exploded Haskell package
-- path.
cabalFilePackageId
:: (MonadIO m, MonadThrow m)
=> Path Abs File -> m PackageIdentifier
cabalFilePackageId fp = do
pkgDescr <- liftIO (D.readGenericPackageDescription D.silent $ toFilePath fp)
(toStackPI . D.package . D.packageDescription) pkgDescr
where
toStackPI (D.PackageIdentifier (D.unPackageName -> name) ver) = do
name' <- parsePackageNameFromString name
ver' <- parseVersionFromString (showVersion ver)
return (PackageIdentifier name' ver')
| MichielDerhaeg/stack | src/Stack/Package.hs | bsd-3-clause | 58,229 | 0 | 24 | 17,291 | 12,798 | 6,653 | 6,145 | 1,102 | 12 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Network.Wai.Lens where
import Control.Lens
import Data.ByteString (ByteString)
import Data.Foldable
import Data.Monoid
import Data.Text (Text)
import Data.Tuple
import Data.Vault.Lazy (Vault)
import Network.Socket
import Network.HTTP.Types
import qualified Network.Wai as W
class HasMethod s a | s -> a where
method :: Lens' s a
instance HasMethod W.Request Method where
method = lens W.requestMethod $ \rq m -> rq { W.requestMethod = m }
{-# INLINE method #-}
class HasHttpVersion s a | s -> a where
httpVersion :: Lens' s a
instance HasHttpVersion W.Request HttpVersion where
httpVersion = lens W.httpVersion $ \rq v -> rq { W.httpVersion = v }
{-# INLINE httpVersion #-}
class HasRawPathInfo s a | s -> a where
rawPathInfo :: Lens' s a
instance HasRawPathInfo W.Request ByteString where
rawPathInfo = lens W.rawPathInfo $ \rq p -> rq { W.rawPathInfo = p }
{-# INLINE rawPathInfo #-}
class HasRawQueryString s a | s -> a where
rawQueryString :: Lens' s a
instance HasRawQueryString W.Request ByteString where
rawQueryString = lens W.rawQueryString $ \rq q -> rq { W.rawQueryString = q }
{-# INLINE rawQueryString #-}
class HasHeaders s a | s -> a where
headers :: Lens' s a
instance HasHeaders W.Request RequestHeaders where
headers = lens W.requestHeaders $ \rq h -> rq { W.requestHeaders = h }
{-# INLINE headers #-}
class HasRemoteHost s a | s -> a where
remoteHost :: Lens' s a
instance HasRemoteHost W.Request SockAddr where
remoteHost = lens W.remoteHost $ \rq h -> rq { W.remoteHost = h }
{-# INLINE remoteHost #-}
class HasPathInfo s a | s -> a where
pathInfo :: Lens' s a
instance HasPathInfo W.Request [Text] where
pathInfo = lens W.pathInfo $ \rq p -> rq { W.pathInfo = p }
{-# INLINE pathInfo #-}
class HasQueryString s a | s -> a where
queryString :: Lens' s a
instance HasQueryString W.Request Query where
queryString = lens W.queryString $ \rq q -> rq { W.queryString = q }
{-# INLINE queryString #-}
class HasRequestBody s a | s -> a where
requestBody :: Lens' s a
instance HasRequestBody W.Request (IO ByteString) where
requestBody = lens W.requestBody $ \rq b -> rq { W.requestBody = b }
{-# INLINE requestBody #-}
class HasVault s a | s -> a where
vault :: Lens' s a
instance HasVault W.Request Vault where
vault = lens W.vault $ \rq v -> rq { W.vault = v }
{-# INLINE vault #-}
class HasRequestBodyLength s a | s -> a where
requestBodyLength :: Lens' s a
instance HasRequestBodyLength W.Request W.RequestBodyLength where
requestBodyLength = lens W.requestBodyLength $ \rq l -> rq { W.requestBodyLength = l }
{-# INLINE requestBodyLength #-}
class HasStatus s a | s -> a where
status :: Lens' s a
-- | Useful for looking up query string or header values.
--
-- @
-- req ^. headers . value "Content-Type"
-- @
value
:: (Eq a, Foldable f)
=> a
-> (b -> Const (First b) b)
-> f (a, b)
-> Const (First b) (f (a, b))
value n = folded . to swap . aside (only n) . _1
{-# INLINE value #-}
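-- Illustrative usage (hypothetical request value req; OverloadedStrings
-- assumed for the literals):
--
-- > req ^. method                     -- e.g. "GET"
-- > req ^? headers . value "Accept"   -- first matching header value, if any
-- > req & pathInfo .~ ["api", "v1"]   -- rewrite the routing path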
| webcrank/wai-lens | src/Network/Wai/Lens.hs | bsd-3-clause | 3,201 | 0 | 12 | 643 | 1,021 | 556 | 465 | 81 | 1 |
module Day23.Test where
import qualified Data.Map.Strict as M
import Day23
import Test.Hspec
tests :: SpecWith ()
tests = do
describe "Part1" $ do
it "Test1" $ do
part1 M.empty 'a' test1 `shouldBe` 3
describe "Part2" $ do
it "Test1" $ do
part2 M.empty 'a' test1 `shouldBe` 3
| z0isch/aoc2016 | test/Day23/Test.hs | bsd-3-clause | 321 | 0 | 15 | 93 | 114 | 58 | 56 | 12 | 1 |
module TTT.MinimaxSpec where
import Control.Monad.State (evalState)
import qualified Data.Map as M (empty)
import Test.Hspec
import TTT.Minimax (getMove)
import TTT.Minimax.Internal (minimax)
import TTT.GameState (GameState(..), Space(..), Token(..), availableMoves,initialState, makeMove, isWinFor, choose)
playAllGameStates _ [] outcomes = outcomes
playAllGameStates initialPlayer gameStates@(gameState@(GameState _ currentPlayer):xs) outcomes
|gameState `isWinFor` initialPlayer || moves == [] =
playAllGameStates initialPlayer xs (True:outcomes)
|gameState `isWinFor` (choose initialPlayer O X) =
playAllGameStates initialPlayer xs (False:outcomes)
|otherwise = case currentPlayer == initialPlayer of
True -> playAllGameStates initialPlayer nextStates outcomes
False -> playAllGameStates initialPlayer possibleStates outcomes
where moves = availableMoves gameState
possibleStates = genPossibleStates gameStates []
compMoves = getCompMoves gameStates []
compStatesAndMoves = (zip gameStates (reverse compMoves))
nextStates = genNextStates compStatesAndMoves []
genPossibleStates [] states = concat states
genPossibleStates (x:xs) states =
let newGameStates = map (makeMove x) moves
in genPossibleStates xs (newGameStates:states)
where moves = availableMoves x
genNextStates [] states = states
genNextStates (x:xs) states =
let newGameState = makeMove (fst x) (snd x)
in genNextStates xs (newGameState:states)
getCompMoves [] moves = moves
getCompMoves (x:xs) moves =
let move = evalState (getMove x) M.empty
in getCompMoves xs (move:moves)
spec = describe "Minimax" $ do
context "scoring a win" $ do
it "should score a win for X on a 3x3 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled X, Blank, Blank, Blank, Blank, Blank, Blank] O)
in evalState (minimax gameState) M.empty `shouldBe` 100
it "should score a win for X on a 4x4 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled X, Filled X,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank] O)
in evalState (minimax gameState) M.empty `shouldBe` 100
it "should score a win for O on a 3x3 board" $ do
let gameState = (GameState [Filled O, Filled O, Filled O, Blank, Blank, Blank, Blank, Blank, Blank] X)
in evalState (minimax gameState) M.empty `shouldBe` (-100)
it "should score a win for O on a 4x4 board" $ do
let gameState = (GameState [Filled O, Filled O, Filled O, Filled O,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank] O)
in evalState (minimax gameState) M.empty `shouldBe` -100
it "should score a tie game on a 3x3 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled O,
Filled O, Filled O, Filled X,
Filled X, Filled O, Filled X] O)
in evalState (minimax gameState) M.empty `shouldBe` 0
it "should score a tie game on a 4x4 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled O, Filled X,
Filled X, Filled O, Filled O, Filled O,
Filled O, Filled X, Filled O, Filled X,
Filled X, Filled O, Filled X, Filled O] O)
in evalState (minimax gameState) M.empty `shouldBe` 0
it "chooses the only available move on a 3x3 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled O,
Filled O, Filled O, Filled X,
Blank, Filled O, Filled X] X)
in evalState (getMove gameState) M.empty `shouldBe` 6
it "chooses the only available move on a 4x4 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled O, Filled X,
Filled X, Filled O, Filled O, Filled O,
Filled O, Filled X, Filled O, Filled X,
Filled X, Filled O, Filled X, Blank] O)
in evalState (getMove gameState) M.empty `shouldBe` 15
it "X should prioritize winning over blocking on a 3x3 board" $ do
let gameState = (GameState [Filled X, Filled X, Blank,
Blank, Filled O, Filled O,
Blank, Blank, Blank] X)
in evalState (getMove gameState) M.empty `shouldBe` 2
it "X should prioritize winning over blocking on a 4x4 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled X, Blank,
Filled O, Filled O, Filled O, Blank,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank] X)
in evalState (getMove gameState) M.empty `shouldBe` 3
it "O should prioritize winning over blocking on a 3x3 board" $ do
let gameState = (GameState [Filled X, Filled X, Blank,
Blank, Filled O, Filled O,
Blank, Blank, Filled X] O)
in evalState (getMove gameState) M.empty `shouldBe` 3
it "O should prioritize winning over blocking on a 4x4 board" $ do
let gameState = (GameState [Filled X, Filled X, Filled X, Blank,
Filled O, Filled O, Filled O, Blank,
Blank, Blank, Blank, Blank,
Blank, Blank, Blank, Blank] O)
in evalState (getMove gameState) M.empty `shouldBe` 7
it "will block a fork" $ do
let gameState = (GameState [Filled X, Blank, Blank,
Blank, Filled X, Blank,
Blank, Blank, Filled O] O)
in evalState (getMove gameState) M.empty `shouldBe` 2
it "will play through all possible gameStates on a 3x3 board as the first player and never lose" $ do
all (== True) (playAllGameStates X [(initialState 3)] [])
`shouldBe` True
-- You'll be there for a while if you run this one.
-- it "will play through all possible gameStates on a 4x4 board as the first player and never lose" $ do
-- all (== True) (playAllGameStates X [(initialState 4)] [])
-- `shouldBe` True
it "will play through all possible gameStates on 3x3 board as the second player and never lose" $ do
let moves = availableMoves (initialState 3)
initialStates = map (makeMove (initialState 3)) moves
in all (== True) (playAllGameStates O initialStates []) `shouldBe` True
-- Same here -- it's going to take a long time
-- it "will play through all possible gameStates on 4x4 board as the second player and never lose" $ do
-- let moves = availableMoves (initialState 4)
-- initialStates = map (makeMove (initialState 4)) moves
-- in all (== True) (playAllGameStates O initialStates []) `shouldBe` True
| jcg3challenges/haskell_ttt | test/TTT/MinimaxSpec.hs | bsd-3-clause | 7,793 | 0 | 22 | 2,901 | 2,106 | 1,088 | 1,018 | 110 | 2 |
-- | Various functions
module Misc.Stuff (fact,len) where
-- | Calculates the factorial
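--
-- >>> fact 5
-- 120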
fact :: Int -> Int
fact 0 = 1
fact n = n * fact (n-1)
-- | Calculates the length of a list
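--
-- >>> len [1,2,3]
-- 3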
len :: [a] -> Int
len [] = 0
len (_:xs) = 1 + len xs
| emaphis/Haskell-Practice | testing-project/src/Misc/Stuff.hs | bsd-3-clause | 235 | 0 | 8 | 57 | 100 | 55 | 45 | 7 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-|
Module : Numeric.AERN.RealArithmetic.Basis.MPFR
Description : Instances for MPFR as interval endpoints.
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable (indirect FFI)
Instances of MPFR required for serving as interval endpoints,
namely providing granularity, Comparison, lattice, rounded field and
rounded elementary operations.
-}
module Numeric.AERN.RealArithmetic.Basis.MPFR
(
M.MPFR, MM.MMPFR, M.Precision,
module Numeric.AERN.RealArithmetic.Basis.MPFR.Effort,
module Numeric.AERN.RealArithmetic.Basis.MPFR.ShowInternals,
module Numeric.AERN.RealArithmetic.Basis.MPFR.NumericOrder,
module Numeric.AERN.RealArithmetic.Basis.MPFR.Conversion,
module Numeric.AERN.RealArithmetic.Basis.MPFR.FieldOps,
module Numeric.AERN.RealArithmetic.Basis.MPFR.MixedFieldOps,
module Numeric.AERN.RealArithmetic.Basis.MPFR.SpecialConst,
module Numeric.AERN.RealArithmetic.Basis.MPFR.Elementary,
module Numeric.AERN.RealArithmetic.Basis.MPFR.Measures,
module Numeric.AERN.RealArithmetic.Basis.MPFR.ExactOps
)
where
import Numeric.AERN.RealArithmetic.Basis.MPFR.Effort
import Numeric.AERN.RealArithmetic.Basis.MPFR.ShowInternals
import Numeric.AERN.RealArithmetic.Basis.MPFR.NumericOrder
import Numeric.AERN.RealArithmetic.Basis.MPFR.Conversion
import Numeric.AERN.RealArithmetic.Basis.MPFR.FieldOps
import Numeric.AERN.RealArithmetic.Basis.MPFR.MixedFieldOps
import Numeric.AERN.RealArithmetic.Basis.MPFR.SpecialConst
import Numeric.AERN.RealArithmetic.Basis.MPFR.Elementary
import Numeric.AERN.RealArithmetic.Basis.MPFR.Measures
import Numeric.AERN.RealArithmetic.Basis.MPFR.ExactOps
import Numeric.AERN.RealArithmetic.NumericOrderRounding
import Numeric.AERN.Basics.Effort
import Numeric.AERN.Basics.Exception
import qualified Data.Number.MPFR as M
import qualified Data.Number.MPFR.Mutable as MM
import Data.Word
instance RoundedReal M.MPFR where
type RoundedRealEffortIndicator M.MPFR = M.Precision
roundedRealDefaultEffort _ = 100
rrEffortComp _ _ = ()
rrEffortMinmax _ _ = ()
rrEffortDistance _ p = ()
rrEffortToSelf _ _ = ()
rrEffortToInt _ _ = ()
rrEffortFromInt _ p = p
rrEffortToInteger _ _ = ()
rrEffortFromInteger _ p = p
rrEffortToDouble _ _ = ()
rrEffortFromDouble _ p = p
rrEffortToRational _ _ = ()
rrEffortFromRational _ p = p
rrEffortAbs _ _ = ()
rrEffortField _ p = ()
rrEffortIntMixedField _ _ = ()
rrEffortIntegerMixedField _ _ = ()
rrEffortDoubleMixedField _ _ = ()
rrEffortRationalMixedField _ _ = ()
instance HasLegalValues M.MPFR where
maybeGetProblem d
| M.isNaN d = Just "A NaN MPFR"
-- | d == 1/0 = False
-- | d == -1/0 = False
| otherwise = Nothing
| michalkonecny/aern | aern-mpfr/src/Numeric/AERN/RealArithmetic/Basis/MPFR.hs | bsd-3-clause | 2,914 | 0 | 10 | 496 | 545 | 342 | 203 | 55 | 0 |
module Problem14
( p14
) where
import Data.List (maximum, maximumBy)
import Data.Ord (comparing)
collatz' :: Int -> Int -> Int
collatz' i n
| n == 1 = i
| n `mod` 2 == 0 = collatz' (i + 1) $ n `div` 2
| otherwise = collatz' (i + 1) $ (3 * n) + 1
collatz :: Int -> Int
collatz = collatz' 0
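-- For example, the chain 13 -> 40 -> 20 -> 10 -> 5 -> 16 -> 8 -> 4 -> 2 -> 1
-- takes 9 steps, so @collatz 13 == 9@.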
p14 = show . maximumBy (comparing collatz) $ [1 .. 1000000]
| anup-2s/project-euler | src/Problem14.hs | bsd-3-clause | 363 | 0 | 10 | 91 | 183 | 98 | 85 | 12 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module Distribution.Nixpkgs.Haskell.Derivation
( Derivation, pkgid, revision, src, isLibrary, isExecutable
, extraFunctionArgs, libraryDepends, executableDepends, testDepends, configureFlags
, cabalFlags, runHaddock, jailbreak, doCheck, testTarget, hyperlinkSource, enableSplitObjs
, enableLibraryProfiling, enableExecutableProfiling, phaseOverrides, editedCabalFile, metaSection
, dependencies
)
where
import Control.DeepSeq.Generics
import Data.List
import Data.Set ( Set )
import qualified Data.Set as Set
import Data.Set.Lens
import Distribution.Nixpkgs.Fetch
import Distribution.Nixpkgs.Haskell.OrphanInstances ( )
import Distribution.Nixpkgs.Meta
import Distribution.Nixpkgs.Haskell.BuildInfo
import Distribution.Package
import Distribution.PackageDescription ( FlagAssignment, FlagName(..) )
import GHC.Generics ( Generic )
import Language.Nix
import Control.Lens.Create
import Text.PrettyPrint.HughesPJClass
import Control.Lens
import Internal.PrettyPrinting
-- | A representation of Nix expressions for building Haskell packages.
-- The data type corresponds closely to the definition of
-- 'PackageDescription' from Cabal.
data Derivation = MkDerivation
{ _pkgid :: PackageIdentifier
, _revision :: Int
, _src :: DerivationSource
, _isLibrary :: Bool
, _isExecutable :: Bool
, _extraFunctionArgs :: Set Identifier
, _libraryDepends :: BuildInfo
, _executableDepends :: BuildInfo
, _testDepends :: BuildInfo
, _configureFlags :: Set String
, _cabalFlags :: FlagAssignment
, _runHaddock :: Bool
, _jailbreak :: Bool
, _doCheck :: Bool
, _testTarget :: String
, _hyperlinkSource :: Bool
, _enableLibraryProfiling :: Bool
, _enableExecutableProfiling :: Bool
, _enableSplitObjs :: Bool
, _phaseOverrides :: String
, _editedCabalFile :: String
, _metaSection :: Meta
}
deriving (Show, Eq, Generic)
instance Default Derivation where
def = MkDerivation
{ _pkgid = error "undefined Derivation.pkgid"
, _revision = error "undefined Derivation.revision"
, _src = error "undefined Derivation.src"
, _isLibrary = error "undefined Derivation.isLibrary"
, _isExecutable = error "undefined Derivation.isExecutable"
, _extraFunctionArgs = error "undefined Derivation.extraFunctionArgs"
, _libraryDepends = error "undefined Derivation.libraryDepends"
, _executableDepends = error "undefined Derivation.executableDepends"
, _testDepends = error "undefined Derivation.testDepends"
, _configureFlags = error "undefined Derivation.configureFlags"
, _cabalFlags = error "undefined Derivation.cabalFlags"
, _runHaddock = error "undefined Derivation.runHaddock"
, _jailbreak = error "undefined Derivation.jailbreak"
, _doCheck = error "undefined Derivation.doCheck"
, _testTarget = error "undefined Derivation.testTarget"
, _hyperlinkSource = error "undefined Derivation.hyperlinkSource"
, _enableLibraryProfiling = error "undefined Derivation.enableLibraryProfiling"
, _enableExecutableProfiling = error "undefined Derivation.enableExecutableProfiling"
, _enableSplitObjs = error "undefined Derivation.enableSplitObjs"
, _phaseOverrides = error "undefined Derivation.phaseOverrides"
, _editedCabalFile = error "undefined Derivation.editedCabalFile"
, _metaSection = error "undefined Derivation.metaSection"
}
makeLenses ''Derivation
makeLensesFor [("_libraryDepends", "dependencies"), ("_executableDepends", "dependencies"), ("_testDepends", "dependencies")] ''Derivation
instance Package Derivation where
packageId = view pkgid
instance NFData Derivation where rnf = genericRnf
instance Pretty Derivation where
pPrint drv@(MkDerivation {..}) = funargs (map text ("mkDerivation" : toAscList inputs)) $$ vcat
[ text "mkDerivation" <+> lbrace
, nest 2 $ vcat
[ attr "pname" $ doubleQuotes $ disp (packageName _pkgid)
, attr "version" $ doubleQuotes $ disp (packageVersion _pkgid)
, sourceAttr _src
, onlyIf (_revision > 0) $ attr "revision" $ doubleQuotes $ int _revision
, onlyIf (not (null _editedCabalFile)) $ attr "editedCabalFile" $ string _editedCabalFile
, listattr "configureFlags" empty (map (show . show) renderedFlags)
, boolattr "isLibrary" (not _isLibrary || _isExecutable) _isLibrary
, boolattr "isExecutable" (not _isLibrary || _isExecutable) _isExecutable
, onlyIf (_libraryDepends /= mempty) $ pPrintBuildInfo "library" _libraryDepends
, onlyIf (_executableDepends /= mempty) $ pPrintBuildInfo "executable" _executableDepends
, onlyIf (_testDepends /= mempty) $ pPrintBuildInfo "test" _testDepends
, boolattr "enableLibraryProfiling" _enableLibraryProfiling _enableLibraryProfiling
, boolattr "enableExecutableProfiling" _enableExecutableProfiling _enableExecutableProfiling
, boolattr "enableSplitObjs" (not _enableSplitObjs) _enableSplitObjs
, boolattr "doHaddock" (not _runHaddock) _runHaddock
, boolattr "jailbreak" _jailbreak _jailbreak
, boolattr "doCheck" (not _doCheck) _doCheck
, onlyIf (not (null _testTarget)) $ attr "testTarget" $ string _testTarget
, boolattr "hyperlinkSource" (not _hyperlinkSource) _hyperlinkSource
, onlyIf (not (null _phaseOverrides)) $ vcat ((map text . lines) _phaseOverrides)
, pPrint _metaSection
]
, rbrace
]
where
inputs :: Set String
inputs = Set.unions [ Set.map (view ident) _extraFunctionArgs
, setOf (dependencies . each . folded . localName . ident) drv
, Set.fromList ["fetch" ++ derivKind _src | derivKind _src /= "" && not isHackagePackage]
]
renderedFlags = [ text "-f" <> (if enable then empty else char '-') <> text f | (FlagName f, enable) <- _cabalFlags ]
++ map text (toAscList _configureFlags)
isHackagePackage = "mirror://hackage/" `isPrefixOf` derivUrl _src
sourceAttr (DerivationSource{..})
| isHackagePackage = attr "sha256" $ string derivHash
| derivKind /= "" = vcat
[ text "src" <+> equals <+> text ("fetch" ++ derivKind) <+> lbrace
, nest 2 $ vcat
[ attr "url" $ string derivUrl
, attr "sha256" $ string derivHash
, if derivRevision /= "" then attr "rev" (string derivRevision) else empty
]
, rbrace <> semi
]
| otherwise = attr "src" $ text derivUrl
| gridaphobe/cabal2nix | distribution-nixpkgs/src/Distribution/Nixpkgs/Haskell/Derivation.hs | bsd-3-clause | 6,944 | 0 | 18 | 1,601 | 1,511 | 822 | 689 | 123 | 0 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module CmmContFlowOpt
( cmmCfgOpts
, cmmCfgOptsProc
, removeUnreachableBlocksProc
, replaceLabels
)
where
import GhcPrelude hiding (succ, unzip, zip)
import Hoopl.Block
import Hoopl.Collections
import Hoopl.Graph
import Hoopl.Label
import BlockId
import Cmm
import CmmUtils
import CmmSwitch (mapSwitchTargets)
import Maybes
import Panic
import Util
import Control.Monad
-- Note [What is shortcutting]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider this Cmm code:
--
-- L1: ...
-- goto L2;
-- L2: goto L3;
-- L3: ...
--
-- Here L2 is an empty block and contains only an unconditional branch
-- to L3. In this situation any block that jumps to L2 can jump
-- directly to L3:
--
-- L1: ...
-- goto L3;
-- L2: goto L3;
-- L3: ...
--
-- In this situation we say that we shortcut L2 to L3. One of
-- consequences of shortcutting is that some blocks of code may become
-- unreachable (in the example above this is true for L2).
-- Note [Control-flow optimisations]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- This optimisation does three things:
--
-- - If a block finishes in an unconditional branch to another block
-- and that is the only jump to that block we concatenate the
-- destination block at the end of the current one.
--
-- - If a block finishes in a call whose continuation block is a
-- goto, then we can shortcut the destination, making the
-- continuation block the destination of the goto - but see Note
-- [Shortcut call returns].
--
-- - For any block that is not a call we try to shortcut the
-- destination(s). Additionally, if a block ends with a
-- conditional branch we try to invert the condition.
--
-- Blocks are processed using postorder DFS traversal. A side effect
-- of determining traversal order with a graph search is elimination
-- of any blocks that are unreachable.
--
-- Transformations are improved by working from the end of the graph
-- towards the beginning, because we may be able to perform many
-- shortcuts in one go.
-- Note [Shortcut call returns]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- We are going to maintain the "current" graph (LabelMap CmmBlock) as
-- we go, and also a mapping from BlockId to BlockId, representing
-- continuation labels that we have renamed. This latter mapping is
-- important because we might shortcut a CmmCall continuation. For
-- example:
--
-- Sp[0] = L
-- call g returns to L
-- L: goto M
-- M: ...
--
-- So when we shortcut the L block, we need to replace not only
-- the continuation of the call, but also references to L in the
-- code (e.g. the assignment Sp[0] = L):
--
-- Sp[0] = M
-- call g returns to M
-- M: ...
--
-- So we keep track of which labels we have renamed and apply the mapping
-- at the end with replaceLabels.
-- Note [Shortcut call returns and proc-points]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider this code that you might get from a recursive
-- let-no-escape:
--
-- goto L1
-- L1:
-- if (Hp > HpLim) then L2 else L3
-- L2:
-- call stg_gc_noregs returns to L4
-- L4:
-- goto L1
-- L3:
-- ...
-- goto L1
--
-- Then the control-flow optimiser shortcuts L4. But that turns L1
-- into the call-return proc point, and every iteration of the loop
-- has to shuffle variables to and from the stack. So we must *not*
-- shortcut L4.
--
-- Moreover not shortcutting call returns is probably fine. If L4 can
-- concat with its branch target then it will still do so. And we
-- save some compile time because we don't have to traverse all the
-- code in replaceLabels.
--
-- However, we probably do want to do this if we are splitting proc
-- points, because L1 will be a proc-point anyway, so merging it with
-- L4 reduces the number of proc points. Unfortunately recursive
-- let-no-escapes won't generate very good code with proc-point
-- splitting on - we should probably compile them to explicitly use
-- the native calling convention instead.
cmmCfgOpts :: Bool -> CmmGraph -> CmmGraph
cmmCfgOpts split g = fst (blockConcat split g)
cmmCfgOptsProc :: Bool -> CmmDecl -> CmmDecl
cmmCfgOptsProc split (CmmProc info lbl live g) = CmmProc info' lbl live g'
where (g', env) = blockConcat split g
info' = info{ info_tbls = new_info_tbls }
new_info_tbls = mapFromList (map upd_info (mapToList (info_tbls info)))
-- If we changed any labels, then we have to update the info tables
-- too, except for the top-level info table because that might be
-- referred to by other procs.
upd_info (k,info)
| Just k' <- mapLookup k env
= (k', if k' == g_entry g'
then info
else info{ cit_lbl = infoTblLbl k' })
| otherwise
= (k,info)
cmmCfgOptsProc _ top = top
blockConcat :: Bool -> CmmGraph -> (CmmGraph, LabelMap BlockId)
blockConcat splitting_procs g@CmmGraph { g_entry = entry_id }
= (replaceLabels shortcut_map $ ofBlockMap new_entry new_blocks, shortcut_map')
where
-- We might be able to shortcut the entry BlockId itself.
-- Remember to update the shortcut_map, since we also have to
-- update the info_tbls mapping now.
(new_entry, shortcut_map')
| Just entry_blk <- mapLookup entry_id new_blocks
, Just dest <- canShortcut entry_blk
= (dest, mapInsert entry_id dest shortcut_map)
| otherwise
= (entry_id, shortcut_map)
-- blocks is a list of blocks in DFS postorder, while blockmap is
-- a map of blocks. We process each element from blocks and update
-- blockmap accordingly
blocks = postorderDfs g
blockmap = foldr addBlock emptyBody blocks
-- Accumulator contains three components:
-- * map of blocks in a graph
-- * map of shortcut labels. See Note [Shortcut call returns]
-- * map containing number of predecessors for each block. We discard
-- it after we process all blocks.
(new_blocks, shortcut_map, _) =
foldr maybe_concat (blockmap, mapEmpty, initialBackEdges) blocks
-- Map of predecessors for initial graph. We increase number of
-- predecessors for entry block by one to denote that it is
-- target of a jump, even if no block in the current graph jumps
-- to it.
initialBackEdges = incPreds entry_id (predMap blocks)
maybe_concat :: CmmBlock
-> (LabelMap CmmBlock, LabelMap BlockId, LabelMap Int)
-> (LabelMap CmmBlock, LabelMap BlockId, LabelMap Int)
maybe_concat block (!blocks, !shortcut_map, !backEdges)
-- If:
-- (1) current block ends with unconditional branch to b' and
-- (2) it has exactly one predecessor (namely, current block)
--
-- Then:
-- (1) append b' block at the end of current block
-- (2) remove b' from the map of blocks
-- (3) remove information about b' from predecessors map
--
-- Since we know that the block has only one predecessor we call
-- mapDelete directly instead of calling decPreds.
--
-- Note that we always maintain an up-to-date list of predecessors, so
-- we can ignore the contents of shortcut_map
| CmmBranch b' <- last
, hasOnePredecessor b'
, Just blk' <- mapLookup b' blocks
= let bid' = entryLabel blk'
in ( mapDelete bid' $ mapInsert bid (splice head blk') blocks
, shortcut_map
, mapDelete b' backEdges )
-- If:
-- (1) we are splitting proc points (see Note
-- [Shortcut call returns and proc-points]) and
-- (2) current block is a CmmCall or CmmForeignCall with
-- continuation b' and
-- (3) we can shortcut that continuation to dest
-- Then:
-- (1) we change continuation to point to b'
-- (2) create mapping from b' to dest
-- (3) increase number of predecessors of dest by 1
-- (4) decrease number of predecessors of b' by 1
--
-- Later we will use replaceLabels to substitute all occurrences of b'
-- with dest.
| splitting_procs
, Just b' <- callContinuation_maybe last
, Just blk' <- mapLookup b' blocks
, Just dest <- canShortcut blk'
= ( mapInsert bid (blockJoinTail head (update_cont dest)) blocks
, mapInsert b' dest shortcut_map
, decPreds b' $ incPreds dest backEdges )
-- If:
-- (1) a block does not end with a call
-- Then:
-- (1) if it ends with a conditional attempt to invert the
-- conditional
-- (2) attempt to shortcut all destination blocks
-- (3) if new successors of a block are different from the old ones
-- update the of predecessors accordingly
--
-- A special case of this is a situation when a block ends with an
-- unconditional jump to a block that can be shortcut.
| Nothing <- callContinuation_maybe last
= let oldSuccs = successors last
newSuccs = successors swapcond_last
in ( mapInsert bid (blockJoinTail head swapcond_last) blocks
, shortcut_map
, if oldSuccs == newSuccs
then backEdges
else foldr incPreds (foldr decPreds backEdges oldSuccs) newSuccs )
-- Otherwise don't do anything
| otherwise
= ( blocks, shortcut_map, backEdges )
where
(head, last) = blockSplitTail block
bid = entryLabel block
-- Changes continuation of a call to a specified label
update_cont dest =
case last of
CmmCall{} -> last { cml_cont = Just dest }
CmmForeignCall{} -> last { succ = dest }
_ -> panic "Can't shortcut continuation."
-- Attempts to shortcut successors of last node
shortcut_last = mapSuccessors shortcut last
where
shortcut l =
case mapLookup l blocks of
Just b | Just dest <- canShortcut b -> dest
_otherwise -> l
-- See Note [Invert Cmm conditionals]
swapcond_last
| CmmCondBranch cond t f l <- shortcut_last
, hasOnePredecessor t -- inverting will make t a fallthrough
, likelyTrue l || (numPreds f > 1)
, Just cond' <- maybeInvertCmmExpr cond
= CmmCondBranch cond' f t (invertLikeliness l)
| otherwise
= shortcut_last
likelyTrue (Just True) = True
likelyTrue _ = False
invertLikeliness :: Maybe Bool -> Maybe Bool
invertLikeliness = fmap not
-- Number of predecessors for a block
numPreds bid = mapLookup bid backEdges `orElse` 0
hasOnePredecessor b = numPreds b == 1
{-
Note [Invert Cmm conditionals]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The native code generator always produces jumps to the true branch.
Falling through to the false branch is however faster. So we try to
arrange for that to happen.
This means we invert the condition if:
* The likely path will become a fallthrough.
* We can't guarantee a fallthrough for the false branch but for the
true branch.
In some cases it's faster to avoid inverting when the false branch is likely.
However determining when that is the case is neither easy nor cheap so for
now we always invert as this produces smaller binaries and code that is
equally fast on average. (On an i7-6700K)
TODO:
There is also the edge case when both branches have multiple predecessors.
In this case we could assume that we will end up with a jump for BOTH
branches. In this case it might be best to put the likely path in the true
branch especially if there are large numbers of predecessors as this saves
us the jump that's not taken. However, I haven't tested this and as of early
2018 we almost never generate cmm where this would apply.
-}
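-- A sketch of the rewrite (schematic, not real Cmm): a branch
-- "if (cond) then T else F" where T is likely and has a single predecessor
-- becomes "if (!cond) then F else T", so that T can be laid out as the
-- fall-through path.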
-- Functions for incrementing and decrementing number of predecessors. If
-- decrementing would set the predecessor count to 0, we remove entry from the
-- map.
-- Invariant: if a block has no predecessors it should be dropped from the
-- graph because it is unreachable. maybe_concat is constructed to maintain
-- that invariant, but calling replaceLabels may introduce unreachable blocks.
-- We rely on subsequent passes in the Cmm pipeline to remove unreachable
-- blocks.
incPreds, decPreds :: BlockId -> LabelMap Int -> LabelMap Int
incPreds bid edges = mapInsertWith (+) bid 1 edges
decPreds bid edges = case mapLookup bid edges of
Just preds | preds > 1 -> mapInsert bid (preds - 1) edges
Just _ -> mapDelete bid edges
_ -> edges
-- Checks if a block consists only of "goto dest". If it does than we return
-- "Just dest" label. See Note [What is shortcutting]
canShortcut :: CmmBlock -> Maybe BlockId
canShortcut block
| (_, middle, CmmBranch dest) <- blockSplit block
, all dont_care $ blockToList middle
= Just dest
| otherwise
= Nothing
where dont_care CmmComment{} = True
dont_care CmmTick{} = True
dont_care _other = False
-- Concatenates two blocks. First one is assumed to be open on exit, the second
-- is assumed to be closed on entry (i.e. it has a label attached to it, which
-- the splice function removes by calling snd on result of blockSplitHead).
splice :: Block CmmNode C O -> CmmBlock -> CmmBlock
splice head rest = entry `blockJoinHead` code0 `blockAppend` code1
where (CmmEntry lbl sc0, code0) = blockSplitHead head
(CmmEntry _ sc1, code1) = blockSplitHead rest
entry = CmmEntry lbl (combineTickScopes sc0 sc1)
-- If node is a call with continuation call return Just label of that
-- continuation. Otherwise return Nothing.
callContinuation_maybe :: CmmNode O C -> Maybe BlockId
callContinuation_maybe (CmmCall { cml_cont = Just b }) = Just b
callContinuation_maybe (CmmForeignCall { succ = b }) = Just b
callContinuation_maybe _ = Nothing
-- Map over the CmmGraph, replacing each label with its mapping in the
-- supplied LabelMap.
replaceLabels :: LabelMap BlockId -> CmmGraph -> CmmGraph
replaceLabels env g
| mapNull env = g
| otherwise = replace_eid $ mapGraphNodes1 txnode g
where
replace_eid g = g {g_entry = lookup (g_entry g)}
lookup id = mapLookup id env `orElse` id
txnode :: CmmNode e x -> CmmNode e x
txnode (CmmBranch bid) = CmmBranch (lookup bid)
txnode (CmmCondBranch p t f l) =
mkCmmCondBranch (exp p) (lookup t) (lookup f) l
txnode (CmmSwitch e ids) =
CmmSwitch (exp e) (mapSwitchTargets lookup ids)
txnode (CmmCall t k rg a res r) =
CmmCall (exp t) (liftM lookup k) rg a res r
txnode fc@CmmForeignCall{} =
fc{ args = map exp (args fc), succ = lookup (succ fc) }
txnode other = mapExpDeep exp other
exp :: CmmExpr -> CmmExpr
exp (CmmLit (CmmBlock bid)) = CmmLit (CmmBlock (lookup bid))
exp (CmmStackSlot (Young id) i) = CmmStackSlot (Young (lookup id)) i
exp e = e
mkCmmCondBranch :: CmmExpr -> Label -> Label -> Maybe Bool -> CmmNode O C
mkCmmCondBranch p t f l =
if t == f then CmmBranch t else CmmCondBranch p t f l
-- Build a map from a block to its set of predecessors.
predMap :: [CmmBlock] -> LabelMap Int
predMap blocks = foldr add_preds mapEmpty blocks
where
add_preds block env = foldr add env (successors block)
where add lbl env = mapInsertWith (+) lbl 1 env
-- Removing unreachable blocks
removeUnreachableBlocksProc :: CmmDecl -> CmmDecl
removeUnreachableBlocksProc proc@(CmmProc info lbl live g)
| used_blocks `lengthLessThan` mapSize (toBlockMap g)
= CmmProc info' lbl live g'
| otherwise
= proc
where
g' = ofBlockList (g_entry g) used_blocks
info' = info { info_tbls = keep_used (info_tbls info) }
-- Remove any info_tbls for unreachable
keep_used :: LabelMap CmmInfoTable -> LabelMap CmmInfoTable
keep_used bs = mapFoldlWithKey keep mapEmpty bs
keep :: LabelMap CmmInfoTable -> Label -> CmmInfoTable -> LabelMap CmmInfoTable
keep env l i | l `setMember` used_lbls = mapInsert l i env
| otherwise = env
used_blocks :: [CmmBlock]
used_blocks = postorderDfs g
used_lbls :: LabelSet
used_lbls = setFromList $ map entryLabel used_blocks
| shlevy/ghc | compiler/cmm/CmmContFlowOpt.hs | bsd-3-clause | 16,886 | 0 | 18 | 4,657 | 2,593 | 1,391 | 1,202 | 174 | 8 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
-- | Interpretation of basic syntactic constructs
module Feldspar.Core.Constructs.Literal
( module Language.Syntactic.Constructs.Literal
) where
import Language.Syntactic
import Language.Syntactic.Constructs.Literal
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Data.Typeable
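-- Note: 'sharable' below holds only for list-valued literals: the check
-- compares the literal's outermost type constructor with that of @[()]@.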
instance Sharable Literal
where
sharable (Literal a) = typeRepTyCon (typeOf a) == typeRepTyCon (typeOf [()])
instance Cumulative Literal
instance SizeProp (Literal :|| Type)
where
sizeProp (C' (Literal a)) Nil = sizeOf a
instance ((Literal :|| Type) :<: dom, OptimizeSuper dom) =>
Optimize (Literal :|| Type) dom
where
constructFeatUnOpt opts l@(C' _) = constructFeatUnOptDefault opts l
| emwap/feldspar-language | src/Feldspar/Core/Constructs/Literal.hs | bsd-3-clause | 2,521 | 0 | 11 | 437 | 242 | 147 | 95 | -1 | -1 |
module Main where
import Types
import Game
import AI
import Control.Monad
import Control.Monad.Random
randIndex :: RandomGen r => Int -> Rand r Int
randIndex len = getRandomR (0, len - 1)
randMove :: RandomGen r => Game -> Rand r Move
randMove g = do
let ls = legalSquares g
n = length ls
idx <- randIndex n
return $ move (ls !! idx)
playGame :: RandomGen r => Int -> Rand r Game -> Rand r Game
playGame n g = do
g'@(Game _ b) <- g
if isOver b
then g
else if odd n
then playGame (n + 1) (return $ (chooseMove g' g'))
else do
m <- randMove g'
playGame (n + 1) (return $ m g')
main :: IO ()
main = do
gs <- forM [1..100] $ \_ -> evalRandIO $ playGame 1 (return newGame)
hs <- forM [1..100] $ \_ -> evalRandIO $ playGame 0 (return newGame)
let resultsB = map (findWinner . board) gs
blackWins = sum $ map (oneIfEq Black) resultsB
resultsW = map (findWinner . board) hs
whiteWins = sum $ map (oneIfEq White) resultsW
print $ "Computer playing black wins "
++ show blackWins
++ " out of 100 games against random play."
print $ "Computer playing white wins "
++ show whiteWins
++ " out of 100 games against random play."
| reedrosenbluth/othello | tests/Test.hs | bsd-3-clause | 1,232 | 0 | 14 | 340 | 494 | 244 | 250 | 38 | 3 |
{-
Purely Functional Queue with Amortised Linear Cost
Based on section 3 of
Christoph Herrmann, Edwin Brady and Kevin Hammond. 2011.
Dependently-typed Programming by Composition from Functional
Building Blocks.
In Draft Proceedings of the 12th International Symposium on Trends
in Functional Programming (TFP 2011). Tech. Rep. SIC-07/11,
Dept. Computer Systems and Computing, Universidad Complutense de
Madrid.
-}
{-# OPTIONS_GHC -F -pgmF inch #-}
{-# LANGUAGE RankNTypes, GADTs, KindSignatures, ScopedTypeVariables,
NPlusKPatterns #-}
module Queue where
data Vec :: * -> Num -> * where
Nil :: forall a . Vec a 0
Cons :: forall (n :: Nat) a . a -> Vec a n -> Vec a (n+1)
deriving Show
data Queue :: * -> Num -> * where
Q :: forall elem . pi (a b c :: Nat) .
Vec elem a -> Vec elem b -> Queue elem (c + 3*a + b)
deriving Show
initQueue = Q {0} {0} {0} Nil Nil
enqueue :: forall elem (paid :: Nat) .
elem -> Queue elem paid -> Queue elem (paid + 4)
enqueue x (Q {a} {b} {c} sA sB) = Q {a+1} {b} {c+1} (Cons x sA) sB
reverseS :: forall elem (paid :: Nat) .
Queue elem paid -> Queue elem paid
reverseS (Q {0} {b} {c} Nil sB) = Q {0} {b} {c} Nil sB
reverseS (Q {a+1} {b} {c} (Cons x sA) sB) = reverseS (Q {a} {b+1} {c+2} sA (Cons x sB))
dequeue :: forall elem (paid :: Nat) .
Queue elem paid -> (elem, Queue elem paid)
dequeue (Q {a} {b+1} {c} sA (Cons x sB)) = (x, Q {a} {b} {c+1} sA sB)
dequeue (Q {a+1} {0} {c} sA Nil) = dequeue (reverseS (Q {a+1} {0} {c} sA Nil))
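-- An illustrative run (not from the paper): each enqueue adds 4 to the
-- amortised cost index, so
--
-- > dequeue (enqueue 'b' (enqueue 'a' initQueue))
--
-- has type (Char, Queue Char 8) and returns 'a', the oldest element.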
data Queue2 :: * -> Num -> * where
Q2 :: forall elem (a b c :: Nat) .
Vec elem a -> Vec elem b -> Queue2 elem (c + 3*a + b)
deriving Show
initQueue2 :: forall elem . Queue2 elem 0
initQueue2 = Q2 Nil Nil
enqueue2 :: forall elem (paid :: Nat) .
elem -> Queue2 elem paid -> Queue2 elem (paid + 4)
enqueue2 x (Q2 sA sB) = Q2 (Cons x sA) sB
reverseS2 :: forall elem (paid :: Nat) .
Queue2 elem paid -> Queue2 elem paid
reverseS2 (Q2 Nil sB) = Q2 Nil sB
reverseS2 (Q2 (Cons x sA) sB) = reverseS2 (Q2 sA (Cons x sB))
dequeue2 :: forall elem (paid :: Nat) .
Queue2 elem paid -> (elem, Queue2 elem paid)
dequeue2 (Q2 sA (Cons x sB)) = (x, Q2 sA sB)
dequeue2 (Q2 sA Nil) = dequeue2 (reverseS2 (Q2 sA Nil)) | adamgundry/inch | examples/Queue.hs | bsd-3-clause | 2,405 | 63 | 16 | 686 | 944 | 516 | 428 | -1 | -1 |
--------------------------------------------------------------------------------
-- | A store for storing and retrieving items
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Hakyll.Core.Store
( Store
, Result (..)
, toMaybe
, new
, set
, get
, delete
, hash
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Exception (IOException, handle)
import qualified Crypto.Hash.MD5 as MD5
import Data.Binary (Binary, decodeFile, encodeFile)
import qualified Data.ByteString as B
import qualified Data.Cache.LRU.IO as Lru
import Data.List (intercalate)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Typeable (TypeRep, Typeable, cast, typeOf)
import System.Directory (createDirectoryIfMissing)
import System.Directory (doesFileExist, removeFile)
import System.FilePath ((</>))
import Text.Printf (printf)
--------------------------------------------------------------------------------
-- | Simple wrapper type
data Box = forall a. Typeable a => Box a
--------------------------------------------------------------------------------
data Store = Store
{ -- | All items are stored on the filesystem
storeDirectory :: FilePath
, -- | Optionally, items are also kept in-memory
storeMap :: Maybe (Lru.AtomicLRU FilePath Box)
}
--------------------------------------------------------------------------------
-- | Result of a store query
data Result a
= Found a -- ^ Found, result
| NotFound -- ^ Not found
| WrongType TypeRep TypeRep -- ^ Expected, true type
deriving (Show, Eq)
--------------------------------------------------------------------------------
-- | Convert result to 'Maybe'
toMaybe :: Result a -> Maybe a
toMaybe (Found x) = Just x
toMaybe _ = Nothing
--------------------------------------------------------------------------------
-- | Initialize the store
new :: Bool -- ^ Use in-memory caching
-> FilePath -- ^ Directory to use for hard disk storage
-> IO Store -- ^ Store
new inMemory directory = do
createDirectoryIfMissing True directory
ref <- if inMemory then Just <$> Lru.newAtomicLRU csize else return Nothing
return Store
{ storeDirectory = directory
, storeMap = ref
}
where
csize = Just 500
--------------------------------------------------------------------------------
-- | Auxiliary: add an item to the in-memory cache
cacheInsert :: Typeable a => Store -> String -> a -> IO ()
cacheInsert (Store _ Nothing) _ _ = return ()
cacheInsert (Store _ (Just lru)) key x =
Lru.insert key (Box x) lru
--------------------------------------------------------------------------------
-- | Auxiliary: get an item from the in-memory cache
cacheLookup :: forall a. Typeable a => Store -> String -> IO (Result a)
cacheLookup (Store _ Nothing) _ = return NotFound
cacheLookup (Store _ (Just lru)) key = do
res <- Lru.lookup key lru
return $ case res of
Nothing -> NotFound
Just (Box x) -> case cast x of
Just x' -> Found x'
Nothing -> WrongType (typeOf (undefined :: a)) (typeOf x)
--------------------------------------------------------------------------------
-- | Auxiliary: delete an item from the in-memory cache
cacheDelete :: Store -> String -> IO ()
cacheDelete (Store _ Nothing) _ = return ()
cacheDelete (Store _ (Just lru)) key = do
_ <- Lru.delete key lru
return ()
--------------------------------------------------------------------------------
-- | Store an item
set :: (Binary a, Typeable a) => Store -> [String] -> a -> IO ()
set store identifier value = do
encodeFile (storeDirectory store </> key) value
cacheInsert store key value
where
key = hash identifier
--------------------------------------------------------------------------------
-- | Load an item
get :: (Binary a, Typeable a) => Store -> [String] -> IO (Result a)
get store identifier = do
-- First check the in-memory map
ref <- cacheLookup store key
case ref of
-- Not found in the map, try the filesystem
NotFound -> do
exists <- doesFileExist path
if not exists
-- Not found in the filesystem either
then return NotFound
-- Found in the filesystem
else do
v <- decodeFile path
cacheInsert store key v
return $ Found v
-- Found in the in-memory map (or wrong type), just return
s -> return s
where
key = hash identifier
path = storeDirectory store </> key
--------------------------------------------------------------------------------
-- | Delete an item
delete :: Store -> [String] -> IO ()
delete store identifier = do
cacheDelete store key
deleteFile $ storeDirectory store </> key
where
key = hash identifier
--------------------------------------------------------------------------------
-- | Delete a file, ignoring the error if it does not exist...
deleteFile :: FilePath -> IO ()
deleteFile = handle (\(_ :: IOException) -> return ()) . removeFile
--------------------------------------------------------------------------------
-- | Mostly meant for internal usage
hash :: [String] -> String
hash = concatMap (printf "%02x") . B.unpack .
MD5.hash . T.encodeUtf8 . T.pack . intercalate "/"
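--------------------------------------------------------------------------------
-- A hypothetical usage sketch (the key names are made up):
--
-- > store <- new True "_cache"
-- > set store ["posts/foo.md", "metadata"] ("hello" :: String)
-- > res <- get store ["posts/foo.md", "metadata"] :: IO (Result String)
--
-- Here 'res' is @Found "hello"@, served from the in-memory LRU when possible.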
| bergmark/hakyll | src/Hakyll/Core/Store.hs | bsd-3-clause | 5,742 | 0 | 17 | 1,384 | 1,230 | 659 | 571 | 95 | 3 |
module ScrabbleScoreKata.Day4Spec (spec) where
import Test.Hspec
import ScrabbleScoreKata.Day4 (score)
spec :: Spec
spec = do
it "is zero when empty input" $ do
score "" `shouldBe` 0
it "is 1 when given lowercase 'a'" $ do
score "a" `shouldBe` 1
it "is 1 when given uppercase 'A'" $ do
score "A" `shouldBe` 1
it "is 4 when given 'f'" $ do
score "f" `shouldBe` 4
it "is 2 when given the word 'at'" $ do
score "at" `shouldBe` 2
it "is 12 when given the word 'zoo'" $ do
score "zoo" `shouldBe` 12
it "is 6 when given the word 'street'" $ do
score "street" `shouldBe` 6
it "is 22 when given the word 'quirky'" $ do
score "quirky" `shouldBe` 22
it "is 41 when given the word 'OxyphenButazone'" $ do
score "OxyphenButazone" `shouldBe` 41
it "scores only english-like letters" $ do
score "pinata" `shouldBe` 8
score "piñata" `shouldBe` 7
| Alex-Diez/haskell-tdd-kata | old-katas/test/ScrabbleScoreKata/Day4Spec.hs | bsd-3-clause | 1,071 | 0 | 11 | 389 | 268 | 128 | 140 | 26 | 1 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies #-}
module Main where
import Graphics.QML.Test.Framework
import Graphics.QML.Test.Harness
import Graphics.QML.Test.SimpleTest
import Test.QuickCheck
import Data.Proxy
main :: IO ()
main = checkProperty $ TestType (Proxy :: Proxy SimpleMethods)
| drhodes/HsQML | test/Test1.hs | bsd-3-clause | 293 | 0 | 8 | 34 | 67 | 41 | 26 | 9 | 1 |
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Data.String.Parse
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : portable
--
-- A very simple string parser related to Parsec and Attoparsec
--
-- Simple example:
--
-- > > parse ((,) <$> take 2 <*> char ' ' <*> (string "abc" *> anyChar)) "xx abctest"
-- > ParseOK "est" ("xx", 't')
--
module Data.String.Parse
( Parser
, Result(..)
-- * run the Parser
, parse
, parseFeed
-- * Parser methods
, isEndOfBuff
, char
, anyChar
, string
, take
, takeWhile
, skip
, skipWhile
) where
import Control.Applicative
import Control.Monad
import Prelude hiding (take, takeWhile)
-- | Simple parsing result, that represent respectively:
--
-- * failure: with the error message
--
-- * continuation: that need for more input data
--
-- * success: the remaining unparsed data and the parser value
data Result a =
ParseFail String
| ParseMore (String -> Result a)
| ParseOK String a
instance Show a => Show (Result a) where
show (ParseFail err) = "ParseFailure: " ++ err
show (ParseMore _) = "ParseMore _"
show (ParseOK b a) = "ParseOK " ++ show a ++ " " ++ show b
type Failure r = String -> String -> Result r
type Success a r = String -> a -> Result r
-- | Simple String parser structure
newtype Parser a = Parser
{ runParser :: forall r . String -> Failure r -> Success a r -> Result r }
instance Monad Parser where
fail errorMsg = Parser $ \buf err _ -> err buf ("failed: " ++ errorMsg)
return v = Parser $ \buf _ ok -> ok buf v
m >>= k = Parser $ \buf err ok ->
runParser m buf err (\buf' a -> runParser (k a) buf' err ok)
instance MonadPlus Parser where
mzero = fail "Parser.MonadPlus.mzero"
mplus f g = Parser $ \buf err ok ->
-- rewrite the err callback of @f to call @g
runParser f buf (\_ _ -> runParser g buf err ok) ok
instance Functor Parser where
fmap f p = Parser $ \buf err ok ->
runParser p buf err (\b a -> ok b (f a))
instance Applicative Parser where
pure = return
(<*>) d e = d >>= \b -> e >>= \a -> return (b a)
instance Alternative Parser where
empty = fail "Parser.Alternative.empty"
(<|>) = mplus
-- | Run a parser on an @initial String.
--
-- If the Parser need more data than available, the @feeder function
-- is automatically called and fed to the More continuation.
parseFeed :: Monad m => m String -> Parser a -> String -> m (Result a)
parseFeed feeder p initial = loop $ parse p initial
where loop (ParseMore k) = feeder >>= (loop . k)
loop r = return r
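-- For example, @parseFeed (pure "") p s@ runs @p@ on @s@ and answers every
-- request for more input with an empty chunk, which 'getMore' below treats
-- as end of input.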
-- | Run a Parser on a String and return a 'Result'
parse :: Parser a -> String -> Result a
parse p s = runParser p s (\_ msg -> ParseFail msg) (\b a -> ParseOK b a)
------------------------------------------------------------
getMore :: Parser ()
getMore = Parser $ \buf err ok -> ParseMore $ \nextChunk ->
if null nextChunk
then err buf "EOL: need more data"
else ok (buf ++ nextChunk) ()
------------------------------------------------------------
isEndOfBuff :: Parser Bool
isEndOfBuff = Parser $ \buf _ ok ->
case buf of
[] -> ok buf True
_ -> ok buf False
-- | Get the next character from the parser
anyChar :: Parser Char
anyChar = Parser $ \buf err ok ->
case buf of
[] -> runParser (getMore >> anyChar) buf err ok
c1:b2 -> ok b2 c1
-- | Parse a specific character at the current position
--
-- if the character is different from the expected one,
-- this parser will raise a failure.
char :: Char -> Parser ()
char w = Parser $ \buf err ok ->
case buf of
[] -> runParser (getMore >> char w) buf err ok
c1:b2 | c1 == w -> ok b2 ()
| otherwise -> err buf ("byte " ++ show w ++ " : failed")
-- | Parse a sequence of characters from the current position
--
-- if the following characters don't match the expected
-- string completely, the parser will raise a failure
string :: String -> Parser ()
string allExpected = consumeEq allExpected
where errMsg = "string " ++ show allExpected ++ " : failed"
-- partially consume as much as possible or raise an error.
consumeEq expected = Parser $ \actual err ok ->
case isMatch actual expected of
Left e -> err actual e
Right aRem -> ok aRem ()
isMatch [] [] = Right ""
isMatch r [] = Right r
isMatch [] _ = Left (errMsg ++ " : too short")
isMatch (x:xs) (y:ys)
| x == y = isMatch xs ys
| otherwise = Left (errMsg ++ " : mismatch")
------------------------------------------------------------
-- | Take @n characters from the current position in the stream
--
-- FIXME optimize
take :: Int -> Parser [Char]
take n = Parser $ \buf err ok ->
let (b1,b2) = splitAt n buf
in if length b1 == n
then ok b2 b1
else runParser (getMore >> take n) buf err ok
-- | Take characters while the @predicate holds from the current position in the stream
takeWhile :: (Char -> Bool) -> Parser [Char]
takeWhile predicate = Parser $ \buf err ok ->
case span predicate buf of
(_, "") -> runParser (getMore >> takeWhile predicate) buf err ok
(b1, b2) -> ok b2 b1
-- | Skip @n characters from the current position in the stream
skip :: forall a. (Num a, Eq a) => a -> Parser ()
skip num = Parser $ \buf err ok ->
case dropOf num buf of
Left n' -> runParser (getMore >> skip n') "" err ok
Right buf' -> ok buf' ()
where dropOf 0 s = Right s
dropOf n [] = Left n
dropOf n (_:xs) = dropOf (n-1) xs
-- | Skip characters while the @predicate holds from the current position in the stream
skipWhile :: (Char -> Bool) -> Parser ()
skipWhile p = Parser $ \buf err ok ->
case span p buf of
(_, "") -> runParser (getMore >> skipWhile p) [] err ok
(_, b2) -> ok b2 ()
| NicolasDP/hop | Data/String/Parse.hs | bsd-3-clause | 6,100 | 0 | 15 | 1,699 | 1,807 | 946 | 861 | 113 | 5 |
-----------------------------------------------------------------------------
-- |
-- Module : Text.JSON.ToJSON
-- Copyright : (c) Scrive 2011
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : mariusz@scrive.com
-- Stability : development
-- Portability : portable
--
-- Unifing some structures so they can be serialized to JSValue
--
module Text.JSON.ToJSValue (ToJSValue(..))where
import Data.Int
import Data.Map as M
import Data.Word
import Text.JSON
class ToJSValue a where
toJSValue :: a -> JSValue
instance ToJSValue JSValue where
toJSValue = id
instance ToJSValue Bool where
toJSValue = JSBool
instance {-# OVERLAPPING #-} ToJSValue String where
toJSValue = JSString . toJSString
instance ToJSValue Integer where
toJSValue = JSRational False . toRational
instance ToJSValue Int where
toJSValue = JSRational False . toRational
instance ToJSValue Int8 where
toJSValue = JSRational False . toRational
instance ToJSValue Int16 where
toJSValue = JSRational False . toRational
instance ToJSValue Int32 where
toJSValue = JSRational False . toRational
instance ToJSValue Int64 where
toJSValue = JSRational False . toRational
instance ToJSValue Word where
toJSValue = JSRational False . toRational
instance ToJSValue Word8 where
toJSValue = JSRational False . toRational
instance ToJSValue Word16 where
toJSValue = JSRational False . toRational
instance ToJSValue Word32 where
toJSValue = JSRational False . toRational
instance ToJSValue Word64 where
toJSValue = JSRational False . toRational
instance ToJSValue Double where
toJSValue = JSRational False . toRational
instance ToJSValue Float where
toJSValue = JSRational True . toRational
instance ToJSValue a => ToJSValue [a] where
toJSValue = JSArray . fmap toJSValue
instance ToJSValue a => ToJSValue (M.Map String a) where
toJSValue = JSObject . toJSObject . M.toList . fmap toJSValue
instance ToJSValue a => ToJSValue (Maybe a) where
toJSValue = maybe JSNull toJSValue
instance (ToJSValue a, ToJSValue b) => ToJSValue (a,b) where
toJSValue (a,b) = JSArray [toJSValue a, toJSValue b]
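-- An illustrative use (not part of the original module):
--
-- > toJSValue (M.fromList [("xs", [1, 2, 3 :: Int])])
--
-- yields a 'JSObject' that maps "xs" to a 'JSArray' of numbers.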
| scrive/fields-json | src/Text/JSON/ToJSValue.hs | bsd-3-clause | 2,157 | 0 | 8 | 376 | 538 | 284 | 254 | 47 | 0 |
module Validations where
import Data.Text (Text)
import qualified Data.Text as T (null)
-- isAdult :: Monad m => Validator m String User
isNotEmpty :: Text -> Bool
isNotEmpty = not . T.null
isPhone :: Text -> Bool
isPhone _ = True
| vyorkin-archive/assignment | api/src/Validations.hs | mit | 235 | 0 | 6 | 45 | 65 | 39 | 26 | 7 | 1 |
{- | Module : ./GMP/GMP-CoLoSS/GMP/Logics/C.hs
- Description : Implementation of logic instance Coalition Logic
- Copyright : (c) Daniel Hausmann & Georgel Calin & Lutz Schroeder, DFKI Lab Bremen,
- Rob Myers & Dirk Pattinson, Department of Computing, ICL
- License : GPLv2 or higher, see LICENSE.txt
- Maintainer : hausmann@dfki.de
- Stability : provisional
- Portability : portable
-
- Provides the implementation of the matching functions of coalition logic.
-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
module GMP.Logics.C where
import Data.List
import Data.Ratio
import Data.Maybe
import Debug.Trace
import Text.ParserCombinators.Parsec
import GMP.Logics.Generic
import GMP.Parser
{- ------------------------------------------------------------------------------
instance of feature for coalition logic
------------------------------------------------------------------------------ -}
data C a = C [Int] [Formula a] deriving (Eq, Show)
agents :: Int; agents = 10
{- For each positive modal literal, there are possibly several premises,
containing only one sequent each. This sequent contains the stripped positive
literal and additional stripped literals which are computed by
'c_build_matches'. Also there is one additional premise, containing the sequent
of all negated stripped negative literals.
e.g. seq = [ (M (C [0,3]) p), !(M (C [0]) q), (M (C [0,2,3]) !p), !(M (C [1]) !r)]
match seq = [ [[ p, !p, !q ]], [[ p, !p, !q]], [[!q, r]] ] -}
instance (SigFeature b c d, Eq (b (c d)), Eq (c d)) => NonEmptyFeature C b c d where
nefMatch flags seq = let neglits = [ Neg phi | Neg (Mod (C s [phi])) <- seq ]
poslits = [ phi | Mod (C s [phi]) <- seq ]
in [ [[Sequent (c_match ++ poslits)]] |
c_match <- c_build_matches (keep_poslits seq)
(keep_neglits seq)] ++ [[[Sequent neglits]]]
nefPretty d = case d of
C l [] -> "[C]" ++ show l ++ "nothing contained"
C l e -> "[C]" ++ show l ++ pretty (head e)
nefFeatureFromSignature sig = C [1]
nefFeatureFromFormula phi = C [1]
nefStripFeature (C i phis) = phis
nefDisj2Conj (Mod (C l phi)) = Mod (C l [disj2conj (head phi)])
nefNegNorm (Mod (C l phi)) = Mod (C l [negNorm (head phi)])
nefParser sig = do -- checks whether there are more numbers to be parsed
let stopParser = do char ','
return False
<|> do char '}'
return True
<?> "Parser.parseCindex.stop"
-- checks whether the index is of the form x1,..,x&
let normalParser l = do x <- natural
let n = fromInteger x
spaces
q <- stopParser
spaces
if q then normalParser (n : l)
else return (n : l)
<?> "Parser.parseCindex.normal"
char '{'
res <- try (normalParser [])
return $ C res
<|> do -- checks whether the index is of the form "n..m"
let shortParser = do x <- natural
let n = fromInteger x
spaces
string ".."
spaces
y <- natural
let m = fromInteger y
return [n .. m]
<?> "Parser.parseCindex.short"
res <- try shortParser
return $ C res
<?> "Parser.parseCindex"
{- ------------------------------------------------------------------------------
additional functions for the matching function of this logic
------------------------------------------------------------------------------ -}
-- Form negative literal parts of matching premise for positive literals
c_build_matches :: (Eq b) => [Formula (C b)] -> [Formula (C b)] -> [[Formula b]]
c_build_matches [] _ = []
c_build_matches ( Mod (C pset pphi) : pls) nls =
let relevant_neglits = filter (\ (Neg (Mod (C s _))) -> ((s `intersect` pset) == s)) nls
relevant_ncoalitions = nub $ map (\ (Neg (Mod (C s _))) -> s) relevant_neglits
maximal_pw_dis_lists = rm_sublists $ sortBy compare_length
(filter pairwise_disjunct
(powerList relevant_ncoalitions))
negmats = [ [Neg phi] | [Neg (Mod (C s [phi]))] <-
concatMap (build_lit_lists relevant_neglits) maximal_pw_dis_lists]
in map ([head pphi] ++) negmats ++ c_build_matches pls nls
{- Given a list of negative literals and a list of pairwise disjunctive lists, form pairwise
disjunctive lists of the corresponding literals -}
build_lit_lists :: (Eq b) => [Formula (C b)] -> [[Int]] -> [[Formula (C b)]]
build_lit_lists _ [] = [[]]
build_lit_lists lits (set : sets) =
let relevant_neglits = filter (\ (Neg (Mod (C t _))) -> set == t) lits
in if null relevant_neglits then [] else
map ([head relevant_neglits] ++) (build_lit_lists lits sets)
++ build_lit_lists (lits \\ [head relevant_neglits]) (set : sets)
-- Does the list contain only pairwise disjunct lists?
pairwise_disjunct :: (Eq a) => [[a]] -> Bool
pairwise_disjunct [] = True
pairwise_disjunct (x : xs) = all (\ y -> null (x `intersect` y)) xs &&
pairwise_disjunct xs
-- Remove sublists (i.e. keep only maximal lists). Requires input to be sorted
rm_sublists :: (Eq a) => [[a]] -> [[a]]
rm_sublists [] = []
rm_sublists (x : xs) | any (\ y -> x `intersect` y == x) xs = rm_sublists xs
| otherwise = x : rm_sublists xs
-- Compare lists by size.
compare_length :: [a] -> [a] -> Ordering
compare_length s t = if length s < length t then LT else GT
{- ------------------------------------------------------------------------------
instance of sigFeature for coalition logic
------------------------------------------------------------------------------ -}
instance (SigFeature b c d, Eq (c d), Eq (b (c d))) => NonEmptySigFeature C b c d where
neGoOn = genericPGoOn
| spechub/Hets | GMP/GMP-CoLoSS/GMP/Logics/C.hs | gpl-2.0 | 6,773 | 0 | 19 | 2,351 | 1,573 | 803 | 770 | 85 | 2 |
--
-- Data vault for metrics
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -fno-warn-type-defaults #-}
module Main where
import System.ZMQ4.Monadic
import Test.Hspec hiding (pending)
import Control.Concurrent
import Data.HashMap.Strict (fromList)
import Data.Maybe
import Data.String
import Data.Text
import Network.URI
import Pipes.Prelude (toListM)
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Test.QuickCheck.Monadic (assert, monadicIO, run)
import Marquise.Client
import TestHelpers
import Vaultaire.Broker
import Vaultaire.Contents
import Vaultaire.Daemon
import Vaultaire.Util
startDaemons :: IO ()
startDaemons = do
quit <- newEmptyMVar
linkThread $ do
runZMQ $ startProxy (Router,"tcp://*:5580")
(Dealer,"tcp://*:5581") "tcp://*:5008"
readMVar quit
args <- daemonArgsDefault (fromJust $ parseURI "tcp://localhost:5581")
Nothing "test" quit
linkThread $ startContents args
main :: IO ()
main = do
startDaemons
hspec suite
suite :: Spec
suite = do
-- TODO: This does not belong here, move to another test at the least.
-- The reason for encodeAddressToString and decodeStringAsAddress beyond
-- Show and IsString is questionable. Is this made use of anywhere? Perhaps
-- we can remove it before we have to maintain it.
describe "Addresses" $ do
it "encodes an address in base62" $ do
show (0 :: Address) `shouldBe` "00000000000"
show (2^64-1 :: Address) `shouldBe` "LygHa16AHYF"
show (minBound :: Address) `shouldBe` "00000000000"
show (maxBound :: Address) `shouldBe` "LygHa16AHYF"
it "decodes an address from base62" $ do
fromString "00000000000" `shouldBe` (0 :: Address)
fromString "00000000001" `shouldBe` (1 :: Address)
fromString "LygHa16AHYF" `shouldBe` ((2^64-1) :: Address)
fromString "LygHa16AHYG" `shouldBe` (0 :: Address)
describe "Full stack" $ do
it "unions two dicts" $ do
let dict_a = listToDict [("a", "1")]
let dict_b = listToDict [("a", "2")]
let addr = 1
cleanupTestEnvironment
let o = Origin "PONY"
xs <- withContentsConnection "localhost" $ \c -> do
updateSourceDict addr dict_a o c
updateSourceDict addr dict_b o c
toListM (enumerateOrigin o c)
case xs of
[(addr', dict)] -> do
dict `shouldBe` dict_b
addr' `shouldBe` addr
_ -> error "expected one"
prop "updates source dict for any address" propSourceDictUpdated
listToDict :: [(Text, Text)] -> SourceDict
listToDict elts = either error id . makeSourceDict $ fromList elts
propSourceDictUpdated :: Address -> SourceDict -> Property
propSourceDictUpdated addr dict = monadicIO $ do
xs <- run $ do
-- Clear out ceph
cleanupTestEnvironment
let o = Origin "PONY"
withContentsConnection "localhost" $ \c -> do
updateSourceDict addr dict o c
toListM (enumerateOrigin o c)
case xs of
[(addr', dict')] -> assert (addr' == addr && dict' == dict)
_ -> error "expected one"
| afcowie/vaultaire | tests/ContentsTest.hs | bsd-3-clause | 3,651 | 0 | 20 | 1,027 | 850 | 444 | 406 | 79 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-- | This module only exports orphan 'Store.Store' instances. Import as:
--
-- @
-- import "Money.Store" ()
-- @
module Money.Store () where
import Control.Monad (when)
import Data.Ratio ((%), numerator, denominator)
import GHC.TypeLits (KnownSymbol)
import qualified Data.Store as Store
import qualified Money
import qualified Money.Internal as MoneyI
--------------------------------------------------------------------------------
-- | Compatible with 'Money.SomeDense'.
instance (KnownSymbol currency) => Store.Store (Money.Dense currency) where
size = storeContramapSize Money.toSomeDense Store.size
poke = Store.poke . Money.toSomeDense
peek = maybe (fail "peek") pure =<< fmap Money.fromSomeDense Store.peek
-- | Compatible with 'Money.Dense'.
instance Store.Store Money.SomeDense where
poke = \sd -> do
Store.poke (MoneyI.someDenseCurrency' sd)
let r = Money.someDenseAmount sd
Store.poke (numerator r)
Store.poke (denominator r)
peek = maybe (fail "peek") pure =<< do
c :: String <- Store.peek
n :: Integer <- Store.peek
d :: Integer <- Store.peek
when (d == 0) (fail "denominator is zero")
pure (MoneyI.mkSomeDense' c (n % d))
-- | Compatible with 'Money.SomeDiscrete'.
instance
( KnownSymbol currency, Money.GoodScale scale
) => Store.Store (Money.Discrete' currency scale) where
size = storeContramapSize Money.toSomeDiscrete Store.size
poke = Store.poke . Money.toSomeDiscrete
peek = maybe (fail "peek") pure =<< fmap Money.fromSomeDiscrete Store.peek
instance Store.Store Money.Scale where
poke = \s -> do
let r = Money.scaleToRational s
Store.poke (numerator r)
Store.poke (denominator r)
peek = maybe (fail "peek") pure =<< do
n :: Integer <- Store.peek
d :: Integer <- Store.peek
when (d == 0) (fail "denominator is zero")
pure (Money.scaleFromRational (n % d))
-- | Compatible with 'Money.Discrete''.
instance Store.Store Money.SomeDiscrete where
poke = \sd -> do
Store.poke (MoneyI.someDiscreteCurrency' sd)
Store.poke (Money.someDiscreteScale sd)
Store.poke (Money.someDiscreteAmount sd)
peek = do
-- We go through String for backwards compatibility.
c :: String <- Store.peek
s :: Money.Scale <- Store.peek
a :: Integer <- Store.peek
pure (MoneyI.mkSomeDiscrete' c s a)
-- | Compatible with 'Money.SomeExchangeRate'.
instance
( KnownSymbol src, KnownSymbol dst
) => Store.Store (Money.ExchangeRate src dst) where
size = storeContramapSize Money.toSomeExchangeRate Store.size
poke = Store.poke . Money.toSomeExchangeRate
peek = maybe (fail "peek") pure =<< fmap Money.fromSomeExchangeRate Store.peek
-- | Compatible with 'ExchangeRate'.
instance Store.Store Money.SomeExchangeRate where
poke = \ser -> do
Store.poke (MoneyI.someExchangeRateSrcCurrency' ser)
Store.poke (MoneyI.someExchangeRateDstCurrency' ser)
let r = Money.someExchangeRateRate ser
Store.poke (numerator r)
Store.poke (denominator r)
peek = maybe (fail "peek") pure =<< do
src :: String <- Store.peek
dst :: String <- Store.peek
n :: Integer <- Store.peek
d :: Integer <- Store.peek
when (d == 0) (fail "denominator is zero")
pure (MoneyI.mkSomeExchangeRate' src dst (n % d))
storeContramapSize :: (a -> b) -> Store.Size b -> Store.Size a
storeContramapSize f = \case
Store.VarSize g -> Store.VarSize (g . f)
Store.ConstSize x -> Store.ConstSize x
{-# INLINABLE storeContramapSize #-}
| k0001/haskell-money | safe-money-store/src/Money/Store.hs | bsd-3-clause | 3,699 | 0 | 13 | 660 | 1,117 | 560 | 557 | 80 | 2 |
module Tyckiting.AI.Timid (timidAI) where
import Prelude ()
import Prelude.Compat
import Control.Monad.Random (uniform)
import Data.List (nub)
import Data.Monoid (First (..))
import Tyckiting.Action
import Tyckiting.AI
import Tyckiting.Event
import Tyckiting.Position
import Tyckiting.Types
-- Timid doesn't have any memory.
type TimidState = ()
-- And it doesn't do any precalc.
mkInitialState :: GameConfig -> IO TimidState
mkInitialState _ = return ()
-- Randomise where we could move.
moveBot :: Bot WithHpAndPos -> NDAIMonad TimidState Action
moveBot bot = do
gameConfig <- gsGameConfig <$> askGameState
MoveAction (botId bot) <$> uniform (possibleMoves gameConfig)
where
pos = botPosition bot
possibleMoves gameConfig
= nub
. map (clamp (cfgFieldRadius gameConfig))
. neighbours (cfgMove gameConfig)
$ pos
cannonAt :: Position -> Bot w -> NDAIMonad TimidState Action
cannonAt pos bot = return $ CannonAction (botId bot) pos
-- | Return the first `Just` value predicate returns.
lookupFirst :: Foldable f => (a -> Maybe b) -> f a -> Maybe b
lookupFirst p = getFirst . foldMap (First . p)
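-- For example:
--
-- > lookupFirst (\x -> if x > 2 then Just x else Nothing) [1 .. 4 :: Int] == Just 3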
seeAnyone :: [Event] -> Maybe Position
seeAnyone = lookupFirst p
where
p (SeeEvent _ pos) = Just pos
p _ = Nothing
-- Actual move function
move :: NDAIMonad TimidState [Action]
move = do
-- Ask for info
ownBots <- teamBots . gsYourTeam <$> askGameState :: NDAIMonad TimidState [Bot WithHpAndPos]
es <- askEvents
-- Log
tellShow ownBots
tellShow es
-- Cannon if we see anyone
as <- case seeAnyone es of
Just pos -> traverse (cannonAt pos) ownBots
Nothing -> traverse moveBot ownBots
tellShow as
return as
-- | Timid doesn't do much. It just moves around.
timidAI :: AI
timidAI = mkNDAI move mkInitialState
| vvmann/tyckiting-bot | clients/haskell/src/Tyckiting/AI/Timid.hs | mit | 1,869 | 0 | 14 | 436 | 534 | 271 | 263 | 45 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PatternSynonyms #-}
{- |
Module : Data.ML.Model
Description : Model class.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : pawel834@gmail.com
Stability : experimental
-}
module Data.ML.Model where
import Data.Bytes.Serial
import Data.ML.Internal.Product
import Data.Vector
import Linear
-- | A machine learning model.
class (Functor (Input m), Functor (Output m)) => Model m where
type Input m :: * -> *
type Output m :: * -> *
predict :: Floating a => Input m a -> m a -> Output m a
-- | A data set is a vector of (input, output) pairs.
type DataSet m a = Vector (Input m a, Output m a)
-- | Generates a model with an applicative.
generate :: (Applicative f, Applicative g, Traversable g) => f a -> f (g a)
generate f = traverse (const f) (pure ())
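-- Illustrative sketch (not part of the original module, added for exposition):
-- with the list applicative and a two-component shape, 'generate' enumerates
-- every combination of parameters. The call is qualified with this module's
-- own name to avoid the clash with 'Data.Vector.generate', which is also in
-- scope here.
_allParams :: [V2 Double]
_allParams = Data.ML.Model.generate [0, 1]
-- _allParams == [V2 0 0, V2 0 1, V2 1 0, V2 1 1]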
-- | Chaining of models.
newtype (f :>> g) a = Chain (Product f g a)
deriving (Functor, Applicative, Foldable, Traversable, Additive, Metric)
pattern f :>> g = Chain (Pair f g)
instance (Serial1 f, Serial1 g) => Serial1 (f :>> g) where
serializeWith f (Chain m) = serializeWith f m
deserializeWith f = Chain <$> deserializeWith f
instance (Model f, Model g, Output f ~ Input g) => Model (f :>> g) where
type Input (f :>> g) = Input f
type Output (f :>> g) = Output g
predict x (f :>> g) = predict (predict x f) g
| bitemyapp/machine-learning | src/Data/ML/Model.hs | mit | 1,517 | 6 | 11 | 327 | 430 | 238 | 192 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Redshift.DescribeOrderableClusterOptions
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns a list of orderable cluster options. Before you create a new cluster
-- you can use this operation to find what options are available, such as the
-- EC2 Availability Zones (AZ) in the specific AWS region that you can specify,
-- and the node types you can request. The node types differ by available
-- storage, memory, CPU and price. With the cost involved you might want to
-- obtain a list of cluster options in the specific region and specify values
-- when creating a cluster. For more information about managing clusters, go to <http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html Amazon Redshift Clusters> in the /Amazon Redshift Cluster Management Guide/
--
-- <http://docs.aws.amazon.com/redshift/latest/APIReference/API_DescribeOrderableClusterOptions.html>
module Network.AWS.Redshift.DescribeOrderableClusterOptions
(
-- * Request
DescribeOrderableClusterOptions
-- ** Request constructor
, describeOrderableClusterOptions
-- ** Request lenses
, docoClusterVersion
, docoMarker
, docoMaxRecords
, docoNodeType
-- * Response
, DescribeOrderableClusterOptionsResponse
-- ** Response constructor
, describeOrderableClusterOptionsResponse
-- ** Response lenses
, docorMarker
, docorOrderableClusterOptions
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.Redshift.Types
import qualified GHC.Exts
data DescribeOrderableClusterOptions = DescribeOrderableClusterOptions
{ _docoClusterVersion :: Maybe Text
, _docoMarker :: Maybe Text
, _docoMaxRecords :: Maybe Int
, _docoNodeType :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeOrderableClusterOptions' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'docoClusterVersion' @::@ 'Maybe' 'Text'
--
-- * 'docoMarker' @::@ 'Maybe' 'Text'
--
-- * 'docoMaxRecords' @::@ 'Maybe' 'Int'
--
-- * 'docoNodeType' @::@ 'Maybe' 'Text'
--
describeOrderableClusterOptions :: DescribeOrderableClusterOptions
describeOrderableClusterOptions = DescribeOrderableClusterOptions
{ _docoClusterVersion = Nothing
, _docoNodeType = Nothing
, _docoMaxRecords = Nothing
, _docoMarker = Nothing
}
-- | The version filter value. Specify this parameter to show only the available
-- offerings matching the specified version.
--
-- Default: All versions.
--
-- Constraints: Must be one of the version returned from 'DescribeClusterVersions'
-- .
docoClusterVersion :: Lens' DescribeOrderableClusterOptions (Maybe Text)
docoClusterVersion =
lens _docoClusterVersion (\s a -> s { _docoClusterVersion = a })
-- | An optional parameter that specifies the starting point to return a set of
-- response records. When the results of a 'DescribeOrderableClusterOptions'
-- request exceed the value specified in 'MaxRecords', AWS returns a value in the 'Marker' field of the response. You can retrieve the next set of response records by
-- providing the returned marker value in the 'Marker' parameter and retrying the
-- request.
docoMarker :: Lens' DescribeOrderableClusterOptions (Maybe Text)
docoMarker = lens _docoMarker (\s a -> s { _docoMarker = a })
-- | The maximum number of response records to return in each call. If the number
-- of remaining response records exceeds the specified 'MaxRecords' value, a value
-- is returned in a 'marker' field of the response. You can retrieve the next set
-- of records by retrying the command with the returned marker value.
--
-- Default: '100'
--
-- Constraints: minimum 20, maximum 100.
docoMaxRecords :: Lens' DescribeOrderableClusterOptions (Maybe Int)
docoMaxRecords = lens _docoMaxRecords (\s a -> s { _docoMaxRecords = a })
-- | The node type filter value. Specify this parameter to show only the
-- available offerings matching the specified node type.
docoNodeType :: Lens' DescribeOrderableClusterOptions (Maybe Text)
docoNodeType = lens _docoNodeType (\s a -> s { _docoNodeType = a })
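-- Illustrative sketch (not part of the generated binding, added for
-- exposition): a request for a single page of up to 20 offerings of a given
-- node type, built from the default constructor with the lenses above. The
-- node type string is only an example value.
_exampleRequest :: DescribeOrderableClusterOptions
_exampleRequest = describeOrderableClusterOptions
    & docoNodeType   ?~ "dw2.large"
    & docoMaxRecords ?~ 20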
data DescribeOrderableClusterOptionsResponse = DescribeOrderableClusterOptionsResponse
{ _docorMarker :: Maybe Text
, _docorOrderableClusterOptions :: List "member" OrderableClusterOption
} deriving (Eq, Read, Show)
-- | 'DescribeOrderableClusterOptionsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'docorMarker' @::@ 'Maybe' 'Text'
--
-- * 'docorOrderableClusterOptions' @::@ ['OrderableClusterOption']
--
describeOrderableClusterOptionsResponse :: DescribeOrderableClusterOptionsResponse
describeOrderableClusterOptionsResponse = DescribeOrderableClusterOptionsResponse
{ _docorOrderableClusterOptions = mempty
, _docorMarker = Nothing
}
-- | A value that indicates the starting point for the next set of response
-- records in a subsequent request. If a value is returned in a response, you
-- can retrieve the next set of records by providing this returned marker value
-- in the 'Marker' parameter and retrying the command. If the 'Marker' field is
-- empty, all response records have been retrieved for the request.
docorMarker :: Lens' DescribeOrderableClusterOptionsResponse (Maybe Text)
docorMarker = lens _docorMarker (\s a -> s { _docorMarker = a })
-- | An 'OrderableClusterOption' structure containing information about orderable
-- options for the Cluster.
docorOrderableClusterOptions :: Lens' DescribeOrderableClusterOptionsResponse [OrderableClusterOption]
docorOrderableClusterOptions =
lens _docorOrderableClusterOptions
(\s a -> s { _docorOrderableClusterOptions = a })
. _List
instance ToPath DescribeOrderableClusterOptions where
toPath = const "/"
instance ToQuery DescribeOrderableClusterOptions where
toQuery DescribeOrderableClusterOptions{..} = mconcat
[ "ClusterVersion" =? _docoClusterVersion
, "Marker" =? _docoMarker
, "MaxRecords" =? _docoMaxRecords
, "NodeType" =? _docoNodeType
]
instance ToHeaders DescribeOrderableClusterOptions
instance AWSRequest DescribeOrderableClusterOptions where
type Sv DescribeOrderableClusterOptions = Redshift
type Rs DescribeOrderableClusterOptions = DescribeOrderableClusterOptionsResponse
request = post "DescribeOrderableClusterOptions"
response = xmlResponse
instance FromXML DescribeOrderableClusterOptionsResponse where
parseXML = withElement "DescribeOrderableClusterOptionsResult" $ \x -> DescribeOrderableClusterOptionsResponse
<$> x .@? "Marker"
<*> x .@? "OrderableClusterOptions" .!@ mempty
instance AWSPager DescribeOrderableClusterOptions where
page rq rs
| stop (rs ^. docorMarker) = Nothing
| otherwise = (\x -> rq & docoMarker ?~ x)
<$> (rs ^. docorMarker)
| romanb/amazonka | amazonka-redshift/gen/Network/AWS/Redshift/DescribeOrderableClusterOptions.hs | mpl-2.0 | 7,915 | 0 | 12 | 1,502 | 814 | 490 | 324 | 85 | 1 |
--
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE FlexibleContexts #-}
module XenMgr.PowerManagement (
initPm
, isLidClosed
, isAcAdapter
, getBatteryLevel
, getVmsToDehibernate
, PMAction (..)
, PMSettings (..)
, executePmAction
, pmGetSettings
, pmSaveSettings
, pmSaveBatteryLidCloseAction
, pmSaveAcLidCloseAction
, pmActionToStr
, pmActionOfStr
, pmShutdownVms
, hostWhenIdle
, hostWhenIdleDoWithState
, pmGetScreenRestoreVm
, pmSetScreenRestoreVm
, pmClearScreenRestoreVm
) where
import Control.Monad
import Control.Monad.Error hiding (liftIO)
import Control.Applicative
import Control.Concurrent
import Control.Exception
import System.Process (readProcessWithExitCode)
import Data.String
import Data.Maybe
import Data.IORef
import Data.List (nub)
import qualified Data.Text.Lazy as TL
import Tools.Log
import Tools.XenStore
import Tools.File
import Tools.Misc
import Tools.Process
import Tools.IfM
import Tools.Future
import XenMgr.Errors
import XenMgr.Config
import XenMgr.Db
import XenMgr.Vm
import XenMgr.User
import XenMgr.Host
import XenMgr.XM
import Vm.Queries
import Vm.State
import XenMgr.Rpc
import Rpc.Autogen.SurfmanClient
import qualified XenMgr.Connect.Xenvm as Xenvm
import XenMgr.Connect.InputDaemon
data PMAction = ActionSleep
| ActionHibernate
| ActionShutdown
| ActionForcedShutdown
| ActionReboot
| ActionNothing
deriving (Eq, Show)
data PMSettings = PMSettings { pmLidCloseACAction :: PMAction
, pmLidCloseBatteryAction :: PMAction }
deriving (Eq, Show)
data BatteryLevel = BatNormal | BatWarning | BatLow | BatCritical
deriving (Eq, Show)
instance Marshall PMAction where
dbRead path = dbReadStr path >>= pure . pmActionOfStr
dbWrite path a = dbWriteStr path (pmActionToStr a)
instance Marshall PMSettings where
dbRead path = do
acLidAction <- dbReadWithDefault ActionNothing (path ++ "/" ++ "ac-lid-close-action")
batteryLidAction <- dbReadWithDefault ActionNothing (path ++ "/" ++ "battery-lid-close-action")
return $ PMSettings { pmLidCloseACAction = acLidAction
, pmLidCloseBatteryAction = batteryLidAction }
dbWrite path v = do dbWrite (path ++ "/" ++ "ac-lid-close-action") $ pmLidCloseACAction v
dbWrite (path ++ "/" ++ "battery-lid-close-action") $ pmLidCloseBatteryAction v
pmActionOfStr "sleep" = ActionSleep
pmActionOfStr "hibernate" = ActionHibernate
pmActionOfStr "shutdown" = ActionShutdown
pmActionOfStr "forced-shutdown" = ActionForcedShutdown
pmActionOfStr "reboot" = ActionReboot
pmActionOfStr "nothing" = ActionNothing
pmActionOfStr _ = error "incorrect pm action specification"
pmActionToStr ActionSleep = "sleep"
pmActionToStr ActionHibernate = "hibernate"
pmActionToStr ActionShutdown = "shutdown"
pmActionToStr ActionForcedShutdown = "forced-shutdown"
pmActionToStr ActionReboot = "reboot"
pmActionToStr ActionNothing = "nothing"
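-- Illustrative sketch (not part of the original module, added for exposition):
-- the two conversions above are mutual inverses on the supported action
-- names, which is what the 'Marshall' instance for 'PMAction' relies on.
_pmActionRoundTrip :: Bool
_pmActionRoundTrip = pmActionOfStr (pmActionToStr ActionHibernate) == ActionHibernate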
pmGetSettings :: Rpc PMSettings
pmGetSettings = dbReadWithDefault PMSettings { pmLidCloseACAction = ActionNothing
, pmLidCloseBatteryAction = ActionNothing }
"/power-management"
pmSaveSettings :: PMSettings -> Rpc ()
pmSaveSettings s = dbWrite "/power-management" s
pmSaveBatteryLidCloseAction :: PMAction -> Rpc ()
pmSaveBatteryLidCloseAction = dbWrite "/power-management/battery-lid-close-action"
pmSaveAcLidCloseAction :: PMAction -> Rpc ()
pmSaveAcLidCloseAction = dbWrite "/power-management/ac-lid-close-action"
hostStateOfPmAction :: PMAction -> HostState
hostStateOfPmAction ActionSleep = HostGoingToSleep
hostStateOfPmAction ActionHibernate = HostGoingToHibernate
hostStateOfPmAction ActionReboot = HostRebooting
hostStateOfPmAction ActionShutdown = HostShuttingDown
hostStateOfPmAction ActionForcedShutdown = HostShuttingDown
hostStateOfPmAction ActionNothing = HostIdle
-- Do an operation if the host is idle
hostWhenIdle :: (MonadRpc e m) => m () -> m ()
hostWhenIdle action = getHostState >>= maybeExec
where maybeExec HostIdle = action
maybeExec _ = return ()
-- Do an operation which is allowed only if the host is idle (resetting it to idle afterwards)
hostWhenIdleDoWithState :: (MonadRpc e m) => HostState -> m () -> m ()
hostWhenIdleDoWithState newState action =
hostWhenIdle $
(do setHostState newState
action
setHostState HostIdle
) `catchError` (\e -> setHostState HostIdle >> throwError e)
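-- Illustrative sketch (not part of the original module, added for exposition):
-- the idle-state bracket above can wrap any action; here it merely logs while
-- the host is temporarily marked as going to sleep, then drops back to
-- 'HostIdle' (also on error).
_exampleIdleBracket :: XM ()
_exampleIdleBracket =
    hostWhenIdleDoWithState HostGoingToSleep $
        info "PM: example action running under HostGoingToSleep"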
isLidClosed :: IO Bool
isLidClosed = xsRead "/pm/lid_state" >>= pure . f where f (Just "0") = True
f _ = False
isAcAdapter :: IO Bool
isAcAdapter = xsRead "/pm/ac_adapter" >>= pure . f where f (Just "1") = True
f _ = False
getBatteryLevel :: IO BatteryLevel
getBatteryLevel =
xsRead "/pm/currentbatterylevel" >>= pure . f
where
f (Just "0") = BatNormal
f (Just "1") = BatWarning
f (Just "2") = BatLow
f (Just "3") = BatCritical
f _ = BatNormal
isAsleep :: Uuid -> Rpc Bool
isAsleep uuid = (== 3) <$> getVmAcpiState uuid
getCurrentPmAction :: Rpc (Maybe PMAction)
getCurrentPmAction =
liftIO $ xsRead "/xenmgr/pm-current-action" >>= return . f
where
f (Just "sleep") = Just ActionSleep
f (Just "hibernate") = Just ActionHibernate
f (Just "shutdown") = Just ActionShutdown
f (Just "forced-shutdown") = Just ActionForcedShutdown
f (Just "reboot") = Just ActionReboot
f _ = Nothing
setCurrentPmAction :: PMAction -> Rpc ()
setCurrentPmAction action =
liftIO $ xsWrite "/xenmgr/pm-current-action" (str action)
where
str ActionReboot = "reboot"
str ActionSleep = "sleep"
str ActionHibernate = "hibernate"
str ActionShutdown = "shutdown"
str ActionForcedShutdown = "forced-shutdown"
str ActionNothing = error "bad pm action"
clearCurrentPmAction :: Rpc ()
clearCurrentPmAction =
liftIO $ xsRm "/xenmgr/pm-current-action"
hdxRunning :: Rpc Bool
hdxRunning = (/= []) <$> getRunningHDX
pmGetScreenRestoreVm :: Rpc (Maybe Uuid)
pmGetScreenRestoreVm = dbMaybeRead "/xenmgr/pm-visible-vm"
pmClearScreenRestoreVm :: Rpc ()
pmClearScreenRestoreVm = dbRm "/xenmgr/pm-visible-vm"
-- doesn't store if any HDX vm running
pmSetScreenRestoreVm :: Uuid -> Rpc ()
pmSetScreenRestoreVm vm
= --whenM (not <$> hdxRunning) $
dbWrite "/xenmgr/pm-visible-vm" vm
storeVisibleVm :: Rpc ()
storeVisibleVm = from =<< getVisibleVms where
from (vm:_) = pmSetScreenRestoreVm vm
from _ = return ()
restoreVisibleVm :: Rpc Bool
restoreVisibleVm = from =<< pmGetScreenRestoreVm where
from (Just vm) = pmClearScreenRestoreVm >> switchVm vm
from _ = return False
pmShutdownVms :: Bool -> Rpc ()
pmShutdownVms force = do
vms <- getVmShutdownOrder
parallelVmExecInStages vms maybeShutdown
return ()
where
maybeShutdown uuid | force = whenM (isRunning uuid) $ forceShutdownVm uuid
| otherwise = do
acpi <- getVmAcpiState uuid
when (acpi == 3) $
do resumed <- resumeFromSleep uuid
when (not resumed) $ failResumeFromSleep
running <- isRunning uuid
when running $
do t <- getVmGraphics uuid
when (t == HDX) (switchVm uuid >> return())
                                 -- ensure the VM is considered dead internally (i.e. shutdown event handlers have run)
                                 -- by waiting for the internal Shutdown state for up to 3 secs
(shutdownVm uuid >> waitForVmInternalState uuid Shutdown 3)
`catchError` shutdownError
--FIXME! : should really translate xenvm errors into something better than strings
shutdownError err
| show err == "103:VM didn't shutdown as expected." = return ()
| otherwise = throwError err
-- common code for shutdown/reboot
shutdownCommon :: String -> Bool -> XM ()
shutdownCommon offCommand force = do
liftRpc $ pmShutdownVms force
liftIO $ do
info "PM: halting host now"
spawnShell offCommand
    -- wait for actual poweroff
threadDelay $ 360 * (10^6)
splitHDX :: [Uuid] -> Rpc ([Uuid], [Uuid])
splitHDX uuids = go [] [] uuids where
go no_hdx hdx [] = return (no_hdx,hdx)
go no_hdx hdx (uuid:uuids) =
do g <- getVmGraphics uuid
case g of
HDX -> go no_hdx ( hdx ++ [uuid] ) uuids
_ -> go ( no_hdx ++ [uuid] ) hdx uuids
onScreenHDX :: (MonadRpc e m) => Uuid -> m a -> m a
onScreenHDX uuid f
= go =<< getVmGraphics uuid where
go HDX = do
success <- reallySwitchVm uuid 10
when (not success) $ warn "FAILED to switch to HDX vm before putting it into S3"
f
go _ = f
-- returns True if vm was put to sleep here, False if that was not necessary (because
-- it already was in S3 for example)
putS3 :: Uuid -> XM Bool
putS3 uuid = putS3' uuid =<< liftRpc (getVmS3Mode uuid)
putS3' uuid S3Ignore = return True
putS3' uuid S3Pv = liftRpc $ do
acpi <- getVmAcpiState uuid
if (acpi /= 3)
then onScreenHDX uuid $ do
info ("PM: putting " ++ show uuid ++ " to sleep") >> sleepVm uuid
return True
else return False
putS3' uuid S3Restart = liftRpc $ do
acpi <- getVmAcpiState uuid
if (acpi /= 3)
then onScreenHDX uuid $ do
info ("PM: shutting " ++ show uuid ++ " down") >> shutdownVm uuid
return True
else return False
putS3' uuid m = error ("s3 mode " ++ show m ++ " unimplemented")
resumeS3 :: Uuid -> XM ()
resumeS3 uuid = resumeS3' uuid =<< liftRpc (getVmS3Mode uuid)
resumeS3' uuid S3Ignore = return ()
resumeS3' uuid S3Pv = do
void . liftRpc $ Xenvm.resumeFromSleep uuid
info $ "PM: Successfully resumed " ++ show uuid ++ " from S3"
resumeS3' uuid S3Restart = do
startVm uuid
info $ "PM: Restarted " ++ show uuid ++ " after S3"
resumeS3' uuid S3Snapshot =
return () -- unimplemented
putS4 :: Uuid -> XM ()
putS4 uuid = putS4' uuid =<< liftRpc (getVmS4Mode uuid)
putS4' uuid S4Ignore = return ()
putS4' uuid S4Pv = liftRpc $ do
addons <- getVmPvAddons uuid
  -- FIXME: this definitely needs cleaning up with respect to which actions require preemptive pv-addons db check,
-- which require preemptive pv-addons check via hypercall (or xs lookup),
-- which are fine to fail post-factum
when (not addons) $ do
warn $ "PM: VM " ++ show uuid ++ " has no PV addons!"
failActionRequiresPvAddons
info $ "PM: attempt to hibernate " ++ show uuid
hibernateVm uuid
-- ^ above sets the 'hibernated' db flag for us
putS4' uuid S4Restart = liftRpc $ do
info ("PM: shutting " ++ show uuid ++ " down")
shutdownVm uuid
-- manually toggle S4 flag to induce proper restart action on next host boot
saveConfigProperty uuid vmHibernated True
putS4' uuid m = error ("s4 mode " ++ show m ++ " unimplemented")
-- We allow only one PM action to be run at a time
executePmAction :: PMAction -> XM ()
executePmAction action = executePmActionInternal action True -- supervised by default
executePmActionInternal :: PMAction -> Bool -> XM ()
executePmActionInternal action supervised =
when (action /= ActionNothing) $ do
info $ "PM: received pm action request: " ++ show action
current <- liftRpc getCurrentPmAction
case current of
Just c -> info $ "PM: but pm action " ++ show c ++ " is currently running, so cannot do"
Nothing -> (do liftRpc $ setCurrentPmAction action
execute_ action supervised
liftRpc $ clearCurrentPmAction)
`catchError` \err -> do
liftRpc $ clearCurrentPmAction
throwError err
execute_ ActionReboot supervised = do
info "PM: received host reboot request"
shutdownCommon "reboot" False
execute_ ActionShutdown supervised = do
info "PM: received host shutdown request"
shutdownCommon "poweroff" False
execute_ ActionForcedShutdown supervised = do
info "PM: received host force shutdown request"
setHostState HostShuttingDown
shutdownCommon "poweroff" True
execute_ ActionSleep supervised = do
info "PM: received host sleep request"
queue <- filter (\(uuid,slept) -> slept == True) <$> putVmsToSleep
liftRpc (whenM inputAuthOnBoot $ info "PM: locking screen" >> inputLock)
info "PM: expiring user sessions"
liftRpc expireSessions
-- info "PM: logging out of synchroniser"
-- liftRpc $ comCitrixXenclientBedUserLogout "com.citrix.xenclient.bed" "/"
info "PM: executing surfman pre-s3"
liftRpc $ comCitrixXenclientSurfmanPreS3 "com.citrix.xenclient.surfman" "/"
info "PM: executing s3 suspend script"
liftIO $ spawnShell "/usr/share/xenclient/enter-s3.sh"
info "PM: resumed host from s3"
info "PM: executing surfman post-s3"
liftRpc $ comCitrixXenclientSurfmanPostS3 "com.citrix.xenclient.surfman" "/"
hdx <- liftRpc getRunningHDX
-- FIXME: we probably should have separate 'host-resuming-from-sleep-state' here
liftRpc $ setHostState HostIdle
-- Resume all vms
resumeQueue (map fst queue)
when (hdx == []) $ do
restored <- liftRpc $ restoreVisibleVm
when (not restored) $ void $ liftRpc $ switchGraphicsFallback
liftRpc $ pmClearScreenRestoreVm
where
    -- stage S3 so non-HDX VMs go to sleep first in parallel, then all HDX VMs
putVmsToSleep = xmContext >>= \xm -> liftRpc $ do
guests <- getGuestVms
-- FIXME: remove this by making all vms (not just user vms) go thru this pipeline
let needsS3Restart uuid = (`elem` [ S3Restart ]) <$> getVmS3Mode uuid
more <- getVmsBy needsS3Restart
(no_hdx, hdx) <- splitHDX guests
parallelVmExecInStages [ no_hdx, hdx, more ] (sleep xm)
sleep xm uuid = go =<< isRunning uuid where
go True = runXM xm $ putS3 uuid
go _ = return False
resumeQueue = mapM resumeS3
expireSessions =
do users <- enumActiveUsers
mapM expire users
where
expire u = info ("PM: terminating user session " ++ show u) >> expireUserSession u
execute_ ActionHibernate supervised = do
info "PM: received host hibernate request"
liftRpc $ maybeSwitch =<< getRunningHDX
    -- execute the hibernate request in parallel for all VMs (but the PVM always last)
    -- if it fails, raise a nice error message
parallelHib `catchError` \err -> failHibernateFailed
-- now hibernated vms are already shut down, run common shutdown code to get rid of service vms
liftRpc $ dbWrite "/platform/hibernated" "true"
shutdownCommon "poweroff" False
where
maybeSwitch [] = return ()
maybeSwitch (uuid:_) = switchVm uuid >> return ()
parallelHib = xmContext >>= \xm -> liftRpc $
do guests <- getGuestVms
(no_hdx,hdx) <- splitHDX guests
parallelVmExecInStages [no_hdx,hdx] (attempt xm)
attempt xm uuid = go =<< isRunning uuid where
go True = (runXM xm $ putS4 uuid) `catchError` on_vm_hib_error uuid
go _ = return ()
-- what to do on VM hibernate error:
-- If we are supervised by the user, propagate exception during hibernate.
-- otherwise, swallow it, shutdown VM and continue with hibernate sequence
on_vm_hib_error uuid err | supervised == True = do warn $ "PM: problem when trying to hibernate " ++ show uuid ++ ", aborting sequence"
throwError err
| otherwise = do warn $ "PM: problem when trying to hibernate, shutting it down instead: " ++ show uuid ++ ": " ++ show err
shutdownVm uuid
execute_ ActionNothing supervised = return ()
isTXTLaunch :: IO Bool
isTXTLaunch =
do (_, out, _) <- readProcessWithExitCode "txt-stat" [] ""
let out' = TL.pack out
case (TL.find flag'a out', TL.find flag'b out') of
((_, ma), (_, mb)) | not (null ma) && not (null mb)
-> return True
_ -> return False
where
flag'a = TL.pack $ "TXT measured launch: TRUE"
flag'b = TL.pack $ "secrets flag set: TRUE"
assertNotTXTLaunch :: Rpc ()
assertNotTXTLaunch = from =<< liftIO isTXTLaunch where
from False = return ()
from _ = failCannotSleepInTXTMeasuredLaunch
-- Resume all vms from hibernation
getVmsToDehibernate :: Rpc [Uuid]
getVmsToDehibernate = do
-- only do this if autostart enabled
autostart <- appAutoStart
if not autostart
then return []
else getGuestVms >>=
filterM getVmHibernated >>=
filterM (\uuid -> not <$> isRunning uuid)
type EventHandler = XM ()
type Event = (String,String)
handleLidStateChanged closed = do
debug $ "PM: detected lid state change event, closed=" ++ (show closed)
when closed $ do
a <- action
hostWhenIdleDoWithState (hostStateOfPmAction a) $ do
when (a `elem` [ActionHibernate, ActionSleep]) $
liftRpc $ storeVisibleVm
executePmActionInternal a False
where
action = do
settings <- liftRpc pmGetSettings
ac <- liftIO isAcAdapter
return $ case ac of
True -> pmLidCloseACAction settings
False -> pmLidCloseBatteryAction settings
handlePowerButtonPressed = do
debug "PM: detected power button press event"
hostWhenIdleDoWithState HostShuttingDown $ executePmActionInternal ActionShutdown True
handleSleepButtonPressed = do
debug "PM: detected sleep button press event"
hostWhenIdleDoWithState HostGoingToSleep $ do
liftRpc storeVisibleVm
executePmActionInternal ActionSleep True
handleBatteryLevelChanged = do
level <- liftIO $ getBatteryLevel
debug $ "PM: detected battery level change event = " ++ (show level)
when (level == BatCritical) $
hostWhenIdleDoWithState HostGoingToHibernate $ do
liftRpc storeVisibleVm
executePmActionInternal ActionHibernate False
whenEvent :: Event -> EventHandler -> XM ()
whenEvent (intf,signal) h =
let rule = matchSignal intf signal
in
xmContext >>= \c -> liftRpc $ rpcOnSignal rule (process c)
where
process c _ signal = runXM c (void $ future h)
lidStateChanged = ("com.citrix.xenclient.input", "lid_state_changed")
powerButtonPressed = ("com.citrix.xenclient.xcpmd", "power_button_pressed")
sleepButtonPressed = ("com.citrix.xenclient.xcpmd", "sleep_button_pressed")
batteryLevelNotification = ("com.citrix.xenclient.xcpmd", "battery_level_notification")
initPm :: XM ()
initPm = do
whenEvent lidStateChanged $ do
closed <- liftIO isLidClosed
handleLidStateChanged closed
whenEvent powerButtonPressed (handlePowerButtonPressed)
whenEvent sleepButtonPressed (handleSleepButtonPressed)
whenEvent batteryLevelNotification (handleBatteryLevelChanged)
info "installed PM event handlers"
| jean-edouard/manager | xenmgr/XenMgr/PowerManagement.hs | gpl-2.0 | 20,373 | 0 | 18 | 5,303 | 4,705 | 2,318 | 2,387 | 409 | 6 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_HADDOCK hide #-}
-- #hide
-----------------------------------------------------------------------------
-- |
-- Module : Data.Sequence
-- Copyright : (c) Ross Paterson 2005
-- License : BSD-style
-- Maintainer : ross@soi.city.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- General purpose finite sequences.
-- Apart from being finite and having strict operations, sequences
-- also differ from lists in supporting a wider variety of operations
-- efficiently.
--
-- An amortized running time is given for each operation, with /n/ referring
-- to the length of the sequence and /i/ being the integral index used by
-- some operations. These bounds hold even in a persistent (shared) setting.
--
-- The implementation uses 2-3 finger trees annotated with sizes,
-- as described in section 4.2 of
--
-- * Ralf Hinze and Ross Paterson,
-- \"Finger trees: a simple general-purpose data structure\",
-- submitted to /Journal of Functional Programming/.
-- <http://www.soi.city.ac.uk/~ross/papers/FingerTree.html>
--
-----------------------------------------------------------------------------
module Graphics.UI.Gtk.ModelView.Sequence (
Seq,
-- * Construction
empty, -- :: Seq a
singleton, -- :: a -> Seq a
(<|), -- :: a -> Seq a -> Seq a
(|>), -- :: Seq a -> a -> Seq a
(><), -- :: Seq a -> Seq a -> Seq a
-- * Deconstruction
null, -- :: Seq a -> Bool
-- ** Views
ViewL(..),
viewl, -- :: Seq a -> ViewL a
ViewR(..),
viewr, -- :: Seq a -> ViewR a
-- ** Indexing
length, -- :: Seq a -> Int
index, -- :: Seq a -> Int -> a
adjust, -- :: (a -> a) -> Int -> Seq a -> Seq a
update, -- :: Int -> a -> Seq a -> Seq a
take, -- :: Int -> Seq a -> Seq a
drop, -- :: Int -> Seq a -> Seq a
splitAt, -- :: Int -> Seq a -> (Seq a, Seq a)
-- * Lists
fromList, -- :: [a] -> Seq a
toList, -- :: Seq a -> [a]
-- * Folds
-- ** Right associative
foldr, -- :: (a -> b -> b) -> b -> Seq a -> b
foldr1, -- :: (a -> a -> a) -> Seq a -> a
foldr', -- :: (a -> b -> b) -> b -> Seq a -> b
foldrM, -- :: Monad m => (a -> b -> m b) -> b -> Seq a -> m b
-- ** Left associative
foldl, -- :: (a -> b -> a) -> a -> Seq b -> a
foldl1, -- :: (a -> a -> a) -> Seq a -> a
foldl', -- :: (a -> b -> a) -> a -> Seq b -> a
foldlM, -- :: Monad m => (a -> b -> m a) -> a -> Seq b -> m a
-- * Transformations
reverse, -- :: Seq a -> Seq a
#if TESTING
valid,
#endif
) where
import Prelude hiding (
null, length, take, drop, splitAt, foldl, foldl1, foldr, foldr1,
reverse)
import qualified Prelude (foldr)
import Data.List (intersperse)
import qualified Data.List (foldl')
#if TESTING
import Control.Monad (liftM, liftM2, liftM3, liftM4)
import Test.QuickCheck
#endif
infixr 5 `consTree`
infixl 5 `snocTree`
infixr 5 ><
infixr 5 <|, :<
infixl 5 |>, :>
class Sized a where
size :: a -> Int
------------------------------------------------------------------------
-- Random access sequences
------------------------------------------------------------------------
-- | General-purpose finite sequences.
newtype Seq a = Seq (FingerTree (Elem a))
instance Functor Seq where
fmap f (Seq xs) = Seq (fmap (fmap f) xs)
instance Eq a => Eq (Seq a) where
xs == ys = length xs == length ys && toList xs == toList ys
instance Ord a => Ord (Seq a) where
compare xs ys = compare (toList xs) (toList ys)
#if TESTING
instance (Show a) => Show (Seq a) where
showsPrec p (Seq x) = showsPrec p x
#else
instance Show a => Show (Seq a) where
showsPrec _ xs = showChar '<' .
flip (Prelude.foldr ($)) (intersperse (showChar ',')
(map shows (toList xs))) .
showChar '>'
#endif
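-- Illustrative sketch (not part of the original module, added for exposition):
-- basic construction and inspection of a 'Seq' using the operations defined
-- below.
_exampleSeq :: ([Int], Maybe Int)
_exampleSeq = (toList s, leftmost)
  where
    s = fromList [1, 2, 3] |> 4
    leftmost = case viewl s of
        EmptyL -> Nothing
        x :< _ -> Just x
-- _exampleSeq == ([1,2,3,4], Just 1)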
-- Finger trees
data FingerTree a
= Empty
| Single a
| Deep {-# UNPACK #-} !Int !(Digit a) (FingerTree (Node a)) !(Digit a)
#if TESTING
deriving Show
#endif
instance Sized a => Sized (FingerTree a) where
size Empty = 0
size (Single x) = size x
size (Deep v _ _ _) = v
instance Functor FingerTree where
fmap _ Empty = Empty
fmap f (Single x) = Single (f x)
fmap f (Deep v pr m sf) =
Deep v (fmap f pr) (fmap (fmap f) m) (fmap f sf)
{-# INLINE deep #-}
deep :: Sized a => Digit a -> FingerTree (Node a) -> Digit a -> FingerTree a
deep pr m sf = Deep (size pr + size m + size sf) pr m sf
-- Digits
data Digit a
= One a
| Two a a
| Three a a a
| Four a a a a
#if TESTING
deriving Show
#endif
instance Functor Digit where
fmap f (One a) = One (f a)
fmap f (Two a b) = Two (f a) (f b)
fmap f (Three a b c) = Three (f a) (f b) (f c)
fmap f (Four a b c d) = Four (f a) (f b) (f c) (f d)
instance Sized a => Sized (Digit a) where
size xs = foldlDigit (\ i x -> i + size x) 0 xs
{-# SPECIALIZE digitToTree :: Digit (Elem a) -> FingerTree (Elem a) #-}
{-# SPECIALIZE digitToTree :: Digit (Node a) -> FingerTree (Node a) #-}
digitToTree :: Sized a => Digit a -> FingerTree a
digitToTree (One a) = Single a
digitToTree (Two a b) = deep (One a) Empty (One b)
digitToTree (Three a b c) = deep (Two a b) Empty (One c)
digitToTree (Four a b c d) = deep (Two a b) Empty (Two c d)
-- Nodes
data Node a
= Node2 {-# UNPACK #-} !Int a a
| Node3 {-# UNPACK #-} !Int a a a
#if TESTING
deriving Show
#endif
instance Functor (Node) where
fmap f (Node2 v a b) = Node2 v (f a) (f b)
fmap f (Node3 v a b c) = Node3 v (f a) (f b) (f c)
instance Sized (Node a) where
size (Node2 v _ _) = v
size (Node3 v _ _ _) = v
{-# INLINE node2 #-}
node2 :: Sized a => a -> a -> Node a
node2 a b = Node2 (size a + size b) a b
{-# INLINE node3 #-}
node3 :: Sized a => a -> a -> a -> Node a
node3 a b c = Node3 (size a + size b + size c) a b c
nodeToDigit :: Node a -> Digit a
nodeToDigit (Node2 _ a b) = Two a b
nodeToDigit (Node3 _ a b c) = Three a b c
-- Elements
newtype Elem a = Elem { getElem :: a }
instance Sized (Elem a) where
size _ = 1
instance Functor Elem where
fmap f (Elem x) = Elem (f x)
#ifdef TESTING
instance (Show a) => Show (Elem a) where
showsPrec p (Elem x) = showsPrec p x
#endif
------------------------------------------------------------------------
-- Construction
------------------------------------------------------------------------
-- | /O(1)/. The empty sequence.
empty :: Seq a
empty = Seq Empty
-- | /O(1)/. A singleton sequence.
singleton :: a -> Seq a
singleton x = Seq (Single (Elem x))
-- | /O(1)/. Add an element to the left end of a sequence.
-- Mnemonic: a triangle with the single element at the pointy end.
(<|) :: a -> Seq a -> Seq a
x <| Seq xs = Seq (Elem x `consTree` xs)
{-# SPECIALIZE consTree :: Elem a -> FingerTree (Elem a) -> FingerTree (Elem a) #-}
{-# SPECIALIZE consTree :: Node a -> FingerTree (Node a) -> FingerTree (Node a) #-}
consTree :: Sized a => a -> FingerTree a -> FingerTree a
consTree a Empty = Single a
consTree a (Single b) = deep (One a) Empty (One b)
consTree a (Deep s (Four b c d e) m sf) = m `seq`
Deep (size a + s) (Two a b) (node3 c d e `consTree` m) sf
consTree a (Deep s (Three b c d) m sf) =
Deep (size a + s) (Four a b c d) m sf
consTree a (Deep s (Two b c) m sf) =
Deep (size a + s) (Three a b c) m sf
consTree a (Deep s (One b) m sf) =
Deep (size a + s) (Two a b) m sf
-- | /O(1)/. Add an element to the right end of a sequence.
-- Mnemonic: a triangle with the single element at the pointy end.
(|>) :: Seq a -> a -> Seq a
Seq xs |> x = Seq (xs `snocTree` Elem x)
{-# SPECIALIZE snocTree :: FingerTree (Elem a) -> Elem a -> FingerTree (Elem a) #-}
{-# SPECIALIZE snocTree :: FingerTree (Node a) -> Node a -> FingerTree (Node a) #-}
snocTree :: Sized a => FingerTree a -> a -> FingerTree a
snocTree Empty a = Single a
snocTree (Single a) b = deep (One a) Empty (One b)
snocTree (Deep s pr m (Four a b c d)) e = m `seq`
Deep (s + size e) pr (m `snocTree` node3 a b c) (Two d e)
snocTree (Deep s pr m (Three a b c)) d =
Deep (s + size d) pr m (Four a b c d)
snocTree (Deep s pr m (Two a b)) c =
Deep (s + size c) pr m (Three a b c)
snocTree (Deep s pr m (One a)) b =
Deep (s + size b) pr m (Two a b)
-- | /O(log(min(n1,n2)))/. Concatenate two sequences.
(><) :: Seq a -> Seq a -> Seq a
Seq xs >< Seq ys = Seq (appendTree0 xs ys)
-- The appendTree/addDigits gunk below is machine generated
appendTree0 :: FingerTree (Elem a) -> FingerTree (Elem a) -> FingerTree (Elem a)
appendTree0 Empty xs =
xs
appendTree0 xs Empty =
xs
appendTree0 (Single x) xs =
x `consTree` xs
appendTree0 xs (Single x) =
xs `snocTree` x
appendTree0 (Deep s1 pr1 m1 sf1) (Deep s2 pr2 m2 sf2) =
Deep (s1 + s2) pr1 (addDigits0 m1 sf1 pr2 m2) sf2
addDigits0 :: FingerTree (Node (Elem a)) -> Digit (Elem a) -> Digit (Elem a) -> FingerTree (Node (Elem a)) -> FingerTree (Node (Elem a))
addDigits0 m1 (One a) (One b) m2 =
appendTree1 m1 (node2 a b) m2
addDigits0 m1 (One a) (Two b c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits0 m1 (One a) (Three b c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (One a) (Four b c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Two a b) (One c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits0 m1 (Two a b) (Two c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (Two a b) (Three c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Two a b) (Four c d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Three a b c) (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (Three a b c) (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Three a b c) (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Three a b c) (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits0 m1 (Four a b c d) (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Four a b c d) (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Four a b c d) (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits0 m1 (Four a b c d) (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
appendTree1 :: FingerTree (Node a) -> Node a -> FingerTree (Node a) -> FingerTree (Node a)
appendTree1 Empty a xs =
a `consTree` xs
appendTree1 xs a Empty =
xs `snocTree` a
appendTree1 (Single x) a xs =
x `consTree` a `consTree` xs
appendTree1 xs a (Single x) =
xs `snocTree` a `snocTree` x
appendTree1 (Deep s1 pr1 m1 sf1) a (Deep s2 pr2 m2 sf2) =
Deep (s1 + size a + s2) pr1 (addDigits1 m1 sf1 a pr2 m2) sf2
addDigits1 :: FingerTree (Node (Node a)) -> Digit (Node a) -> Node a -> Digit (Node a) -> FingerTree (Node (Node a)) -> FingerTree (Node (Node a))
addDigits1 m1 (One a) b (One c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits1 m1 (One a) b (Two c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits1 m1 (One a) b (Three c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (One a) b (Four c d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Two a b) c (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits1 m1 (Two a b) c (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (Two a b) c (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Two a b) c (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Three a b c) d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (Three a b c) d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Three a b c) d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Three a b c) d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits1 m1 (Four a b c d) e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Four a b c d) e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Four a b c d) e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits1 m1 (Four a b c d) e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
appendTree2 :: FingerTree (Node a) -> Node a -> Node a -> FingerTree (Node a) -> FingerTree (Node a)
appendTree2 Empty a b xs =
a `consTree` b `consTree` xs
appendTree2 xs a b Empty =
xs `snocTree` a `snocTree` b
appendTree2 (Single x) a b xs =
x `consTree` a `consTree` b `consTree` xs
appendTree2 xs a b (Single x) =
xs `snocTree` a `snocTree` b `snocTree` x
appendTree2 (Deep s1 pr1 m1 sf1) a b (Deep s2 pr2 m2 sf2) =
Deep (s1 + size a + size b + s2) pr1 (addDigits2 m1 sf1 a b pr2 m2) sf2
addDigits2 :: FingerTree (Node (Node a)) -> Digit (Node a) -> Node a -> Node a -> Digit (Node a) -> FingerTree (Node (Node a)) -> FingerTree (Node (Node a))
addDigits2 m1 (One a) b c (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits2 m1 (One a) b c (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits2 m1 (One a) b c (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (One a) b c (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Two a b) c d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits2 m1 (Two a b) c d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (Two a b) c d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Two a b) c d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Three a b c) d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (Three a b c) d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Three a b c) d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Three a b c) d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits2 m1 (Four a b c d) e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Four a b c d) e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Four a b c d) e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits2 m1 (Four a b c d) e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
appendTree3 :: FingerTree (Node a) -> Node a -> Node a -> Node a -> FingerTree (Node a) -> FingerTree (Node a)
appendTree3 Empty a b c xs =
a `consTree` b `consTree` c `consTree` xs
appendTree3 xs a b c Empty =
xs `snocTree` a `snocTree` b `snocTree` c
appendTree3 (Single x) a b c xs =
x `consTree` a `consTree` b `consTree` c `consTree` xs
appendTree3 xs a b c (Single x) =
xs `snocTree` a `snocTree` b `snocTree` c `snocTree` x
appendTree3 (Deep s1 pr1 m1 sf1) a b c (Deep s2 pr2 m2 sf2) =
Deep (s1 + size a + size b + size c + s2) pr1 (addDigits3 m1 sf1 a b c pr2 m2) sf2
addDigits3 :: FingerTree (Node (Node a)) -> Digit (Node a) -> Node a -> Node a -> Node a -> Digit (Node a) -> FingerTree (Node (Node a)) -> FingerTree (Node (Node a))
addDigits3 m1 (One a) b c d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits3 m1 (One a) b c d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits3 m1 (One a) b c d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (One a) b c d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Two a b) c d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits3 m1 (Two a b) c d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (Two a b) c d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Two a b) c d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Three a b c) d e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (Three a b c) d e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Three a b c) d e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Three a b c) d e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits3 m1 (Four a b c d) e f g (One h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Four a b c d) e f g (Two h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Four a b c d) e f g (Three h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits3 m1 (Four a b c d) e f g (Four h i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
appendTree4 :: FingerTree (Node a) -> Node a -> Node a -> Node a -> Node a -> FingerTree (Node a) -> FingerTree (Node a)
appendTree4 Empty a b c d xs =
a `consTree` b `consTree` c `consTree` d `consTree` xs
appendTree4 xs a b c d Empty =
xs `snocTree` a `snocTree` b `snocTree` c `snocTree` d
appendTree4 (Single x) a b c d xs =
x `consTree` a `consTree` b `consTree` c `consTree` d `consTree` xs
appendTree4 xs a b c d (Single x) =
xs `snocTree` a `snocTree` b `snocTree` c `snocTree` d `snocTree` x
appendTree4 (Deep s1 pr1 m1 sf1) a b c d (Deep s2 pr2 m2 sf2) =
Deep (s1 + size a + size b + size c + size d + s2) pr1 (addDigits4 m1 sf1 a b c d pr2 m2) sf2
addDigits4 :: FingerTree (Node (Node a)) -> Digit (Node a) -> Node a -> Node a -> Node a -> Node a -> Digit (Node a) -> FingerTree (Node (Node a)) -> FingerTree (Node (Node a))
addDigits4 m1 (One a) b c d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits4 m1 (One a) b c d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits4 m1 (One a) b c d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (One a) b c d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Two a b) c d e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits4 m1 (Two a b) c d e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (Two a b) c d e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Two a b) c d e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Three a b c) d e f g (One h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (Three a b c) d e f g (Two h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Three a b c) d e f g (Three h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Three a b c) d e f g (Four h i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
addDigits4 m1 (Four a b c d) e f g h (One i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Four a b c d) e f g h (Two i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Four a b c d) e f g h (Three i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
addDigits4 m1 (Four a b c d) e f g h (Four i j k l) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node3 j k l) m2
------------------------------------------------------------------------
-- Deconstruction
------------------------------------------------------------------------
-- | /O(1)/. Is this the empty sequence?
null :: Seq a -> Bool
null (Seq Empty) = True
null _ = False
-- | /O(1)/. The number of elements in the sequence.
length :: Seq a -> Int
length (Seq xs) = size xs
-- Views
data Maybe2 a b = Nothing2 | Just2 a b
-- | View of the left end of a sequence.
data ViewL a
= EmptyL -- ^ empty sequence
| a :< Seq a -- ^ leftmost element and the rest of the sequence
#ifndef __HADDOCK__
deriving (Eq, Show)
#else
instance Eq a => Eq (ViewL a)
instance Show a => Show (ViewL a)
#endif
instance Functor ViewL where
fmap _ EmptyL = EmptyL
fmap f (x :< xs) = f x :< fmap f xs
-- | /O(1)/. Analyse the left end of a sequence.
viewl :: Seq a -> ViewL a
viewl (Seq xs) = case viewLTree xs of
Nothing2 -> EmptyL
Just2 (Elem x) xs' -> x :< Seq xs'
{-# SPECIALIZE viewLTree :: FingerTree (Elem a) -> Maybe2 (Elem a) (FingerTree (Elem a)) #-}
{-# SPECIALIZE viewLTree :: FingerTree (Node a) -> Maybe2 (Node a) (FingerTree (Node a)) #-}
viewLTree :: Sized a => FingerTree a -> Maybe2 a (FingerTree a)
viewLTree Empty = Nothing2
viewLTree (Single a) = Just2 a Empty
viewLTree (Deep s (One a) m sf) = Just2 a (case viewLTree m of
Nothing2 -> digitToTree sf
Just2 b m' -> Deep (s - size a) (nodeToDigit b) m' sf)
viewLTree (Deep s (Two a b) m sf) =
Just2 a (Deep (s - size a) (One b) m sf)
viewLTree (Deep s (Three a b c) m sf) =
Just2 a (Deep (s - size a) (Two b c) m sf)
viewLTree (Deep s (Four a b c d) m sf) =
Just2 a (Deep (s - size a) (Three b c d) m sf)
-- | View of the right end of a sequence.
data ViewR a
= EmptyR -- ^ empty sequence
| Seq a :> a -- ^ the sequence minus the rightmost element,
-- and the rightmost element
#ifndef __HADDOCK__
deriving (Eq, Show)
#else
instance Eq a => Eq (ViewR a)
instance Show a => Show (ViewR a)
#endif
instance Functor ViewR where
fmap _ EmptyR = EmptyR
fmap f (xs :> x) = fmap f xs :> f x
-- | /O(1)/. Analyse the right end of a sequence.
viewr :: Seq a -> ViewR a
viewr (Seq xs) = case viewRTree xs of
Nothing2 -> EmptyR
Just2 xs' (Elem x) -> Seq xs' :> x
{-# SPECIALIZE viewRTree :: FingerTree (Elem a) -> Maybe2 (FingerTree (Elem a)) (Elem a) #-}
{-# SPECIALIZE viewRTree :: FingerTree (Node a) -> Maybe2 (FingerTree (Node a)) (Node a) #-}
viewRTree :: Sized a => FingerTree a -> Maybe2 (FingerTree a) a
viewRTree Empty = Nothing2
viewRTree (Single z) = Just2 Empty z
viewRTree (Deep s pr m (One z)) = Just2 (case viewRTree m of
Nothing2 -> digitToTree pr
Just2 m' y -> Deep (s - size z) pr m' (nodeToDigit y)) z
viewRTree (Deep s pr m (Two y z)) =
Just2 (Deep (s - size z) pr m (One y)) z
viewRTree (Deep s pr m (Three x y z)) =
Just2 (Deep (s - size z) pr m (Two x y)) z
viewRTree (Deep s pr m (Four w x y z)) =
Just2 (Deep (s - size z) pr m (Three w x y)) z
-- Indexing
-- | /O(log(min(i,n-i)))/. The element at the specified position
index :: Seq a -> Int -> a
index (Seq xs) i
| 0 <= i && i < size xs = case lookupTree (-i) xs of
Place _ (Elem x) -> x
| otherwise = error "index out of bounds"
data Place a = Place {-# UNPACK #-} !Int a
#if TESTING
deriving Show
#endif
{-# SPECIALIZE lookupTree :: Int -> FingerTree (Elem a) -> Place (Elem a) #-}
{-# SPECIALIZE lookupTree :: Int -> FingerTree (Node a) -> Place (Node a) #-}
lookupTree :: Sized a => Int -> FingerTree a -> Place a
lookupTree i (Single x) = Place i x
lookupTree i (Deep _ pr m sf)
| vpr > 0 = lookupDigit i pr
| vm > 0 = case lookupTree vpr m of
Place i' xs -> lookupNode i' xs
| otherwise = lookupDigit vm sf
where vpr = i + size pr
vm = vpr + size m
{-# SPECIALIZE lookupNode :: Int -> Node (Elem a) -> Place (Elem a) #-}
{-# SPECIALIZE lookupNode :: Int -> Node (Node a) -> Place (Node a) #-}
lookupNode :: Sized a => Int -> Node a -> Place a
lookupNode i (Node2 _ a b)
| va > 0 = Place i a
| otherwise = Place va b
where va = i + size a
lookupNode i (Node3 _ a b c)
| va > 0 = Place i a
| vab > 0 = Place va b
| otherwise = Place vab c
where va = i + size a
vab = va + size b
{-# SPECIALIZE lookupDigit :: Int -> Digit (Elem a) -> Place (Elem a) #-}
{-# SPECIALIZE lookupDigit :: Int -> Digit (Node a) -> Place (Node a) #-}
lookupDigit :: Sized a => Int -> Digit a -> Place a
lookupDigit i (One a) = Place i a
lookupDigit i (Two a b)
| va > 0 = Place i a
| otherwise = Place va b
where va = i + size a
lookupDigit i (Three a b c)
| va > 0 = Place i a
| vab > 0 = Place va b
| otherwise = Place vab c
where va = i + size a
vab = va + size b
lookupDigit i (Four a b c d)
| va > 0 = Place i a
| vab > 0 = Place va b
| vabc > 0 = Place vab c
| otherwise = Place vabc d
where va = i + size a
vab = va + size b
vabc = vab + size c
-- | /O(log(min(i,n-i)))/. Replace the element at the specified position
update :: Int -> a -> Seq a -> Seq a
update i x = adjust (const x) i
-- | /O(log(min(i,n-i)))/. Update the element at the specified position
adjust :: (a -> a) -> Int -> Seq a -> Seq a
adjust f i (Seq xs)
| 0 <= i && i < size xs = Seq (adjustTree (const (fmap f)) (-i) xs)
| otherwise = Seq xs
{-# SPECIALIZE adjustTree :: (Int -> Elem a -> Elem a) -> Int -> FingerTree (Elem a) -> FingerTree (Elem a) #-}
{-# SPECIALIZE adjustTree :: (Int -> Node a -> Node a) -> Int -> FingerTree (Node a) -> FingerTree (Node a) #-}
adjustTree :: Sized a => (Int -> a -> a) ->
Int -> FingerTree a -> FingerTree a
adjustTree f i (Single x) = Single (f i x)
adjustTree f i (Deep s pr m sf)
| vpr > 0 = Deep s (adjustDigit f i pr) m sf
| vm > 0 = Deep s pr (adjustTree (adjustNode f) vpr m) sf
| otherwise = Deep s pr m (adjustDigit f vm sf)
where vpr = i + size pr
vm = vpr + size m
{-# SPECIALIZE adjustNode :: (Int -> Elem a -> Elem a) -> Int -> Node (Elem a) -> Node (Elem a) #-}
{-# SPECIALIZE adjustNode :: (Int -> Node a -> Node a) -> Int -> Node (Node a) -> Node (Node a) #-}
adjustNode :: Sized a => (Int -> a -> a) -> Int -> Node a -> Node a
adjustNode f i (Node2 s a b)
| va > 0 = Node2 s (f i a) b
| otherwise = Node2 s a (f va b)
where va = i + size a
adjustNode f i (Node3 s a b c)
| va > 0 = Node3 s (f i a) b c
| vab > 0 = Node3 s a (f va b) c
| otherwise = Node3 s a b (f vab c)
where va = i + size a
vab = va + size b
{-# SPECIALIZE adjustDigit :: (Int -> Elem a -> Elem a) -> Int -> Digit (Elem a) -> Digit (Elem a) #-}
{-# SPECIALIZE adjustDigit :: (Int -> Node a -> Node a) -> Int -> Digit (Node a) -> Digit (Node a) #-}
adjustDigit :: Sized a => (Int -> a -> a) -> Int -> Digit a -> Digit a
adjustDigit f i (One a) = One (f i a)
adjustDigit f i (Two a b)
| va > 0 = Two (f i a) b
| otherwise = Two a (f va b)
where va = i + size a
adjustDigit f i (Three a b c)
| va > 0 = Three (f i a) b c
| vab > 0 = Three a (f va b) c
| otherwise = Three a b (f vab c)
where va = i + size a
vab = va + size b
adjustDigit f i (Four a b c d)
| va > 0 = Four (f i a) b c d
| vab > 0 = Four a (f va b) c d
| vabc > 0 = Four a b (f vab c) d
| otherwise = Four a b c (f vabc d)
where va = i + size a
vab = va + size b
vabc = vab + size c
-- Splitting
-- | /O(log(min(i,n-i)))/. The first @i@ elements of a sequence.
take :: Int -> Seq a -> Seq a
take i = fst . splitAt i
-- | /O(log(min(i,n-i)))/. Elements of the sequence after the first @i@.
drop :: Int -> Seq a -> Seq a
drop i = snd . splitAt i
-- | /O(log(min(i,n-i)))/. Split a sequence at a given position.
splitAt :: Int -> Seq a -> (Seq a, Seq a)
splitAt i (Seq xs) = (Seq l, Seq r)
where (l, r) = split i xs
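-- Illustrative sketch (not part of the original module, added for exposition):
-- 'take' and 'drop' above are simply the two halves of 'splitAt'.
_exampleSplit :: ([Int], [Int])
_exampleSplit = (toList l, toList r)
  where (l, r) = splitAt 2 (fromList [1, 2, 3, 4, 5])
-- _exampleSplit == ([1,2], [3,4,5])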
split :: Int -> FingerTree (Elem a) ->
(FingerTree (Elem a), FingerTree (Elem a))
split i Empty = i `seq` (Empty, Empty)
split i xs
| size xs > i = (l, consTree x r)
| otherwise = (xs, Empty)
where Split l x r = splitTree (-i) xs
data Split t a = Split t a t
#if TESTING
deriving Show
#endif
{-# SPECIALIZE splitTree :: Int -> FingerTree (Elem a) -> Split (FingerTree (Elem a)) (Elem a) #-}
{-# SPECIALIZE splitTree :: Int -> FingerTree (Node a) -> Split (FingerTree (Node a)) (Node a) #-}
splitTree :: Sized a => Int -> FingerTree a -> Split (FingerTree a) a
splitTree i (Single x) = i `seq` Split Empty x Empty
splitTree i (Deep _ pr m sf)
| vpr > 0 = case splitDigit i pr of
Split l x r -> Split (maybe Empty digitToTree l) x (deepL r m sf)
| vm > 0 = case splitTree vpr m of
Split ml xs mr -> case splitNode (vpr + size ml) xs of
Split l x r -> Split (deepR pr ml l) x (deepL r mr sf)
| otherwise = case splitDigit vm sf of
Split l x r -> Split (deepR pr m l) x (maybe Empty digitToTree r)
where vpr = i + size pr
vm = vpr + size m
{-# SPECIALIZE deepL :: Maybe (Digit (Elem a)) -> FingerTree (Node (Elem a)) -> Digit (Elem a) -> FingerTree (Elem a) #-}
{-# SPECIALIZE deepL :: Maybe (Digit (Node a)) -> FingerTree (Node (Node a)) -> Digit (Node a) -> FingerTree (Node a) #-}
deepL :: Sized a => Maybe (Digit a) -> FingerTree (Node a) -> Digit a -> FingerTree a
deepL Nothing m sf = case viewLTree m of
Nothing2 -> digitToTree sf
Just2 a m' -> deep (nodeToDigit a) m' sf
deepL (Just pr) m sf = deep pr m sf
{-# SPECIALIZE deepR :: Digit (Elem a) -> FingerTree (Node (Elem a)) -> Maybe (Digit (Elem a)) -> FingerTree (Elem a) #-}
{-# SPECIALIZE deepR :: Digit (Node a) -> FingerTree (Node (Node a)) -> Maybe (Digit (Node a)) -> FingerTree (Node a) #-}
deepR :: Sized a => Digit a -> FingerTree (Node a) -> Maybe (Digit a) -> FingerTree a
deepR pr m Nothing = case viewRTree m of
Nothing2 -> digitToTree pr
Just2 m' a -> deep pr m' (nodeToDigit a)
deepR pr m (Just sf) = deep pr m sf
{-# SPECIALIZE splitNode :: Int -> Node (Elem a) -> Split (Maybe (Digit (Elem a))) (Elem a) #-}
{-# SPECIALIZE splitNode :: Int -> Node (Node a) -> Split (Maybe (Digit (Node a))) (Node a) #-}
splitNode :: Sized a => Int -> Node a -> Split (Maybe (Digit a)) a
splitNode i (Node2 _ a b)
| va > 0 = Split Nothing a (Just (One b))
| otherwise = Split (Just (One a)) b Nothing
where va = i + size a
splitNode i (Node3 _ a b c)
| va > 0 = Split Nothing a (Just (Two b c))
| vab > 0 = Split (Just (One a)) b (Just (One c))
| otherwise = Split (Just (Two a b)) c Nothing
where va = i + size a
vab = va + size b
{-# SPECIALIZE splitDigit :: Int -> Digit (Elem a) -> Split (Maybe (Digit (Elem a))) (Elem a) #-}
{-# SPECIALIZE splitDigit :: Int -> Digit (Node a) -> Split (Maybe (Digit (Node a))) (Node a) #-}
splitDigit :: Sized a => Int -> Digit a -> Split (Maybe (Digit a)) a
splitDigit i (One a) = i `seq` Split Nothing a Nothing
splitDigit i (Two a b)
| va > 0 = Split Nothing a (Just (One b))
| otherwise = Split (Just (One a)) b Nothing
where va = i + size a
splitDigit i (Three a b c)
| va > 0 = Split Nothing a (Just (Two b c))
| vab > 0 = Split (Just (One a)) b (Just (One c))
| otherwise = Split (Just (Two a b)) c Nothing
where va = i + size a
vab = va + size b
splitDigit i (Four a b c d)
| va > 0 = Split Nothing a (Just (Three b c d))
| vab > 0 = Split (Just (One a)) b (Just (Two c d))
| vabc > 0 = Split (Just (Two a b)) c (Just (One d))
| otherwise = Split (Just (Three a b c)) d Nothing
where va = i + size a
vab = va + size b
vabc = vab + size c
------------------------------------------------------------------------
-- Lists
------------------------------------------------------------------------
-- | /O(n)/. Create a sequence from a finite list of elements.
fromList :: [a] -> Seq a
fromList = Data.List.foldl' (|>) empty
-- | /O(n)/. List of elements of the sequence.
toList :: Seq a -> [a]
toList = foldr (:) []
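-- Illustrative examples for the list conversions above:
--
-- > toList (fromList "abc")           -- "abc"
-- > toList (fromList [1,2,3] |> 4)    -- [1,2,3,4]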
------------------------------------------------------------------------
-- Folds
------------------------------------------------------------------------
-- | /O(n*t)/. Fold over the elements of a sequence,
-- associating to the right.
foldr :: (a -> b -> b) -> b -> Seq a -> b
foldr f z (Seq xs) = foldrTree f' z xs
where f' (Elem x) y = f x y
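-- For example (illustrative): foldr (-) 0 (fromList [1,2,3])
-- associates as 1 - (2 - (3 - 0)) = 2.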
foldrTree :: (a -> b -> b) -> b -> FingerTree a -> b
foldrTree _ z Empty = z
foldrTree f z (Single x) = x `f` z
foldrTree f z (Deep _ pr m sf) =
foldrDigit f (foldrTree (flip (foldrNode f)) (foldrDigit f z sf) m) pr
foldrDigit :: (a -> b -> b) -> b -> Digit a -> b
foldrDigit f z (One a) = a `f` z
foldrDigit f z (Two a b) = a `f` (b `f` z)
foldrDigit f z (Three a b c) = a `f` (b `f` (c `f` z))
foldrDigit f z (Four a b c d) = a `f` (b `f` (c `f` (d `f` z)))
foldrNode :: (a -> b -> b) -> b -> Node a -> b
foldrNode f z (Node2 _ a b) = a `f` (b `f` z)
foldrNode f z (Node3 _ a b c) = a `f` (b `f` (c `f` z))
-- | /O(n*t)/. A variant of 'foldr' that has no base case,
-- and thus may only be applied to non-empty sequences.
foldr1 :: (a -> a -> a) -> Seq a -> a
foldr1 f (Seq xs) = getElem (foldr1Tree f' xs)
where f' (Elem x) (Elem y) = Elem (f x y)
foldr1Tree :: (a -> a -> a) -> FingerTree a -> a
foldr1Tree _ Empty = error "foldr1: empty sequence"
foldr1Tree _ (Single x) = x
foldr1Tree f (Deep _ pr m sf) =
foldrDigit f (foldrTree (flip (foldrNode f)) (foldr1Digit f sf) m) pr
foldr1Digit :: (a -> a -> a) -> Digit a -> a
foldr1Digit f (One a) = a
foldr1Digit f (Two a b) = a `f` b
foldr1Digit f (Three a b c) = a `f` (b `f` c)
foldr1Digit f (Four a b c d) = a `f` (b `f` (c `f` d))
-- | /O(n*t)/. Fold over the elements of a sequence,
-- associating to the left.
foldl :: (a -> b -> a) -> a -> Seq b -> a
foldl f z (Seq xs) = foldlTree f' z xs
where f' x (Elem y) = f x y
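-- For example (illustrative): foldl (-) 0 (fromList [1,2,3])
-- associates as ((0 - 1) - 2) - 3 = -6.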
foldlTree :: (a -> b -> a) -> a -> FingerTree b -> a
foldlTree _ z Empty = z
foldlTree f z (Single x) = z `f` x
foldlTree f z (Deep _ pr m sf) =
foldlDigit f (foldlTree (foldlNode f) (foldlDigit f z pr) m) sf
foldlDigit :: (a -> b -> a) -> a -> Digit b -> a
foldlDigit f z (One a) = z `f` a
foldlDigit f z (Two a b) = (z `f` a) `f` b
foldlDigit f z (Three a b c) = ((z `f` a) `f` b) `f` c
foldlDigit f z (Four a b c d) = (((z `f` a) `f` b) `f` c) `f` d
foldlNode :: (a -> b -> a) -> a -> Node b -> a
foldlNode f z (Node2 _ a b) = (z `f` a) `f` b
foldlNode f z (Node3 _ a b c) = ((z `f` a) `f` b) `f` c
-- | /O(n*t)/. A variant of 'foldl' that has no base case,
-- and thus may only be applied to non-empty sequences.
foldl1 :: (a -> a -> a) -> Seq a -> a
foldl1 f (Seq xs) = getElem (foldl1Tree f' xs)
where f' (Elem x) (Elem y) = Elem (f x y)
foldl1Tree :: (a -> a -> a) -> FingerTree a -> a
foldl1Tree _ Empty = error "foldl1: empty sequence"
foldl1Tree _ (Single x) = x
foldl1Tree f (Deep _ pr m sf) =
foldlDigit f (foldlTree (foldlNode f) (foldl1Digit f pr) m) sf
foldl1Digit :: (a -> a -> a) -> Digit a -> a
foldl1Digit f (One a) = a
foldl1Digit f (Two a b) = a `f` b
foldl1Digit f (Three a b c) = (a `f` b) `f` c
foldl1Digit f (Four a b c d) = ((a `f` b) `f` c) `f` d
------------------------------------------------------------------------
-- Derived folds
------------------------------------------------------------------------
-- | /O(n*t)/. Fold over the elements of a sequence,
-- associating to the right, but strictly.
foldr' :: (a -> b -> b) -> b -> Seq a -> b
foldr' f z xs = foldl f' id xs z
where f' k x z = k $! f x z
-- | /O(n*t)/. Monadic fold over the elements of a sequence,
-- associating to the right, i.e. from right to left.
foldrM :: Monad m => (a -> b -> m b) -> b -> Seq a -> m b
foldrM f z xs = foldl f' return xs z
where f' k x z = f x z >>= k
-- | /O(n*t)/. Fold over the elements of a sequence,
-- associating to the left, but strictly.
foldl' :: (a -> b -> a) -> a -> Seq b -> a
foldl' f z xs = foldr f' id xs z
where f' x k z = k $! f z x
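-- For example (illustrative): foldl' (+) 0 (fromList [1..10000]) sums the
-- sequence while forcing each intermediate accumulator, avoiding the thunk
-- build-up a plain foldl would create.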
-- | /O(n*t)/. Monadic fold over the elements of a sequence,
-- associating to the left, i.e. from left to right.
foldlM :: Monad m => (a -> b -> m a) -> a -> Seq b -> m a
foldlM f z xs = foldr f' return xs z
where f' x k z = f z x >>= k
------------------------------------------------------------------------
-- Reverse
------------------------------------------------------------------------
-- | /O(n)/. The reverse of a sequence.
reverse :: Seq a -> Seq a
reverse (Seq xs) = Seq (reverseTree id xs)
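-- For example (illustrative): toList (reverse (fromList "abc")) is "cba".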
reverseTree :: (a -> a) -> FingerTree a -> FingerTree a
reverseTree _ Empty = Empty
reverseTree f (Single x) = Single (f x)
reverseTree f (Deep s pr m sf) =
Deep s (reverseDigit f sf)
(reverseTree (reverseNode f) m)
(reverseDigit f pr)
reverseDigit :: (a -> a) -> Digit a -> Digit a
reverseDigit f (One a) = One (f a)
reverseDigit f (Two a b) = Two (f b) (f a)
reverseDigit f (Three a b c) = Three (f c) (f b) (f a)
reverseDigit f (Four a b c d) = Four (f d) (f c) (f b) (f a)
reverseNode :: (a -> a) -> Node a -> Node a
reverseNode f (Node2 s a b) = Node2 s (f b) (f a)
reverseNode f (Node3 s a b c) = Node3 s (f c) (f b) (f a)
#if TESTING
------------------------------------------------------------------------
-- QuickCheck
------------------------------------------------------------------------
instance Arbitrary a => Arbitrary (Seq a) where
arbitrary = liftM Seq arbitrary
coarbitrary (Seq x) = coarbitrary x
instance Arbitrary a => Arbitrary (Elem a) where
arbitrary = liftM Elem arbitrary
coarbitrary (Elem x) = coarbitrary x
instance (Arbitrary a, Sized a) => Arbitrary (FingerTree a) where
arbitrary = sized arb
where arb :: (Arbitrary a, Sized a) => Int -> Gen (FingerTree a)
arb 0 = return Empty
arb 1 = liftM Single arbitrary
arb n = liftM3 deep arbitrary (arb (n `div` 2)) arbitrary
coarbitrary Empty = variant 0
coarbitrary (Single x) = variant 1 . coarbitrary x
coarbitrary (Deep _ pr m sf) =
variant 2 . coarbitrary pr . coarbitrary m . coarbitrary sf
instance (Arbitrary a, Sized a) => Arbitrary (Node a) where
arbitrary = oneof [
liftM2 node2 arbitrary arbitrary,
liftM3 node3 arbitrary arbitrary arbitrary]
coarbitrary (Node2 _ a b) = variant 0 . coarbitrary a . coarbitrary b
coarbitrary (Node3 _ a b c) =
variant 1 . coarbitrary a . coarbitrary b . coarbitrary c
instance Arbitrary a => Arbitrary (Digit a) where
arbitrary = oneof [
liftM One arbitrary,
liftM2 Two arbitrary arbitrary,
liftM3 Three arbitrary arbitrary arbitrary,
liftM4 Four arbitrary arbitrary arbitrary arbitrary]
coarbitrary (One a) = variant 0 . coarbitrary a
coarbitrary (Two a b) = variant 1 . coarbitrary a . coarbitrary b
coarbitrary (Three a b c) =
variant 2 . coarbitrary a . coarbitrary b . coarbitrary c
coarbitrary (Four a b c d) =
variant 3 . coarbitrary a . coarbitrary b . coarbitrary c . coarbitrary d
------------------------------------------------------------------------
-- Valid trees
------------------------------------------------------------------------
class Valid a where
valid :: a -> Bool
instance Valid (Elem a) where
valid _ = True
instance Valid (Seq a) where
valid (Seq xs) = valid xs
instance (Sized a, Valid a) => Valid (FingerTree a) where
valid Empty = True
valid (Single x) = valid x
valid (Deep s pr m sf) =
s == size pr + size m + size sf && valid pr && valid m && valid sf
instance (Sized a, Valid a) => Valid (Node a) where
valid (Node2 s a b) = s == size a + size b && valid a && valid b
valid (Node3 s a b c) =
s == size a + size b + size c && valid a && valid b && valid c
instance Valid a => Valid (Digit a) where
valid (One a) = valid a
valid (Two a b) = valid a && valid b
valid (Three a b c) = valid a && valid b && valid c
valid (Four a b c d) = valid a && valid b && valid c && valid d
#endif
| juhp/gtk2hs | gtk/Graphics/UI/Gtk/ModelView/Sequence.hs | lgpl-3.0 | 39,001 | 722 | 18 | 9,286 | 18,049 | 9,274 | 8,775 | 704 | 2 |
-----------------------------------------------------------------------------
-- Standard Library: System operations
--
-- Note: on Windows 9x, system always yields ExitSuccess.
--
-- Suitable for use with Hugs 98
-----------------------------------------------------------------------------
module Hugs.System (
getArgs, getProgName, withArgs, withProgName, getEnv,
system
) where
import Hugs.Prelude( ExitCode(..), catchException, throw )
-- In interpretive mode, the initial values of these two are [] and "Hugs",
-- but they can be (temporarily) changed using withArgs and withProgName.
primitive getArgs "primGetArgs" :: IO [String]
primitive getProgName "primGetProgName" :: IO String
primitive setArgs "primSetArgs" :: [String] -> IO ()
primitive setProgName "primSetProgName" :: String -> IO ()
-- Run an action with a value temporarily overridden
-- (a special case of Control.Exception.bracket)
with :: IO a -> (a -> IO ()) -> a -> IO b -> IO b
with getVal setVal newVal act = do
oldVal <- getVal
setVal newVal
r <- act `catchException` \e -> setVal oldVal >> throw e
setVal oldVal
return r
withArgs :: [String] -> IO a -> IO a
withArgs = with getArgs setArgs
withProgName :: String -> IO a -> IO a
withProgName = with getProgName setProgName
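-- Illustrative usage: the overridden values are visible only inside the
-- wrapped action, e.g.
--
-- > withProgName "demo" (withArgs ["-v"] getArgs)   -- yields ["-v"]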
primitive getEnv :: String -> IO String
system :: String -> IO ExitCode
system s = do r <- primSystem s
return (toExitCode r)
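-- Illustrative usage:
--
-- > do ec <- system "ls"
-- >    case ec of
-- >      ExitSuccess   -> putStrLn "ok"
-- >      ExitFailure n -> putStrLn ("failed with " ++ show n)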
primitive primSystem :: String -> IO Int
toExitCode :: Int -> ExitCode
toExitCode 0 = ExitSuccess
toExitCode n = ExitFailure n
-----------------------------------------------------------------------------
| kaoskorobase/mescaline | resources/hugs/packages/hugsbase/Hugs/System.hs | gpl-3.0 | 1,777 | 25 | 11 | 421 | 397 | 206 | 191 | -1 | -1 |
module Lint where
import Control.Applicative
import Data.List
import Language.Haskell.HLint
import Types
lintSyntax :: Options -> String -> IO String
lintSyntax opt file = pretty <$> lint opt file
where
pretty = unlines . map (intercalate "\0" . lines)
lint :: Options -> String -> IO [String]
lint opt file = map show <$> hlint ([file, "--quiet"] ++ hlintOpts opt)
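-- Illustrative usage (assuming an Options value from Types): lintSyntax
-- opts "Foo.hs" yields one line per hint, with each hint's internal
-- newlines replaced by NUL separators as done by 'pretty' above.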
| syohex/ghc-mod | Lint.hs | bsd-3-clause | 375 | 0 | 11 | 68 | 137 | 72 | 65 | 10 | 1 |
{- $Id: AFRPTestsPSwitch.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsPSwitch *
* Purpose: Test cases for pSwitchB and dpSwitchB *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module AFRPTestsPSwitch (
pswitch_tr,
pswitch_trs,
pswitch_st0,
pswitch_st0r,
pswitch_st1,
pswitch_st1r
) where
import Data.List (findIndex)
import FRP.Yampa
import FRP.Yampa.Internals (Event(NoEvent, Event))
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for pSwitchB and dpSwitchB
------------------------------------------------------------------------------
pswitch_inp1 = deltaEncode 0.1 [0.0, 0.5 ..]
whenFstGE :: Ord a => a -> c -> SF (a, b) (Event c)
whenFstGE a c = arr fst >>> arr (>= a) >>> edge >>> arr (`tag` c)
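-- For instance (illustrative): whenFstGE 1.25 10.0 emits an event tagged
-- 10.0 when the first component of the input pair first rises to 1.25 or
-- above (a rising edge), and NoEvent otherwise.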
pswitch_t0 :: [[Double]]
pswitch_t0 = take 20 $ embed sf pswitch_inp1
where
sf =
pSwitchB [] (whenFstGE 1.25 10.0) $ \sfs x ->
pSwitchB (integral:sfs) (whenFstGE 3.75 10.0) $ \sfs x ->
pSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 5.25 20.0) $ \sfs x->
pSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 7.25 20.0) $ \sfs _->
parB (take 2 sfs)
pswitch_t0r =
[[], -- 0.0
[], -- 0.5
[], -- 1.0
[0.0], -- 1.5
[0.15], -- 2.0
[0.35], -- 2.5
[0.60], -- 3.0
[0.90], -- 3.5
[10.00, 1.25], -- 4.0
[10.40, 1.65], -- 4.5
[10.85, 2.10], -- 5.0
[20.00, 11.35, 2.60], -- 5.5
[20.55, 11.90, 3.15], -- 6.0
[21.15, 12.50, 3.75], -- 6.5
[21.80, 13.15, 4.40], -- 7.0
[22.50, 13.85], -- 7.5
[23.25, 14.60], -- 8.0
[24.05, 15.40], -- 8.5
[24.90, 16.25], -- 9.0
[25.80, 17.15]] -- 9.5
pswitch_t1 :: [[Double]]
pswitch_t1 = take 20 $ embed sf pswitch_inp1
where
sf =
dpSwitchB [] (whenFstGE 1.25 10.0) $ \sfs x ->
dpSwitchB (integral:sfs) (whenFstGE 3.75 10.0) $ \sfs x ->
dpSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 5.25 20.0)$ \sfs x->
dpSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 7.25 20.0)$ \sfs _->
parB (take 2 sfs)
pswitch_t1r =
[[], -- 0.0
[], -- 0.5
[], -- 1.0
[], -- 1.5
[0.15], -- 2.0
[0.35], -- 2.5
[0.60], -- 3.0
[0.90], -- 3.5
[1.25], -- 4.0
[10.40, 1.65], -- 4.5
[10.85, 2.10], -- 5.0
[11.35, 2.60], -- 5.5
[20.55, 11.90, 3.15], -- 6.0
[21.15, 12.50, 3.75], -- 6.5
[21.80, 13.15, 4.40], -- 7.0
[22.50, 13.85, 5.10], -- 7.5
[23.25, 14.60], -- 8.0
[24.05, 15.40], -- 8.5
[24.90, 16.25], -- 9.0
[25.80, 17.15]] -- 9.5
pswitch_t2 :: [[Double]]
pswitch_t2 = take 20 $ embed sf pswitch_inp1
where
sf =
pSwitchB [] (now 10.0) $ \sfs x ->
pSwitchB (integral:sfs) (whenFstGE 3.75 10.0) $ \sfs x ->
pSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 5.25 20.0) $ \sfs x->
pSwitchB ((integral>>>arr(+x)):sfs)(now 20.0) $ \sfs _->
parB (take 2 sfs)
pswitch_t2r =
[[0.00], -- 0.0
[0.00], -- 0.5
[0.05], -- 1.0
[0.15], -- 1.5
[0.30], -- 2.0
[0.50], -- 2.5
[0.75], -- 3.0
[1.05], -- 3.5
[10.00, 1.40], -- 4.0
[10.40, 1.80], -- 4.5
[10.85, 2.25], -- 5.0
[20.00, 11.35], -- 5.5
[20.55, 11.90], -- 6.0
[21.15, 12.50], -- 6.5
[21.80, 13.15], -- 7.0
[22.50, 13.85], -- 7.5
[23.25, 14.60], -- 8.0
[24.05, 15.40], -- 8.5
[24.90, 16.25], -- 9.0
[25.80, 17.15]] -- 9.5
pswitch_t3 :: [[Double]]
pswitch_t3 = take 20 $ embed sf pswitch_inp1
where
sf =
dpSwitchB [] (now 10.0) $ \sfs x ->
dpSwitchB (integral:sfs) (whenFstGE 3.75 10.0) $ \sfs x ->
dpSwitchB ((integral>>>arr(+x)):sfs)(whenFstGE 5.25 20.0)$ \sfs x->
dpSwitchB ((integral>>>arr(+x)):sfs) (now 20.0) $ \sfs _->
parB (take 2 sfs)
pswitch_t3r =
[[], -- 0.0
[0.00], -- 0.5
[0.05], -- 1.0
[0.15], -- 1.5
[0.30], -- 2.0
[0.50], -- 2.5
[0.75], -- 3.0
[1.05], -- 3.5
[1.40], -- 4.0
[10.40, 1.80], -- 4.5
[10.85, 2.25], -- 5.0
[11.35, 2.75], -- 5.5
[20.55, 11.90], -- 6.0
[21.15, 12.50], -- 6.5
[21.80, 13.15], -- 7.0
[22.50, 13.85], -- 7.5
[23.25, 14.60], -- 8.0
[24.05, 15.40], -- 8.5
[24.90, 16.25], -- 9.0
[25.80, 17.15]] -- 9.5
-- Starts three "ramps" with different phases. As soon as one exceeds a
-- threshold, it's restarted, while the others are left alone. The
-- observation of the output is done via the loop (rather than directly
-- from the outputs of the signal functions in the collection), thus the
-- use of a delayed switch is essential.
pswitch_ramp :: Double -> SF a Double
pswitch_ramp phase = constant 2.0 >>> integral >>> arr (+phase)
-- We assume that only one signal function will reach the limit at a time.
pswitch_limit :: Double -> SF ((a, [Double]), b) (Event Int)
pswitch_limit x = arr (snd . fst) >>> arr (findIndex (>=x)) >>> edgeJust
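-- For instance (illustrative values): with threshold 2.99 and current ramp
-- outputs [2.8, 1.8, 0.8] no event fires; once the first ramp reaches 3.0
-- the event carries index 0, i.e. the position of the ramp to restart.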
pswitch_t4 :: [[Double]]
pswitch_t4 = take 30 $ embed (loop sf) (deltaEncode 0.1 (repeat ()))
where
sf :: SF (a, [Double]) ([Double],[Double])
sf = dpSwitchB [pswitch_ramp 0.0, pswitch_ramp 1.0, pswitch_ramp 2.0]
(pswitch_limit 2.99)
pswitch_t4rec
>>> arr dup
pswitch_t4rec :: [SF (a, [Double]) Double]
-> Int
-> SF (a, [Double]) [Double]
pswitch_t4rec sfs n =
dpSwitchB (take n sfs ++ [pswitch_ramp 0.0] ++ drop (n+1) sfs)
(pswitch_limit 2.99)
pswitch_t4rec
pswitch_t4r =
[[0.0, 1.0, 2.0],
[0.2, 1.2, 2.2],
[0.4, 1.4, 2.4],
[0.6, 1.6, 2.6],
[0.8, 1.8, 2.8],
[1.0, 2.0, 3.0],
[1.2, 2.2, 0.2],
[1.4, 2.4, 0.4],
[1.6, 2.6, 0.6],
[1.8, 2.8, 0.8],
[2.0, 3.0, 1.0],
[2.2, 0.2, 1.2],
[2.4, 0.4, 1.4],
[2.6, 0.6, 1.6],
[2.8, 0.8, 1.8],
[3.0, 1.0, 2.0],
[0.2, 1.2, 2.2],
[0.4, 1.4, 2.4],
[0.6, 1.6, 2.6],
[0.8, 1.8, 2.8],
[1.0, 2.0, 3.0],
[1.2, 2.2, 0.2],
[1.4, 2.4, 0.4],
[1.6, 2.6, 0.6],
[1.8, 2.8, 0.8],
[2.0, 3.0, 1.0],
[2.2, 0.2, 1.2],
[2.4, 0.4, 1.4],
[2.6, 0.6, 1.6],
[2.8, 0.8, 1.8]]
-- Variation of the test above, with direct observation (not via loop) and
-- immediate switch.
-- We assume that only one signal function will reach the limit at a time.
pswitch_limit2 :: Double -> SF (a, [Double]) (Event Int)
pswitch_limit2 x = arr snd >>> arr (findIndex (>=x)) >>> edgeJust
pswitch_t5 :: [([Double], Double)]
pswitch_t5 = take 30 $ embed (loop sf) (deltaEncode 0.1 (repeat ()))
where
sf :: SF (a, [Double]) (([Double], Double), [Double])
sf = ((pSwitchB [pswitch_ramp 0.0, pswitch_ramp 1.0, pswitch_ramp 2.0]
(pswitch_limit2 2.99)
pswitch_t5rec)
&&& (arr snd >>> arr sum))
>>> arr (\(xs, y) -> ((xs, y), xs))
pswitch_t5rec :: [SF (a, [Double]) Double]
-> Int
-> SF (a, [Double]) [Double]
pswitch_t5rec sfs n =
pSwitchB (take n sfs ++ [pswitch_ramp 0.0] ++ drop (n+1) sfs)
(pswitch_limit2 2.99)
pswitch_t5rec
pswitch_t5r =
[([0.0, 1.0, 2.0], 3.0),
([0.2, 1.2, 2.2], 3.6),
([0.4, 1.4, 2.4], 4.2),
([0.6, 1.6, 2.6], 4.8),
([0.8, 1.8, 2.8], 5.4),
([1.0, 2.0, 0.0], 3.0),
([1.2, 2.2, 0.2], 3.6),
([1.4, 2.4, 0.4], 4.2),
([1.6, 2.6, 0.6], 4.8),
([1.8, 2.8, 0.8], 5.4),
([2.0, 0.0, 1.0], 3.0),
([2.2, 0.2, 1.2], 3.6),
([2.4, 0.4, 1.4], 4.2),
([2.6, 0.6, 1.6], 4.8),
([2.8, 0.8, 1.8], 5.4),
([0.0, 1.0, 2.0], 3.0),
([0.2, 1.2, 2.2], 3.6),
([0.4, 1.4, 2.4], 4.2),
([0.6, 1.6, 2.6], 4.8),
([0.8, 1.8, 2.8], 5.4),
([1.0, 2.0, 0.0], 3.0),
([1.2, 2.2, 0.2], 3.6),
([1.4, 2.4, 0.4], 4.2),
([1.6, 2.6, 0.6], 4.8),
([1.8, 2.8, 0.8], 5.4),
([2.0, 0.0, 1.0], 3.0),
([2.2, 0.2, 1.2], 3.6),
([2.4, 0.4, 1.4], 4.2),
([2.6, 0.6, 1.6], 4.8),
([2.8, 0.8, 1.8], 5.4)]
pswitch_trs =
[ pswitch_t0 ~= pswitch_t0r,
pswitch_t1 ~= pswitch_t1r,
pswitch_t2 ~= pswitch_t2r,
pswitch_t3 ~= pswitch_t3r,
pswitch_t4 ~= pswitch_t4r,
pswitch_t5 ~= pswitch_t5r
]
pswitch_tr = and pswitch_trs
pswitch_st0 = testSFSpaceLeak 1000000 (loop sf)
where
sf :: SF (a, [Double]) ([Double],[Double])
sf = dpSwitchB [pswitch_ramp 0.0, pswitch_ramp 1.0, pswitch_ramp 2.0]
(pswitch_limit 2.99)
pswitch_t4rec
>>> arr dup
pswitch_st0r = [1.5,2.5,0.5]
pswitch_st1 = testSFSpaceLeak 1000000 (loop sf)
where
sf :: SF (a, [Double]) (([Double], Double), [Double])
sf = ((pSwitchB [pswitch_ramp 0.0, pswitch_ramp 1.0, pswitch_ramp 2.0]
(pswitch_limit2 2.99)
pswitch_t5rec)
&&& (arr snd >>> arr sum))
>>> arr (\(xs, y) -> ((xs, y), xs))
pswitch_st1r = ([1.5,2.5,0.5],4.5)
| ony/Yampa-core | tests/AFRPTestsPSwitch.hs | bsd-3-clause | 9,592 | 68 | 20 | 2,925 | 3,663 | 2,206 | 1,457 | 250 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sk-SK">
<title>DOM XSS Active Scan Rule | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/domxss/src/main/javahelp/org/zaproxy/zap/extension/domxss/resources/help_sk_SK/helpset_sk_SK.hs | apache-2.0 | 985 | 83 | 52 | 162 | 402 | 212 | 190 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Highlighter</title>
<maps>
<homeID>highlighter</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/highlighter/src/main/javahelp/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 964 | 82 | 52 | 155 | 387 | 205 | 182 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools (Ganeti.HTools.PeerMap).
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.HTools.PeerMap (testHTools_PeerMap) where
import Test.QuickCheck
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import qualified Ganeti.HTools.PeerMap as PeerMap
-- | Make sure add is idempotent.
prop_addIdempotent :: PeerMap.PeerMap
-> PeerMap.Key -> PeerMap.Elem -> Property
prop_addIdempotent pmap key em =
fn (fn puniq) ==? fn puniq
where fn = PeerMap.add key em
puniq = PeerMap.accumArray const pmap
-- | Make sure remove is idempotent.
prop_removeIdempotent :: PeerMap.PeerMap -> PeerMap.Key -> Property
prop_removeIdempotent pmap key =
fn (fn puniq) ==? fn puniq
where fn = PeerMap.remove key
puniq = PeerMap.accumArray const pmap
-- | Make sure a missing item returns 0.
prop_findMissing :: PeerMap.PeerMap -> PeerMap.Key -> Property
prop_findMissing pmap key =
PeerMap.find key (PeerMap.remove key puniq) ==? 0
where puniq = PeerMap.accumArray const pmap
-- | Make sure an added item is found.
prop_addFind :: PeerMap.PeerMap
-> PeerMap.Key -> PeerMap.Elem -> Property
prop_addFind pmap key em =
PeerMap.find key (PeerMap.add key em puniq) ==? em
where puniq = PeerMap.accumArray const pmap
-- | Manual check that maxElem indeed returns the maximum, or 0 for an empty map.
prop_maxElem :: PeerMap.PeerMap -> Property
prop_maxElem pmap =
PeerMap.maxElem puniq ==? if null puniq then 0
else (maximum . snd . unzip) puniq
where puniq = PeerMap.accumArray const pmap
-- | List of tests for the PeerMap module.
testSuite "HTools/PeerMap"
[ 'prop_addIdempotent
, 'prop_removeIdempotent
, 'prop_maxElem
, 'prop_addFind
, 'prop_findMissing
]
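-- 'testSuite' is Template Haskell from Test.Ganeti.TestHelper; with the
-- name "HTools/PeerMap" it appears to gather the properties listed above
-- into the exported testHTools_PeerMap group.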
| narurien/ganeti-ceph | test/hs/Test/Ganeti/HTools/PeerMap.hs | gpl-2.0 | 2,618 | 0 | 10 | 569 | 422 | 227 | 195 | 38 | 2 |
#!/usr/bin/env runghc
-- delink.hs
import Text.Pandoc.JSON
main = toJSONFilter delink
delink :: Inline -> [Inline]
delink (Link _ txt _) = txt
delink x = [x]
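-- Illustrative usage: run this script as a pandoc filter, e.g.
--
-- pandoc --filter ./delink.hs -f markdown -t html input.md
--
-- Every Link inline is then replaced by its link text.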
| heroxbd/SHTOOLS | src/pydoc/delink.hs | bsd-3-clause | 171 | 0 | 7 | 39 | 58 | 32 | 26 | 5 | 1 |