{-# LANGUAGE OverloadedStrings #-}
module YaLedger.Output.ASCII where
import Data.List
import Data.String
import YaLedger.Output.Formatted
import YaLedger.Output.Tables
data ASCII = ASCII
deriving (Eq, Show)
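-- | Pad (or truncate) a piece of formatted text to the given width,
-- using the given alignment.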
align :: Int -> Align -> FormattedText -> FormattedText
align w ALeft str
| textLength str >= w = takeText w str
| otherwise = str <> replicate (w - textLength str) ' '
align w ARight str
| textLength str >= w = takeText w str
| otherwise = replicate (w - textLength str) ' ' <> str
align w ACenter str
| textLength str >= w = takeText w str
| otherwise =
let m = (w - textLength str) `div` 2
n = w - textLength str - m
pad1 = replicate m ' '
pad2 = replicate n ' '
in pad1 <> str <> pad2
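-- | Like 'align', but keeps a single space between the text and the
-- border it is aligned against.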
alignPad :: Int -> Align -> FormattedText -> FormattedText
alignPad w ALeft str
| textLength str >= w = takeText w str
| otherwise = space <> str <> spaces (w - textLength str - 1)
alignPad w ARight str
| textLength str >= w = takeText w str
| otherwise = spaces (w - textLength str - 1) <> str <> space
alignPad w ACenter str
| textLength str >= w = takeText w str
| otherwise =
let m = (w - textLength str) `div` 2
n = w - textLength str - m
pad1 = spaces m
pad2 = spaces n
in pad1 <> str <> pad2
alignMax :: Align -> Column -> Column
alignMax a list =
let m = maximum (map textLength list)
in map (pad . align m a) list
pad :: FormattedText -> FormattedText
pad s = space <> s <> space
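-- | Zip two columns row by row, joining the cells with the given separator
-- and padding missing cells of the shorter column with spaces.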
zipS :: FormattedText -> Column -> Column -> Column
zipS sep l1 l2 =
let m = max (length l1) (length l2)
m1 = if null l1 then 0 else maximum (map textLength l1)
m2 = if null l2 then 0 else maximum (map textLength l2)
l1' = take m $ map Just l1 ++ repeat Nothing
l2' = take m $ map Just l2 ++ repeat Nothing
s n Nothing = spaces n
s _ (Just x) = x
go x y = s m1 x <> sep <> s m2 y
in zipWith go l1' l2'
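-- | Render two columns with their headers as a bordered two-column table.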
twoColumns :: FormattedText -> FormattedText -> Column -> Column -> Column
twoColumns h1 h2 l1 l2 =
let m1 = maximum (map textLength (h1:l1))
m2 = maximum (map textLength (h2:l2))
h1' = align m1 ACenter h1
h2' = align m2 ACenter h2
in tabline TopLine [m1,m2]:
(vbar <> h1' <> vbar <> h2' <> vbar):
tabline MidLine [m1,m2]:
map (\l -> vbar <> l <> vbar) (zipS vbar l1 l2) ++
[tabline BottomLine [m1, m2]]
columns' :: [Column] -> Column
columns' list = foldr (zipS vbar) [] list
understrike :: Column -> Column
understrike list =
let m = maximum (map textLength list)
in list ++ [fromString $ replicate m '═']
data LineKind = TopLine | MidLine | BottomLine
deriving (Eq, Show)
startchar :: LineKind -> Char
startchar TopLine = '╒'
startchar MidLine = '╞'
startchar BottomLine = '╘'
midchar :: LineKind -> Char
midchar TopLine = '╤'
midchar MidLine = '╪'
midchar BottomLine = '╧'
endchar :: LineKind -> Char
endchar TopLine = '╕'
endchar MidLine = '╡'
endchar BottomLine = '╛'
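-- | Draw a horizontal table rule of the given kind for columns of the given widths.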
tabline :: LineKind -> [Int] -> FormattedText
tabline k ms = boldText $ startchar k: concatMap go (init ms) ++ line (last ms) ++ [endchar k]
where
go m = line m ++ [midchar k]
line m = replicate m '═'
instance TableFormat ASCII where
tableColumns ASCII list =
let ms = [(a, maximum (map textLength (h ++ l)) + 2) | (h, a, l) <- list]
ws = map snd ms
ss = [replicate m '═' | (_,m) <- ms]
hs = map (\(x,_,_) -> x) list
bs = map (\(_,_,x) -> x) list
in tabline TopLine ws :
map (vbar <>) ( foldr (zipS vbar) [] [map (alignPad m ACenter) h | (h,(a,m),s,l) <- zip4 hs ms ss bs] ) ++
[tabline MidLine ws] ++
map (vbar <>) ( foldr (zipS vbar) [] [map (alignPad m a) l | (h,(a,m),s,l) <- zip4 hs ms ss bs] ) ++
[tabline BottomLine ws]
tableGrid ASCII _ [] = []
tableGrid ASCII colHeaders rows =
let headers = map snd colHeaders
aligns = map fst colHeaders
rows' = map padColumns rows :: [Row]
cols = foldr1 (zipWith (++)) rows' :: Row
wds = [maximum $ map textLength (h ++ column) | (h,column) <- zip headers cols]
colsAligned = [map (align (w+2) a) col | (w,col,a) <- zip3 wds cols aligns]
headersAligned = [map (align (w+2) ACenter) h | (w,h) <- zip wds headers]
in tableColumns ASCII $ zip3 headersAligned aligns colsAligned
maxFieldWidth ASCII = Just 24
| portnov/yaledger | YaLedger/Output/ASCII.hs | bsd-3-clause |
{-# LANGUAGE
MultiWayIf,
RecordWildCards,
ScopedTypeVariables,
TemplateHaskell,
NoImplicitPrelude
#-}
module Schedule
(
Schedule,
PartialSchedule(..),
schPastGames,
schPlayerCount,
schCurrent,
schBest,
schIterationsTotal,
schIterationsLeft,
randomSchedule,
advancePartialSchedule,
precomputedSchedules,
)
where
-- General
import BasePrelude
-- Lenses
import Lens.Micro.Platform hiding ((&))
-- Lists
import Data.List.Index
-- Containers
import qualified Data.Map as M
import Data.Map (Map)
import qualified Data.IntMap as IM
import qualified Data.Vector.Unboxed as U
-- Random
import System.Random.Shuffle
import Control.Monad.Random
-- acid-state
import Data.SafeCopy
type Schedule = U.Vector (Int, Int)
data PartialSchedule = PartialSchedule {
_schPlayerCount :: Int,
_schPastGames :: U.Vector (Int, Int),
_schCurrent :: Schedule,
_schBest :: Schedule,
_schIterationsTotal :: Int,
_schIterationsLeft :: Int }
deriving (Eq, Show)
deriveSafeCopySimple 0 'base ''PartialSchedule
makeLenses ''PartialSchedule
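-- | Run at most @n@ further annealing iterations on a partial schedule;
-- returns the updated partial schedule, or the best schedule found once no
-- iterations are left.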
advancePartialSchedule
:: Int -> PartialSchedule -> IO (Either PartialSchedule Schedule)
advancePartialSchedule n p@PartialSchedule{..}
| _schIterationsLeft <= 0 = return (Right _schBest)
| otherwise = do
let curIter = _schIterationsTotal - _schIterationsLeft
iters = min n _schIterationsLeft
(cur', bst') <- iterateSchedule
_schPlayerCount _schPastGames
curIter iters
(_schCurrent, _schBest)
return $ Left p{
_schCurrent = cur',
_schBest = bst',
_schIterationsLeft = _schIterationsLeft - iters }
{- |
Things influencing rating:
* We want people to have good minimum time between rounds
* We want to avoid the situation when the person plays in the same role
(word-namer / word-guesser) several times in a row
Smaller score = better.
-}
rateSchedule
:: Int -- ^ Player count
-> U.Vector (Int, Int) -- ^ Past games
-> Schedule -- ^ Solution
-> Double
rateSchedule pc past = do
-- We store the following information about each player:
-- * time of last played round
-- * time of last played round as namer
-- * time of last played round as guesser
-- * the player they last played against (to prevent pairs like
-- (x, y), (y, x) from occurring in a row)
-- * minimum time between rounds
-- * minimum time between same-role rounds in a row
-- * minimum time between swapped pairs
let update turn isNamer opponent
(lastTurn, lastNamer, lastGuesser, prevOpponent,
minRounds, minRoundsSame, minSwapped)
=
(turn,
if isNamer then turn else lastNamer,
if isNamer then lastGuesser else turn,
opponent,
if lastTurn /= -1
then min minRounds (turn-lastTurn)
else minRounds,
-- if both are -1 and the player hasn't played before, none of
-- these branches will fire because -1 isn't greater than -1
if | isNamer && lastNamer > lastGuesser ->
min minRoundsSame (turn-lastNamer)
| not isNamer && lastGuesser > lastNamer ->
min minRoundsSame (turn-lastGuesser)
| otherwise ->
minRoundsSame,
if opponent == prevOpponent
then min minSwapped (turn-lastTurn)
else minSwapped)
-- Now we calculate all that info for past games.
let initTuple = (-1, -1, -1, -1, 1000000, 1000000, 1000000)
let initMap = IM.fromList [(i, initTuple) | i <- [0..pc-1]]
let updateMap :: Int
-> IM.IntMap (Int, Int, Int, Int, Int, Int, Int)
-> U.Vector (Int, Int)
-> IM.IntMap (Int, Int, Int, Int, Int, Int, Int)
updateMap sh =
U.ifoldl (\mp turn (a, b) -> mp & ix a %~ update (sh+turn) True b
& ix b %~ update (sh+turn) False a)
let pastMap = updateMap 0 initMap past
let rate (_, _, _, _, minRounds, minRoundsSame, minSwapped) =
(case minSwapped of
1 -> 5
2 -> 4
_ -> 1 / (1 + fromIntegral minSwapped)) +
(case minRounds of
1 -> 2
_ -> 1 / fromIntegral minRounds) +
(case minRoundsSame of
1 -> 3
2 -> 1.5
_ -> 1 / fromIntegral minRoundsSame)
(\future -> sum $ imap (\i x -> x*0.5^(pc-i-1)) $ sort $
map rate $ IM.elems $ updateMap (U.length past) pastMap future)
-- | Generate a random future schedule (given past games).
randomSchedule :: Int -> U.Vector (Int, Int) -> IO Schedule
randomSchedule pc past =
fmap U.fromList $ shuffleM $
[(x, y) | x <- [0..pc-1], y <- [0..pc-1], x/=y] \\ U.toList past
-- https://en.wikipedia.org/wiki/Simulated_annealing
iterateSchedule
:: Int -- ^ Player count
-> U.Vector (Int, Int) -- ^ Past games
-> Int -- ^ Current iteration
-> Int -- ^ Iterations to do
-> (Schedule, Schedule) -- ^ Current schedule, best schedule
-> IO (Schedule, Schedule) -- ^ New current and best schedule
iterateSchedule pc past kcur kn (cur, bst)
| U.null cur = return (cur, bst)
| otherwise = go (cur, rate cur) (bst, rate bst) kcur kn
where
rate = rateSchedule pc past
p e e' t = if e' < e then 1 else exp ((e-e')/t)
go (s, _) (sbest, _) _ 0 = return (s, sbest)
go (s, rs) (sbest, rsbest) k n = do
s' <- swap2 s
let t = 0.99999**(fromIntegral k)
rs' = rate s'
rnd <- randomIO
let (sbest', rsbest')
| rs' < rsbest = (s', rs')
| otherwise = (sbest, rsbest)
if p rs rs' t >= rnd
then go (s', rs') (sbest', rsbest') (k+1) (n-1)
else go (s , rs ) (sbest', rsbest') (k+1) (n-1)
-- | Swap 2 random elements of an array.
swap2 :: U.Unbox a => U.Vector a -> IO (U.Vector a)
swap2 xs = do
let len = U.length xs
i <- randomRIO (0, len-1)
j <- randomRIO (0, len-1)
return (U.unsafeUpd xs [(i, U.unsafeIndex xs j), (j, U.unsafeIndex xs i)])
-- | Good solutions for group sizes from 4 to 7 (found by running simulated
-- annealing several times and choosing good solutions manually).
precomputedSchedules :: Map Int [U.Vector (Int, Int)]
precomputedSchedules =
over (each.each) U.fromList $
M.fromList $ zip [1..7]
[schedule1, schedule2, schedule3, schedule4,
schedule5, schedule6, schedule7]
schedule1 :: [[(Int, Int)]]
schedule1 = [[]]
schedule2 :: [[(Int, Int)]]
schedule2 = [
[(0,1),(1,0)],
[(1,0),(0,1)] ]
schedule3 :: [[(Int, Int)]]
schedule3 = [
[(0,1),(2,0),(1,2),(0,2),(1,0),(2,1)],
[(2,1),(1,0),(0,2),(1,2),(2,0),(0,1)] ]
schedule4 :: [[(Int, Int)]]
schedule4 = [
[(3,2),(1,0),(0,3),(2,1),(3,0),(0,1),(2,3),(1,2),(2,0),(3,1),(0,2),(1,3)],
[(0,3),(1,2),(3,1),(2,3),(1,0),(0,2),(3,0),(2,1),(1,3),(3,2),(0,1),(2,0)],
[(2,3),(1,0),(3,1),(1,2),(0,3),(2,0),(3,2),(0,1),(1,3),(2,1),(3,0),(0,2)],
[(3,2),(0,1),(1,3),(2,1),(3,0),(0,2),(2,3),(1,0),(3,1),(1,2),(0,3),(2,0)],
[(2,0),(3,1),(0,3),(3,2),(1,0),(2,1),(0,2),(1,3),(3,0),(2,3),(0,1),(1,2)],
[(2,3),(0,1),(3,0),(0,2),(1,3),(2,1),(1,0),(3,2),(0,3),(2,0),(3,1),(1,2)],
[(2,0),(3,1),(0,3),(3,2),(1,0),(2,1),(1,3),(0,2),(3,0),(2,3),(0,1),(1,2)] ]
schedule5 :: [[(Int, Int)]]
schedule5 = [
[(0,1),(2,3),(4,0),(1,2),(3,4),(2,0),(4,1),(3,2),(1,0),(4,3),(2,1),(3,0),(4,2),(1,3),(0,4),(3,1),(2,4),(0,3),(1,4),(0,2)],
[(3,2),(1,0),(4,3),(2,1),(0,4),(1,3),(4,0),(1,2),(3,4),(0,1),(2,3),(1,4),(0,2),(3,1),(4,2),(3,0),(2,4),(0,3),(4,1),(2,0)],
[(1,3),(0,2),(4,1),(3,0),(2,4),(1,0),(4,2),(0,3),(1,4),(3,2),(4,0),(2,1),(0,4),(2,3),(0,1),(3,4),(1,2),(4,3),(2,0),(3,1)],
[(1,0),(4,3),(2,1),(3,0),(2,4),(0,1),(3,2),(0,4),(1,3),(4,2),(3,1),(0,2),(1,4),(0,3),(4,1),(2,0),(3,4),(1,2),(4,0),(2,3)],
[(4,3),(1,0),(2,4),(0,3),(1,2),(4,0),(2,3),(0,1),(3,2),(4,1),(2,0),(3,1),(0,2),(1,4),(3,0),(4,2),(1,3),(0,4),(2,1),(3,4)],
[(3,4),(0,2),(1,3),(2,4),(0,1),(4,3),(2,0),(3,1),(0,4),(1,2),(4,0),(2,1),(0,3),(4,2),(3,0),(1,4),(2,3),(4,1),(3,2),(1,0)] ]
schedule6 :: [[(Int, Int)]]
schedule6 = [
[(3,4),(0,1),(2,5),(1,3),(5,0),(2,1),(4,3),(1,5),(0,4),(3,2),(4,1),(0,3),(5,4),(0,2),(3,1),(0,5),(2,4),(5,3),(1,2),(4,0),(2,3),(1,4),(3,0),(5,2),(1,0),(3,5),(4,2),(5,1),(2,0),(4,5)],
[(2,4),(0,5),(1,3),(4,0),(3,2),(5,4),(0,2),(3,5),(4,1),(5,0),(2,3),(1,5),(0,4),(2,1),(4,3),(1,0),(2,5),(0,3),(5,1),(3,4),(1,2),(5,3),(4,2),(3,1),(2,0),(1,4),(5,2),(0,1),(4,5),(3,0)],
[(1,0),(5,2),(3,4),(1,5),(2,0),(4,1),(5,0),(1,3),(4,2),(3,5),(0,4),(2,1),(0,3),(5,4),(3,1),(0,2),(4,3),(2,5),(1,4),(3,0),(4,5),(1,2),(5,3),(2,4),(0,1),(3,2),(4,0),(5,1),(2,3),(0,5)],
[(1,2),(0,5),(3,4),(5,2),(4,1),(3,5),(2,4),(1,0),(3,2),(4,5),(1,3),(2,0),(5,1),(0,3),(4,2),(3,1),(5,0),(4,3),(2,1),(0,4),(1,5),(2,3),(5,4),(0,1),(2,5),(3,0),(1,4),(0,2),(5,3),(4,0)],
[(5,3),(0,1),(2,4),(5,0),(4,1),(3,2),(1,0),(4,5),(3,1),(2,0),(4,3),(1,2),(0,4),(3,5),(2,1),(5,4),(3,0),(4,2),(1,3),(2,5),(4,0),(5,1),(0,2),(1,4),(2,3),(0,5),(3,4),(5,2),(0,3),(1,5)],
[(2,1),(0,4),(5,3),(4,1),(2,0),(5,4),(1,3),(2,5),(3,4),(0,2),(1,5),(3,0),(4,2),(0,5),(3,1),(4,0),(2,3),(1,4),(3,5),(0,1),(2,4),(5,0),(1,2),(0,3),(4,5),(3,2),(5,1),(4,3),(1,0),(5,2)],
[(4,1),(3,2),(0,5),(1,3),(2,4),(5,1),(4,3),(2,5),(3,0),(5,4),(0,1),(4,2),(1,5),(0,2),(3,1),(5,0),(2,3),(1,4),(3,5),(4,0),(2,1),(3,4),(1,0),(5,2),(0,3),(4,5),(1,2),(0,4),(5,3),(2,0)] ]
schedule7 :: [[(Int, Int)]]
schedule7 = [
[(4,1),(2,6),(3,0),(1,5),(0,2),(6,4),(2,1),(5,0),(3,2),(0,1),(4,5),(3,6),(1,4),(2,5),(0,3),(6,2),(3,1),(5,6),(4,3),(2,0),(5,1),(0,4),(6,3),(5,2),(4,6),(1,3),(6,0),(5,4),(1,6),(2,3),(0,5),(4,2),(1,0),(5,3),(0,6),(3,4),(6,5),(4,0),(1,2),(3,5),(2,4),(6,1)],
[(0,1),(5,4),(3,6),(0,2),(1,3),(6,0),(5,1),(2,6),(4,0),(5,2),(1,4),(3,0),(2,1),(4,3),(5,0),(6,4),(1,5),(0,6),(2,3),(6,1),(4,2),(5,6),(2,0),(3,4),(0,5),(6,2),(1,0),(4,6),(5,3),(2,4),(6,5),(3,2),(1,6),(2,5),(4,1),(0,3),(1,2),(3,5),(0,4),(6,3),(4,5),(3,1)],
[(2,3),(5,4),(6,0),(1,2),(0,5),(3,6),(4,1),(6,2),(5,3),(1,6),(2,4),(0,1),(6,5),(4,0),(5,2),(0,6),(3,1),(2,0),(1,5),(0,3),(2,6),(3,5),(6,4),(1,0),(2,5),(4,3),(5,1),(3,2),(0,4),(5,6),(4,2),(6,1),(3,4),(5,0),(4,6),(0,2),(1,4),(6,3),(2,1),(3,0),(4,5),(1,3)],
[(4,3),(2,0),(1,5),(6,3),(5,4),(0,1),(4,6),(3,0),(1,4),(5,2),(3,6),(2,1),(6,5),(4,2),(1,6),(0,4),(3,5),(6,0),(2,3),(5,1),(0,2),(1,3),(2,6),(4,5),(3,2),(5,0),(6,1),(3,4),(0,6),(2,5),(1,0),(5,3),(4,1),(6,2),(0,3),(2,4),(5,6),(1,2),(4,0),(3,1),(6,4),(0,5)],
[(0,3),(5,4),(2,1),(3,6),(4,2),(6,5),(0,4),(5,3),(1,0),(4,6),(0,5),(3,4),(6,0),(1,3),(2,6),(4,1),(3,2),(6,4),(2,0),(4,3),(0,1),(6,2),(1,4),(2,5),(0,6),(5,1),(4,0),(3,5),(1,2),(5,6),(2,3),(4,5),(0,2),(1,6),(5,0),(3,1),(2,4),(6,3),(1,5),(3,0),(5,2),(6,1)],
[(4,6),(5,0),(3,2),(6,1),(0,3),(2,4),(6,0),(1,2),(3,5),(2,6),(4,1),(5,2),(1,3),(0,5),(6,4),(3,0),(5,1),(2,3),(1,4),(6,5),(3,1),(5,4),(1,6),(0,2),(4,3),(2,5),(1,0),(6,2),(0,4),(2,1),(3,6),(4,2),(5,3),(0,6),(3,4),(2,0),(6,3),(4,5),(0,1),(5,6),(4,0),(1,5)],
[(0,2),(5,4),(6,1),(2,3),(4,6),(3,5),(2,1),(6,3),(4,0),(5,6),(3,1),(6,2),(1,5),(0,6),(2,4),(5,0),(4,3),(6,5),(1,4),(3,2),(0,1),(4,5),(1,3),(5,2),(3,6),(4,1),(6,0),(1,2),(0,4),(5,3),(4,2),(1,0),(3,4),(2,5),(0,3),(1,6),(2,0),(5,1),(6,4),(0,5),(2,6),(3,0)],
[(3,6),(4,1),(0,5),(2,6),(3,4),(1,2),(4,6),(3,0),(5,1),(6,3),(0,2),(4,5),(2,3),(0,6),(5,2),(1,4),(3,5),(4,0),(5,6),(2,4),(1,0),(4,3),(6,1),(0,4),(1,3),(6,5),(2,1),(5,0),(3,2),(1,5),(6,4),(2,0),(3,1),(6,2),(0,3),(5,4),(1,6),(2,5),(0,1),(4,2),(5,3),(6,0)],
[(5,3),(4,0),(1,6),(3,2),(5,4),(6,0),(3,1),(2,4),(5,6),(0,2),(4,1),(3,5),(1,2),(4,6),(2,3),(0,1),(6,5),(4,3),(2,6),(0,5),(1,4),(2,0),(3,6),(5,1),(0,4),(1,3),(5,2),(6,4),(3,0),(1,5),(0,6),(4,2),(5,0),(6,3),(2,1),(4,5),(0,3),(6,2),(1,0),(3,4),(2,5),(6,1)] ]
| neongreen/hat | src/Schedule.hs | bsd-3-clause |
{-# LANGUAGE TemplateHaskell #-}
{- |
Module : Verifier.SAW.Cryptol.PreludeM
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : huffman@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.Cryptol.PreludeM
( Module
, module Verifier.SAW.Cryptol.PreludeM
, scLoadPreludeModule
) where
import Verifier.SAW.Prelude
import Verifier.SAW.ParserUtils
$(defineModuleFromFileWithFns
"cryptolMModule" "scLoadCryptolMModule" "saw/CryptolM.sawcore")
| GaloisInc/saw-script | cryptol-saw-core/src/Verifier/SAW/Cryptol/PreludeM.hs | bsd-3-clause |
module QueryArrow.ElasticSearch.Record where
import Data.Aeson (parseJSON, toJSON, FromJSON, ToJSON, Value)
import Data.Map.Strict (Map, union, delete)
import Data.Text (Text)
import Control.Applicative ((<$>))
newtype ESRecord = ESRecord (Map Text Value) deriving Show
instance FromJSON ESRecord where
parseJSON a = ESRecord <$> parseJSON a
instance ToJSON ESRecord where
toJSON (ESRecord a) = toJSON a
updateProps :: ESRecord -> ESRecord -> ESRecord
updateProps (ESRecord diff) (ESRecord orig) = ESRecord (diff `union` orig)
deleteProps :: [Text] -> ESRecord -> ESRecord
deleteProps diff (ESRecord orig) = ESRecord (foldr delete orig diff)
| xu-hao/QueryArrow | QueryArrow-db-elastic/src/QueryArrow/ElasticSearch/Record.hs | bsd-3-clause |
module Matterhorn.Draw.ReactionEmojiListOverlay
( drawReactionEmojiListOverlay
)
where
import Prelude ()
import Matterhorn.Prelude
import Brick
import Brick.Widgets.List ( listSelectedFocusedAttr )
import qualified Data.Text as T
import Matterhorn.Draw.ListOverlay ( drawListOverlay, OverlayPosition(..) )
import Matterhorn.Types
import Matterhorn.Themes
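-- | Draw the emoji-search overlay used for picking a reaction emoji.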
drawReactionEmojiListOverlay :: ChatState -> Widget Name
drawReactionEmojiListOverlay st =
let overlay = drawListOverlay (st^.csCurrentTeam.tsReactionEmojiListOverlay)
(const $ txt "Search Emoji")
(const $ txt "No matching emoji found.")
(const $ txt "Search emoji:")
renderEmoji
Nothing
OverlayCenter
80
in joinBorders overlay
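-- | Render a single emoji entry; entries flagged as the user's own reaction
-- are marked with an asterisk.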
renderEmoji :: Bool -> (Bool, T.Text) -> Widget Name
renderEmoji sel (mine, e) =
let maybeForce = if sel
then forceAttr listSelectedFocusedAttr
else id
in clickable (ReactionEmojiListOverlayEntry (mine, e)) $
maybeForce $
padRight Max $
hBox [ if mine then txt " * " else txt " "
, withDefAttr emojiAttr $ txt $ ":" <> e <> ":"
]
| matterhorn-chat/matterhorn | src/Matterhorn/Draw/ReactionEmojiListOverlay.hs | bsd-3-clause |
module SimpleModule.EvaluatorSuite
( tests
) where
import Data.List (stripPrefix)
import SimpleModule.Data
import SimpleModule.Evaluator
import SimpleModule.Parser (expression)
import Test.HUnit
tests :: Test
tests = TestList
[ testEq "Eval const" (ExprNum 3) "3"
, testEq "Eval bounded var" (ExprNum 5) "let v = 5 in v"
, testEq "Eval binary num-to-num operator" (ExprNum 5) "-(10, 5)"
, testEq "Eval binary num-to-bool operator (true case)"
(ExprBool True) "greater?(4, 3)"
, testEq "Eval binary num-to-bool operator (false case)"
(ExprBool False) "less?(5,2)"
, testEq "Eval minus" (ExprNum (-1)) "minus(1)"
, testLet
, testCond
, testProc
, testModule
]
testLet :: Test
testLet = TestList
[ testEq "Eval let"
(ExprNum 1)
$ unlines
[ "let x = 30"
, "in let x = -(x,1)"
, " y = -(x,2)"
, " in -(x,y)"
]
, testEq "Eval letrec"
(ExprNum 12)
$ unlines
[ "letrec int double(x: int)"
, " = if zero?(x) then 0 else -((double -(x,1)), -2)"
, "in (double 6)"
]
, testEq "Eval letrec with multi parameters"
(ExprNum 12)
$ unlines
[ "letrec int double(x: int, dummy: int)"
, " = if zero?(x) then 0 else -((double -(x,1) dummy), -2)"
, "in (double 6 10000)"
]
, testEq "Eval co-recursion"
(ExprNum 1)
$ unlines
[ "letrec"
, " int even(x: int) = if zero?(x) then 1 else (odd -(x,1))"
, " int odd(x: int) = if zero?(x) then 0 else (even -(x,1))"
, "in (odd 13)"
]
]
testCond :: Test
testCond = TestList
[ testEq "Eval true branch of if expression"
(ExprNum 3)
"if zero? (0) then 3 else 4"
, testEq "Eval false branch of if expression"
(ExprNum 4)
"if zero? (5) then 3 else 4"
, testError "Empty condition expression should fail" "cond end"
, testError "A condition expression with no true predicate should fail"
"cond zero?(5) ==> 3 greater?(5, 10) ==> 4 end"
, testEq "Match first condition"
(ExprNum 1)
"cond zero?(0) ==> 1 zero?(0) ==> 2 zero?(0) ==> 3 end"
, testEq "Match third condition"
(ExprNum 3)
"cond zero?(1) ==> 1 zero?(2) ==> 2 zero?(0) ==> 3 end"
]
testProc :: Test
testProc = TestList
[ testEq "Eval proc and call expression (no parameter)"
(ExprNum 1)
"(proc () 1)"
, testEq "Eval proc and call expression (1 parameter)"
(ExprNum 2)
"(proc (x: int) + (x, x) 1)"
, testEq "Eval proc and call expression (many parameters)"
(ExprNum 7)
"(proc (x: int, y: int, z: int) + (x, * (y, z)) 1 2 3)"
, testEq "Eval named proc"
(ExprNum 7)
"let f = proc (x: int, y: int, z: int) + (x, * (y, z)) in (f 1 2 3)"
, testError "Too many parameters" "(proc () 1 1)"
, testError "Too many arguments" "(proc (x: int, y: int) +(x, y) 1)"
]
testModule :: Test
testModule = TestList
[ testEq "Test program with modules"
(ExprNum 44)
$ unlines
[ "module m1"
, "interface"
, " [a : int"
, " b : int"
, " c : int]"
, "body"
, " [a = 33"
, " b = 44"
, " c = 55]"
, "module m2"
, "interface"
, " [a : int"
, " b : int]"
, "body"
, " [a = 66"
, " b = 77]"
, "let z = 11"
, "in -(z, -(from m1 take a, from m2 take a))"
]
]
testEq :: String -> ExpressedValue -> String -> Test
testEq msg expect input = TestCase $
assertEqual msg (Right expect) (run input)
testError :: String -> String -> Test
testError msg input = TestCase $
assertBool msg evalRes
where
evalRes = case run input of
Left (ParseError _) -> False
Right _ -> False
_ -> True
| li-zhirui/EoplLangs | test/SimpleModule/EvaluatorSuite.hs | bsd-3-clause |
{-# OPTIONS_GHC -w #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
-- | A module where tuple classes and instances are created up to 16-tuples using 'makeTupleRefs'.
-- The number of classes and instances can be changed by hiding the import of this module
-- and calling 'makeTupleRefs' in another module.
module Control.Reference.TupleInstances where
import Control.Reference.TH.Tuple
$(makeTupleRefs hsTupConf 16 16)
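-- A sketch of the customisation described in the module header, assuming the
-- same 'makeTupleRefs' / 'hsTupConf' interface used above (the module name is
-- made up, and such a module needs the same LANGUAGE pragmas as this one):
--
-- > module MyTupleInstances where
-- > import Control.Reference.TH.Tuple
-- > $(makeTupleRefs hsTupConf 8 8)  -- the two numbers mirror the 16 16 above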
| nboldi/references | Control/Reference/TupleInstances.hs | bsd-3-clause |
module Main (main) where
import Prelude ()
import Prelude.Compat
import Criterion.Main
import qualified Typed.Generic as Generic
import qualified Typed.Manual as Manual
import qualified Typed.TH as TH
main :: IO ()
main = defaultMain [
Generic.benchmarks
, Manual.benchmarks
, TH.benchmarks
, Generic.decodeBenchmarks
, Manual.decodeBenchmarks
, TH.decodeBenchmarks
]
| sol/aeson | benchmarks/Typed.hs | bsd-3-clause |
module Examples.Language.MicroKanren where
import Language.MicroKanren
import Numeric.Natural
q5 :: Goal Natural
q5 = callFresh $ \q -> Variable q === Term 5
aAndB :: Goal Natural
aAndB = conj (callFresh $ \a -> Variable a === Term 7)
(callFresh $ \b -> disj (Variable b === Term 5)
(Variable b === Term 6))
bs :: Goal Natural
bs = callFresh $ \b -> disj (Variable b === Term 1) (Variable b === Term 2)
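-- | A goal with infinitely many answers, all binding the given variable to 5.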
fives :: Var -> Goal Natural
fives x = disj (Variable x === Term 5) (fives x)
sixes :: Var -> Goal Natural
sixes x = disj (Variable x === Term 6) (sixes x)
fivesAndSixes :: Goal Natural
fivesAndSixes = callFresh $ disj <$> fives <*> sixes
q3 =
callFresh $ \q ->
callFresh $ \x ->
callFresh $ \z ->
conj (Variable x === Variable z) (conj (Term (Just 3) === Variable z) (Variable q === Variable x))
q = callFresh $ \q ->
callFresh $ \x ->
Variable x === Term 3
main :: IO ()
main = do
print $ q5 emptyState
print $ aAndB emptyState
print $ bs emptyState
print $ take 5 $ fivesAndSixes emptyState
print $ run 1 q3
print $ run 1 q5
print $ run 5 fivesAndSixes
| joneshf/MicroKanren | examples/Examples/Language/MicroKanren.hs | bsd-3-clause |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Exception.IOChoice.TH
import Control.Exception
(|||>) :: IO a -> IO a -> IO a
(|||>) = $(newIOChoice [''ErrorCall, ''ArithException])
main :: IO ()
main = do
a0 <- evaluate (1 `div` 0)
|||> return 3
putStrLn $ "Should be 3: " ++ show a0
a1 <- error "Unexpected answer!"
|||> return "expected answer."
putStrLn $ "This is an " ++ a1
a2 <- ioError (userError "IO Error!")
|||> return "IO Exception is handled by default."
putStrLn a2
a3 <- assert False (return "should be fail.")
|||> return "this should not be seen."
putStrLn $ "This message should not be printed: " ++ a3
| kazu-yamamoto/io-choice | examples/derive-test.hs | bsd-3-clause |
{-# LANGUAGE OverloadedStrings #-}
module Network.N2O (
b2t,t2b,
send,
call,
assign,
Network.N2O.runServer,
broadcast
) where
import Control.Exception
import Control.Concurrent
import Control.Monad
import Data.BERT
import Data.Binary
import Data.Text.Encoding
import Network.N2O.PubSub
import qualified Data.Text as T
import Network.WebSockets as WS hiding (send)
import qualified Data.ByteString.Lazy as BL
b2t :: BL.ByteString -> T.Text
b2t = decodeUtf8 . BL.toStrict
t2b :: T.Text -> BL.ByteString
t2b = BL.fromStrict . encodeUtf8
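-- | Encode a piece of JavaScript as the BERT tuple @{io, Script, []}@ used by
-- the N2O protocol.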
eval :: T.Text -> BL.ByteString
eval x = encode $ TupleTerm [AtomTerm "io", showBERT $ t2b x, NilTerm]
call fun arg = T.concat [fun, "('", arg, "');"]
assign elem arg = T.concat [elem, ".innerHTML='", arg, "';"]
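-- Handle one pending WebSocket connection: subscribe it, wait for the N2O
-- handshake, process incoming messages until the client disconnects, then
-- unsubscribe and notify the handler with N2O_DISCONNECT.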
application emptyEntry nextMessage handle state pending = do
connection <- WS.acceptRequest pending
putStrLn "accepted"
socketId <- modifyMVar state $ return . subscribe connection emptyEntry
putStrLn $ "Connected socketId = " ++ show socketId
(receiveN2O connection >> forever (nextMessage handle state connection socketId)) `catch` somecatch `catch` iocatch -- `catch` (\e -> print $ "Got exception " ++ show (e::WS.ConnectionException)) `catch` somecatch
putStrLn $ "Disconnected socketId = " ++ show socketId
entry <- byUnique state socketId
modifyMVar_ state $ return . unsubscribe socketId
handle state entry [AtomTerm "N2O_DISCONNECT"]
somecatch :: SomeException -> IO ()
somecatch e = print "SomeException" >> print e
iocatch :: IOException -> IO ()
iocatch _ = print "IOException"
runServer ip port handle emptyEntry = do
putStrLn $ "Listening on " ++ ip ++ ":" ++ show port
state <- newMVar newChannel
WS.runServer ip port $ application emptyEntry nextMessage handle state
nextMessage handle state connection socketId = do
message <- receiveMessage connection
print message
entry <- byUnique state socketId
handle state entry message
receiveN2O connection = do
message <- WS.receiveDataMessage connection
case message of
WS.Text "N2O," -> return ()
WS.Binary _ -> error "Protocol violation 4"
WS.Text _ -> error "Protocol violation 3"
receiveMessage connection = do
let loop = receiveMessage connection
message <- WS.receiveDataMessage connection
case message of
WS.Binary x -> case decode x of
TupleTerm x -> return x
_ -> error "Protocol violation"
WS.Text x
| x == "PING" -> loop
| otherwise -> error "protocol violation 2"
send conn = WS.sendBinaryData conn . eval
broadcast message
= mapM_ $ \entry -> send entry message
| 5HT/n2o.hs | src/Network/N2O.hs | isc |
{- |
Module : ./CASL/CCC/TermFormula.hs
Description : auxiliary functions on terms and formulas
Copyright : (c) Mingyi Liu, Till Mossakowski, Uni Bremen 2004-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Auxiliary functions on terms and formulas
-}
module CASL.CCC.TermFormula where
import CASL.AS_Basic_CASL
import CASL.Fold
import CASL.Overload
import CASL.Quantification
import CASL.Sign
import CASL.ToDoc
import CASL.Utils
import Common.Id
import Common.Result
import Control.Monad
import Data.Function
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | strip sort annotations ('Sorted_term' and 'Cast') from a term
unsortedTerm :: TERM f -> TERM f
unsortedTerm t = case t of
Sorted_term t' _ _ -> unsortedTerm t'
Cast t' _ _ -> unsortedTerm t'
_ -> t
-- | check whether an existential (or unique-existential) quantification occurs
isExQuanti :: FORMULA f -> Bool
isExQuanti f = case f of
Quantification Universal _ f' _ -> isExQuanti f'
Quantification {} -> True
Relation f1 _ f2 _ -> isExQuanti f1 || isExQuanti f2
Negation f' _ -> isExQuanti f'
_ -> False
-- | check whether it contains a membership formula
isMembership :: FORMULA f -> Bool
isMembership f = case f of
Quantification _ _ f' _ -> isMembership f'
Junction _ fs _ -> any isMembership fs
Negation f' _ -> isMembership f'
Relation f1 _ f2 _ -> isMembership f1 || isMembership f2
Membership {} -> True
_ -> False
-- | check whether it contains a definedness formula
containDef :: FORMULA f -> Bool
containDef f = case f of
Quantification _ _ f' _ -> containDef f'
Junction _ fs _ -> any containDef fs
Relation f1 _ f2 _ -> containDef f1 || containDef f2
Negation f' _ -> containDef f'
Definedness _ _ -> True
_ -> False
-- | check whether it contains a negation
containNeg :: FORMULA f -> Bool
containNeg f = case f of
Quantification _ _ f' _ -> containNeg f'
Relation _ c f' _ | c /= Equivalence -> containNeg f'
Relation f' Equivalence _ _ -> containNeg f'
Negation _ _ -> True
_ -> False
domainDef :: FORMULA f -> Maybe (TERM f, FORMULA f)
domainDef f = case stripAllQuant f of
Relation (Definedness t _) Equivalence f' _
| not (containDef f') -> Just (t, f')
_ -> Nothing
-- | check whether it is a Variable
isVar :: TERM t -> Bool
isVar t = case unsortedTerm t of
Qual_var {} -> True
_ -> False
-- | extract all variables of a term
varOfTerm :: Ord f => TERM f -> [TERM f]
varOfTerm t = case unsortedTerm t of
Qual_var {} -> [t]
Application _ ts _ -> concatMap varOfTerm ts
_ -> []
-- | extract all arguments of a term
arguOfTerm :: TERM f -> [TERM f]
arguOfTerm = snd . topIdOfTerm
nullId :: ((Id, Int), [TERM f])
nullId = ((stringToId "", 0), [])
topIdOfTerm :: TERM f -> ((Id, Int), [TERM f])
topIdOfTerm t = case unsortedTerm t of
Application o ts _ -> ((opSymbName o, length ts), ts)
_ -> nullId
-- | get the patterns of an axiom
patternsOfAxiom :: FORMULA f -> [TERM f]
patternsOfAxiom = snd . topIdOfAxiom
topIdOfAxiom :: FORMULA f -> ((Id, Int), [TERM f])
topIdOfAxiom f = case stripAllQuant f of
Negation f' _ -> topIdOfAxiom f'
Relation _ c f' _ | c /= Equivalence -> topIdOfAxiom f'
Relation f' Equivalence _ _ -> topIdOfAxiom f'
Predication p ts _ -> ((predSymbName p, length ts), ts)
Equation t _ _ _ -> topIdOfTerm t
Definedness t _ -> topIdOfTerm t
_ -> nullId
-- | split the axiom into condition and rest axiom
splitAxiom :: FORMULA f -> ([FORMULA f], FORMULA f)
splitAxiom f = case stripAllQuant f of
Relation f1 c f2 _ | c /= Equivalence ->
let (f3, f4) = splitAxiom f2 in (f1 : f3, f4)
f' -> ([], f')
-- | get the premise of a formula; if there is no implication, return true
conditionAxiom :: FORMULA f -> FORMULA f
conditionAxiom = conjunct . fst . splitAxiom
-- | get the conclusion of a formula; if there is no implication, return the whole formula
restAxiom :: FORMULA f -> FORMULA f
restAxiom = snd . splitAxiom
-- | get the right-hand side of an equivalence; if there is no equivalence, return true
resultAxiom :: FORMULA f -> FORMULA f
resultAxiom f = case restAxiom f of
Relation _ Equivalence f' _ -> f'
_ -> trueForm
-- | get the right-hand side of an equation; if there is no equation, return a dummy term
resultTerm :: FORMULA f -> TERM f
resultTerm f = case restAxiom f of
Negation (Definedness _ _) _ ->
varOrConst (mkSimpleId "undefined")
Equation _ _ t _ -> t
_ -> varOrConst (mkSimpleId "unknown")
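-- | try to compute substitutions that make the leading argument patterns of
-- two axioms overlap, returning for each axiom its substitution together with
-- any generated membership conditions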
getSubstForm :: (FormExtension f, TermExtension f, Ord f) => Sign f e
-> FORMULA f -> FORMULA f
-> Result ((Subst f, [FORMULA f]), (Subst f, [FORMULA f]))
getSubstForm sig f1 f2 = do
let p1 = patternsOfAxiom f1
p2 = patternsOfAxiom f2
getVars = Set.map fst . freeVars sig . stripAllQuant
getSubst sig (p1, getVars f1) (p2, getVars f2)
mkCast :: SORT -> TERM f -> SORT -> (TERM f, [FORMULA f])
mkCast s2 t s1 = if s1 == s2 then (t, []) else
case unsortedTerm t of
Qual_var v _ r -> (Qual_var v s1 r, [])
_ -> (Cast t s1 nullRange, [Membership t s1 nullRange])
mkSortedTerm :: SORT -> TERM f -> SORT -> TERM f
mkSortedTerm s1 t s2 = if s1 == s2 then t else
case unsortedTerm t of
Qual_var v _ r -> Qual_var v s2 r
_ -> Sorted_term t s2 nullRange
minSortTerm :: TermExtension f => TERM f -> TERM f
minSortTerm t = case t of
Sorted_term st _ _ -> case optTermSort st of
Nothing -> t
Just _ -> minSortTerm st
_ -> t
mkSTerm :: TermExtension f => Sign f e -> SORT -> TERM f
-> (TERM f, [FORMULA f])
mkSTerm sig s t =
let s2 = fromMaybe s $ optTermSort t
t0 = minSortTerm t
s0 = fromMaybe s $ optTermSort t0
in case maximalSubs sig s s2 of
l@(s1 : _) -> let
s3 = if s0 == s2 then s1 else
fromMaybe s1 $ find (leqSort sig s0) l
(s4, fs) = mkCast s2 t s3
in (mkSortedTerm s3 s4 s, fs)
_ -> error $ "CCC.mkSTerm " ++ show (s0, s, s2)
getSubst :: (FormExtension f, TermExtension f, Ord f) => Sign f e
-> ([TERM f], Set.Set VAR) -> ([TERM f], Set.Set VAR)
-> Result ((Subst f, [FORMULA f]), (Subst f, [FORMULA f]))
getSubst sig (p1, vs1) (p2, vs2) =
let getVars = Set.map fst . freeTermVars sig
in case (p1, p2) of
([], []) -> do
let i = Set.intersection vs1 vs2
unless (Set.null i)
$ appendDiags [mkDiag Warning "possibly conflicting variables" i]
return ((Map.empty, []), (Map.empty, []))
(hd1 : tl1, hd2 : tl2) ->
let r = getSubst sig (tl1, vs1) (tl2, vs2)
mkS1 v1 s1 = do
let (hd3, fs) = mkSTerm sig s1 hd2
((m1, fs1), m2) <- getSubst sig
(tl1, Set.delete v1 vs1) (tl2, vs2)
return ((Map.insert v1 hd3 m1, fs ++ fs1), m2)
mkS2 v2 s2 = do
let (hd3, fs) = mkSTerm sig s2 hd1
(m1, (m2, fs2)) <- getSubst sig (tl1, vs1)
(tl2, Set.delete v2 vs2)
return (m1, (Map.insert v2 hd3 m2, fs ++ fs2))
cond v vs hd = Set.member v vs && Set.notMember v (getVars hd)
diag v = appendDiags [mkDiag Warning
"unsupported occurrence of variable" v] >> r
in case (unsortedTerm hd1, unsortedTerm hd2) of
(Qual_var v1 s1 _, Qual_var v2 s2 _)
| v1 == v2 && s1 == s2 -> getSubst sig (tl1, Set.delete v1 vs1)
(tl2, Set.delete v2 vs2)
| Set.member v1 vs2 ->
if Set.member v2 vs1
then appendDiags [mkDiag Warning
("unsupported mix of variables '"
++ show v1 ++ "' and") v2] >> r
else mkS1 v1 s1
| otherwise -> mkS2 v2 s2
(Qual_var v1 s1 _, _) ->
if cond v1 vs2 hd2 then diag v1
else mkS1 v1 s1
(_, Qual_var v2 s2 _) ->
if cond v2 vs1 hd1 then diag v2
else mkS2 v2 s2
(_, _) | sameOpsApp sig hd1 hd2 ->
getSubst sig (arguOfTerm hd1 ++ tl1, vs1)
(arguOfTerm hd2 ++ tl2, vs2)
_ -> mkError "no overlap at" hd1
_ -> error "non-matching leading terms"
-- | extract defined subsorts
isSubsortDef :: FORMULA f -> Maybe (SORT, VAR_DECL, FORMULA f)
isSubsortDef f = case f of
Quantification Universal [vd@(Var_decl [v] super _)]
(Relation (Membership (Qual_var v2 super2 _) sub _) Equivalence f1 _) _
| (v, super) == (v2, super2) -> Just (sub, vd, f1)
_ -> Nothing
-- | create the obligations for subsorts
infoSubsorts :: Set.Set SORT -> [(SORT, VAR_DECL, FORMULA f)] -> [FORMULA f]
infoSubsorts emptySorts = map (\ (_, v, f) -> mkExist [v] f)
. filter (\ (s, _, _) -> not $ Set.member s emptySorts)
-- | extract the leading symbol from a formula
leadingSym :: GetRange f => FORMULA f -> Maybe (Either OP_SYMB PRED_SYMB)
leadingSym = fmap extractLeadingSymb . leadingTermPredication
-- | extract the leading symbol with the range from a formula
leadingSymPos :: GetRange f => FORMULA f
-> (Maybe (Either (TERM f) (FORMULA f)), Range)
leadingSymPos f = leading (f, False, False, False) where
-- three booleans to indicate inside implication, equivalence or negation
leadTerm t q = case unsortedTerm t of
a@(Application _ _ p) -> (Just (Left a), p)
_ -> (Nothing, q)
leading (f1, b1, b2, b3) = case (stripAllQuant f1, b1, b2, b3) of
(Negation f' _, _, _, False) ->
leading (f', b1, b2, True)
(Relation _ c f' _, _, False, False)
| c /= Equivalence ->
leading (f', True, False, False)
(Relation f' Equivalence _ _, _, False, False) ->
leading (f', b1, True, False)
(Definedness t q, _, _, _) -> leadTerm t q
(pr@(Predication _ _ p), _, _, _) ->
(Just (Right pr), p)
(Equation t _ _ q, _, False, False) -> leadTerm t q
_ -> (Nothing, getRange f1)
-- | extract the leading term or predication from a formula
leadingTermPredication :: GetRange f => FORMULA f
-> Maybe (Either (TERM f) (FORMULA f))
leadingTermPredication = fst . leadingSymPos
-- | extract the leading symbol from a term or a formula
extractLeadingSymb :: Either (TERM f) (FORMULA f) -> Either OP_SYMB PRED_SYMB
extractLeadingSymb lead = case lead of
Left (Application os _ _) -> Left os
Right (Predication p _ _) -> Right p
_ -> error "CASL.CCC.TermFormula<extractLeadingSymb>"
-- | replaces variables by terms in a term or formula
substRec :: Eq f => [(TERM f, TERM f)] -> Record f (FORMULA f) (TERM f)
substRec subs = (mapRecord id)
{ foldQual_var = \ t _ _ _ -> subst subs t } where
subst l tt = case l of
[] -> tt
(n, v) : r -> if tt == v then n else subst r tt
substitute :: Eq f => [(TERM f, TERM f)] -> TERM f -> TERM f
substitute = foldTerm . substRec
substiF :: Eq f => [(TERM f, TERM f)] -> FORMULA f -> FORMULA f
substiF = foldFormula . substRec
sameOpTypes :: Sign f e -> OP_TYPE -> OP_TYPE -> Bool
sameOpTypes sig = on (leqF sig) toOpType
sameOpSymbs :: Sign f e -> OP_SYMB -> OP_SYMB -> Bool
sameOpSymbs sig o1 o2 = on (==) opSymbName o1 o2 && case (o1, o2) of
(Qual_op_name _ t1 _, Qual_op_name _ t2 _) -> sameOpTypes sig t1 t2
_ -> False
-- | check whether two terms are applications of the same operation symbol
sameOpsApp :: Sign f e -> TERM f -> TERM f -> Bool
sameOpsApp sig app1 app2 = case (unsortedTerm app1, unsortedTerm app2) of
(Application o1 _ _, Application o2 _ _) -> sameOpSymbs sig o1 o2
_ -> False
eqPattern :: Sign f e -> TERM f -> TERM f -> Bool
eqPattern sig t1 t2 = case (unsortedTerm t1, unsortedTerm t2) of
(Qual_var v1 _ _, Qual_var v2 _ _) -> v1 == v2
_ | sameOpsApp sig t1 t2 ->
and $ on (zipWith $ eqPattern sig) arguOfTerm t1 t2
_ -> False
| spechub/Hets | CASL/CCC/TermFormula.hs | gpl-2.0 |
module PMC10 where
main :: Int
main = length (filter even [1..1000])
| roberth/uu-helium | test/correct/PMC10.hs | gpl-3.0 |
{-# LANGUAGE CPP #-}
-- |
-- Module: Data.Aeson
-- Copyright: (c) 2011, 2012 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Types and functions for working efficiently with JSON data.
--
-- (A note on naming: in Greek mythology, Aeson was the father of Jason.)
module Data.Aeson
(
-- * How to use this library
-- $use
-- ** Working with the AST
-- $ast
-- ** Decoding to a Haskell value
-- $haskell
-- ** Decoding a mixed-type object
-- $mixed
-- ** Automatically decoding data types
-- $typeable
-- ** Pitfalls
-- $pitfalls
-- * Encoding and decoding
-- $encoding_and_decoding
decode
, decode'
, eitherDecode
, eitherDecode'
, encode
-- ** Variants for strict bytestrings
, decodeStrict
, decodeStrict'
, eitherDecodeStrict
, eitherDecodeStrict'
-- * Core JSON types
, Value(..)
, Array
, Object
-- * Convenience types
, DotNetTime(..)
-- * Type conversion
, FromJSON(..)
, Result(..)
, fromJSON
, ToJSON(..)
#ifdef GENERICS
-- ** Generic JSON classes
, GFromJSON(..)
, GToJSON(..)
, genericToJSON
, genericParseJSON
#endif
-- * Inspecting @'Value's@
, withObject
, withText
, withArray
, withNumber
, withBool
-- * Constructors and accessors
, (.=)
, (.:)
, (.:?)
, (.!=)
, object
-- * Parsing
, json
, json'
) where
import Data.Aeson.Encode (encode)
import Data.Aeson.Parser.Internal (decodeWith, decodeStrictWith,
eitherDecodeWith, eitherDecodeStrictWith,
jsonEOF, json, jsonEOF', json')
import Data.Aeson.Types
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace. This restriction is necessary to ensure
-- that if data is being lazily read from a file handle, the file
-- handle will be closed in a timely fashion once the document has
-- been parsed.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decode :: (FromJSON a) => L.ByteString -> Maybe a
decode = decodeWith jsonEOF fromJSON
{-# INLINE decode #-}
-- | Efficiently deserialize a JSON value from a strict 'B.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decodeStrict :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict = decodeStrictWith jsonEOF fromJSON
{-# INLINE decodeStrict #-}
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace. This restriction is necessary to ensure
-- that if data is being lazily read from a file handle, the file
-- handle will be closed in a timely fashion once the document has
-- been parsed.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decode' :: (FromJSON a) => L.ByteString -> Maybe a
decode' = decodeWith jsonEOF' fromJSON
{-# INLINE decode' #-}
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decodeStrict' :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict' = decodeStrictWith jsonEOF' fromJSON
{-# INLINE decodeStrict' #-}
-- | Like 'decode' but returns an error message when decoding fails.
eitherDecode :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode = eitherDecodeWith jsonEOF fromJSON
{-# INLINE eitherDecode #-}
-- | Like 'decodeStrict' but returns an error message when decoding fails.
eitherDecodeStrict :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict = eitherDecodeStrictWith jsonEOF fromJSON
{-# INLINE eitherDecodeStrict #-}
-- | Like 'decode'' but returns an error message when decoding fails.
eitherDecode' :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode' = eitherDecodeWith jsonEOF' fromJSON
{-# INLINE eitherDecode' #-}
-- | Like 'decodeStrict'' but returns an error message when decoding fails.
eitherDecodeStrict' :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict' = eitherDecodeStrictWith jsonEOF' fromJSON
{-# INLINE eitherDecodeStrict' #-}
-- $use
--
-- This section contains basic information on the different ways to
-- decode data using this library. These range from simple but
-- inflexible, to complex but flexible.
--
-- The most common way to use the library is to define a data type,
-- corresponding to some JSON data you want to work with, and then
-- write either a 'FromJSON' instance, a to 'ToJSON' instance, or both
-- for that type. For example, given this JSON data:
--
-- > { "name": "Joe", "age": 12 }
--
-- we create a matching data type:
--
-- > data Person = Person
-- > { name :: Text
-- > , age :: Int
-- > } deriving Show
--
-- To decode data, we need to define a 'FromJSON' instance:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > instance FromJSON Person where
-- > parseJSON (Object v) = Person <$>
-- > v .: "name" <*>
-- > v .: "age"
-- > -- A non-Object value is of the wrong type, so fail.
-- > parseJSON _ = mzero
--
-- We can now parse the JSON data like so:
--
-- > >>> decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person
-- > Just (Person {name = "Joe", age = 12})
--
-- To encode data, we need to define a 'ToJSON' instance:
--
-- > instance ToJSON Person where
-- > toJSON (Person name age) = object ["name" .= name, "age" .= age]
--
-- We can now encode a value like so:
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
--
-- There are predefined 'FromJSON' and 'ToJSON' instances for many
-- types. Here's an example using lists and 'Int's:
--
-- > >>> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- And here's an example using the 'Data.Map.Map' type to get a map of
-- 'Int's.
--
-- > >>> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- While the notes below focus on decoding, you can apply almost the
-- same techniques to /encoding/ data. (The main difference is that
-- encoding always succeeds, but decoding has to handle the
-- possibility of failure, where an input doesn't match our
-- expectations.)
--
-- See the documentation of 'FromJSON' and 'ToJSON' for some examples
-- of how you can automatically derive instances in some
-- circumstances.
-- $ast
--
-- Sometimes you want to work with JSON data directly, without first
-- converting it to a custom data type. This can be useful if you want
-- to e.g. convert JSON data to YAML data, without knowing what the
-- contents of the original JSON data was. The 'Value' type, which is
-- an instance of 'FromJSON', is used to represent an arbitrary JSON
-- AST (abstract syntax tree). Example usage:
--
-- > >>> decode "{\"foo\": 123}" :: Maybe Value
-- > Just (Object (fromList [("foo",Number 123)]))
--
-- > >>> decode "{\"foo\": [\"abc\",\"def\"]}" :: Maybe Value
-- > Just (Object (fromList [("foo",Array (fromList [String "abc",String "def"]))]))
--
-- Once you have a 'Value' you can write functions to traverse it and
-- make arbitrary transformations.
-- $haskell
--
-- Any instance of 'FromJSON' can be specified (but see the
-- \"Pitfalls\" section here—"Data.Aeson#pitfalls"):
--
-- > λ> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- Alternatively, there are instances for standard data types, so you
-- can use them directly. For example, use the 'Data.Map.Map' type to
-- get a map of 'Int's.
--
-- > λ> :m + Data.Map
-- > λ> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- $mixed
--
-- The above approach with maps of course will not work for mixed-type
-- objects that don't follow a strict schema, but there are a couple
-- of approaches available for these.
--
-- The 'Object' type contains JSON objects:
--
-- > λ> decode "{\"name\":\"Dave\",\"age\":2}" :: Maybe Object
-- > Just (fromList) [("name",String "Dave"),("age",Number 2)]
--
-- You can extract values from it with a parser using 'parse',
-- 'parseEither' or, in this example, 'parseMaybe':
--
-- > λ> do result <- decode "{\"name\":\"Dave\",\"age\":2}"
-- > flip parseMaybe result $ \obj -> do
-- > age <- obj .: "age"
-- > name <- obj .: "name"
-- > return (name ++ ": " ++ show (age*2))
-- >
-- > Just "Dave: 4"
--
-- Considering that any type that implements 'FromJSON' can be used
-- here, this is quite a powerful way to parse JSON. See the
-- documentation in 'FromJSON' for how to implement this class for
-- your own data types.
--
-- The downside is that you have to write the parser yourself; the
-- upside is that you have complete control over the way the JSON is
-- parsed.
-- $typeable
--
-- If you don't want fine control and would prefer the JSON be parsed
-- to your own data types automatically according to some reasonably
-- sensible isomorphic implementation, you can use the generic parser
-- based on 'Data.Typeable.Typeable' and 'Data.Data.Data'. Switch to
-- the 'Data.Aeson.Generic' module, and you can do the following:
--
-- > λ> decode "[1]" :: Maybe [Int]
-- > Just [1]
-- > λ> :m + Data.Typeable Data.Data
-- > λ> :set -XDeriveDataTypeable
-- > λ> data Person = Person { personName :: String, personAge :: Int } deriving (Data,Typeable,Show)
-- > λ> encode Person { personName = "Chris", personAge = 123 }
-- > "{\"personAge\":123,\"personName\":\"Chris\"}"
-- > λ> decode "{\"personAge\":123,\"personName\":\"Chris\"}" :: Maybe Person
-- > Just (Person {
-- > personName = "Chris", personAge = 123
-- > })
--
-- Be aware that the encoding may not always be what you'd naively
-- expect:
--
-- > λ> data Foo = Foo Int Int deriving (Data,Typeable,Show)
-- > λ> encode (Foo 1 2)
-- > "[1,2]"
--
-- With this approach, it's best to treat the
-- 'Data.Aeson.Generic.decode' and 'Data.Aeson.Generic.encode'
-- functions as an isomorphism, and not to rely upon (or care about)
-- the specific intermediate representation.
-- $pitfalls
-- #pitfalls#
--
-- Note that the JSON standard requires that the top-level value be
-- either an array or an object. If you try to use 'decode' with a
-- result type that is /not/ represented in JSON as an array or
-- object, your code will typecheck, but it will always \"fail\" at
-- runtime:
--
-- > >>> decode "1" :: Maybe Int
-- > Nothing
-- > >>> decode "1" :: Maybe String
-- > Nothing
--
-- So stick to objects (e.g. maps in Haskell) or arrays (lists or
-- vectors in Haskell):
--
-- > >>> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- When encoding to JSON you can encode anything that's an instance of
-- 'ToJSON', and this may include simple types. So beware that this
-- aspect of the API is not isomorphic. You can round-trip arrays and
-- maps, but not simple values:
--
-- > >>> encode [1,2,3]
-- > "[1,2,3]"
-- > >>> decode (encode [1]) :: Maybe [Int]
-- > Just [1]
-- > >>> encode 1
-- > "1"
-- > >>> decode (encode (1 :: Int)) :: Maybe Int
-- > Nothing
--
-- Alternatively, see 'Data.Aeson.Parser.value' to parse non-top-level
-- JSON values.
-- $encoding_and_decoding
--
-- Encoding and decoding are each two-step processes.
--
-- * To encode a value, it is first converted to an abstract syntax
-- tree (AST), using 'ToJSON'. This generic representation is then
-- encoded as bytes.
--
-- * When decoding a value, the process is reversed: the bytes are
-- converted to an AST, then the 'FromJSON' class is used to convert
-- to the desired type.
--
-- For convenience, the 'encode' and 'decode' functions combine both
-- steps.
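--
-- A small sketch of the two phases, using the 'Value' AST explicitly:
--
-- > >>> encode (toJSON [1,2,3 :: Int])
-- > "[1,2,3]"
-- > >>> fmap fromJSON (decode "[1,2,3]" :: Maybe Value) :: Maybe (Result [Int])
-- > Just (Success [1,2,3])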
| moonKimura/aeson-0.6.2.1 | Data/Aeson.hs | bsd-3-clause |
{-# LANGUAGE ScopedTypeVariables, CPP, BangPatterns, RankNTypes #-}
#if __GLASGOW_HASKELL__ == 700
-- This is needed as a workaround for an old bug in GHC 7.0.1 (Trac #4498)
{-# LANGUAGE MonoPatBinds #-}
#endif
#if __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Unsafe #-}
#endif
{-# OPTIONS_HADDOCK not-home #-}
-- | Copyright : (c) 2010 - 2011 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Stability : unstable, private
-- Portability : GHC
--
-- *Warning:* this module is internal. If you find that you need it then please
-- contact the maintainers and explain what you are trying to do and discuss
-- what you would need in the public API. It is important that you do this as
-- the module may not be exposed at all in future releases.
--
-- Core types and functions for the 'Builder' monoid and its generalization,
-- the 'Put' monad.
--
-- The design of the 'Builder' monoid is optimized such that
--
-- 1. buffers of arbitrary size can be filled as efficiently as possible and
--
-- 2. sequencing of 'Builder's is as cheap as possible.
--
-- We achieve (1) by completely handing over control over writing to the buffer
-- to the 'BuildStep' implementing the 'Builder'. This 'BuildStep' is just told
-- the start and the end of the buffer (represented as a 'BufferRange'). Then,
-- the 'BuildStep' can write to as big a prefix of this 'BufferRange' in any
-- way it desires. If the 'BuildStep' is done, the 'BufferRange' is full, or a
-- long sequence of bytes should be inserted directly, then the 'BuildStep'
-- signals this to its caller using a 'BuildSignal'.
--
-- We achieve (2) by requiring that every 'Builder' is implemented by a
-- 'BuildStep' that takes a continuation 'BuildStep', which it calls with the
-- updated 'BufferRange' after it is done. Therefore, only two pointers have
-- to be passed in a function call to implement concatenation of 'Builder's.
-- Moreover, many 'Builder's are completely inlined, which enables the compiler
-- to sequence them without a function call and with no boxing at all.
--
-- This design gives the implementation of a 'Builder' full access to the 'IO'
-- monad. Therefore, utmost care has to be taken to not overwrite anything
-- outside the given 'BufferRange's. Moreover, further care has to be taken to
-- ensure that 'Builder's and 'Put's are referentially transparent. See the
-- comments of the 'builder' and 'put' functions for further information.
-- Note that there are /no safety belts/ at all, when implementing a 'Builder'
-- using an 'IO' action: you are writing code that might enable the next
-- buffer-overflow attack on a Haskell server!
--
module Data.ByteString.Builder.Internal (
-- * Buffer management
Buffer(..)
, BufferRange(..)
, newBuffer
, bufferSize
, byteStringFromBuffer
, ChunkIOStream(..)
, buildStepToCIOS
, ciosUnitToLazyByteString
, ciosToLazyByteString
-- * Build signals and steps
, BuildSignal
, BuildStep
, finalBuildStep
, done
, bufferFull
, insertChunk
, fillWithBuildStep
-- * The Builder monoid
, Builder
, builder
, runBuilder
, runBuilderWith
-- ** Primitive combinators
, empty
, append
, flush
, ensureFree
-- , sizedChunksInsert
, byteStringCopy
, byteStringInsert
, byteStringThreshold
, lazyByteStringCopy
, lazyByteStringInsert
, lazyByteStringThreshold
, shortByteString
, maximalCopySize
, byteString
, lazyByteString
-- ** Execution
, toLazyByteStringWith
, AllocationStrategy
, safeStrategy
, untrimmedStrategy
, customStrategy
, L.smallChunkSize
, L.defaultChunkSize
, L.chunkOverhead
-- * The Put monad
, Put
, put
, runPut
-- ** Execution
, putToLazyByteString
, putToLazyByteStringWith
, hPut
-- ** Conversion to and from Builders
, putBuilder
, fromPut
-- -- ** Lifting IO actions
-- , putLiftIO
) where
import Control.Arrow (second)
#if !(MIN_VERSION_base(4,11,0)) && MIN_VERSION_base(4,9,0)
import Data.Semigroup (Semigroup((<>)))
#endif
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid
import Control.Applicative (Applicative(..),(<$>))
#endif
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
import qualified Data.ByteString.Short.Internal as Sh
#if __GLASGOW_HASKELL__ >= 611
import qualified GHC.IO.Buffer as IO (Buffer(..), newByteBuffer)
import GHC.IO.Handle.Internals (wantWritableHandle, flushWriteBuffer)
import GHC.IO.Handle.Types (Handle__, haByteBuffer, haBufferMode)
import System.IO (hFlush, BufferMode(..))
import Data.IORef
#else
import qualified Data.ByteString.Lazy as L
#endif
import System.IO (Handle)
#if MIN_VERSION_base(4,4,0)
#if MIN_VERSION_base(4,7,0)
import Foreign
#else
import Foreign hiding (unsafeForeignPtrToPtr)
#endif
import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr)
import System.IO.Unsafe (unsafeDupablePerformIO)
#else
import Foreign
import GHC.IO (unsafeDupablePerformIO)
#endif
------------------------------------------------------------------------------
-- Buffers
------------------------------------------------------------------------------
-- | A range of bytes in a buffer represented by the pointer to the first byte
-- of the range and the pointer to the first byte /after/ the range.
data BufferRange = BufferRange {-# UNPACK #-} !(Ptr Word8) -- First byte of range
{-# UNPACK #-} !(Ptr Word8) -- First byte /after/ range
-- | A 'Buffer' together with the 'BufferRange' of free bytes. The filled
-- space starts at offset 0 and ends at the first free byte.
data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8)
{-# UNPACK #-} !BufferRange
-- | Combined size of the filled and free space in the buffer.
{-# INLINE bufferSize #-}
bufferSize :: Buffer -> Int
bufferSize (Buffer fpbuf (BufferRange _ ope)) =
ope `minusPtr` unsafeForeignPtrToPtr fpbuf
-- | Allocate a new buffer of the given size.
{-# INLINE newBuffer #-}
newBuffer :: Int -> IO Buffer
newBuffer size = do
fpbuf <- S.mallocByteString size
let pbuf = unsafeForeignPtrToPtr fpbuf
return $! Buffer fpbuf (BufferRange pbuf (pbuf `plusPtr` size))
-- | Convert the filled part of a 'Buffer' to a strict 'S.ByteString'.
{-# INLINE byteStringFromBuffer #-}
byteStringFromBuffer :: Buffer -> S.ByteString
byteStringFromBuffer (Buffer fpbuf (BufferRange op _)) =
S.PS fpbuf 0 (op `minusPtr` unsafeForeignPtrToPtr fpbuf)
-- | Prepend the filled part of a 'Buffer' to a lazy 'L.ByteString'
-- trimming it if necessary.
{-# INLINE trimmedChunkFromBuffer #-}
trimmedChunkFromBuffer :: AllocationStrategy -> Buffer
-> L.ByteString -> L.ByteString
trimmedChunkFromBuffer (AllocationStrategy _ _ trim) buf k
| S.null bs = k
| trim (S.length bs) (bufferSize buf) = L.Chunk (S.copy bs) k
| otherwise = L.Chunk bs k
where
bs = byteStringFromBuffer buf
------------------------------------------------------------------------------
-- Chunked IO Stream
------------------------------------------------------------------------------
-- | A stream of chunks that are constructed in the 'IO' monad.
--
-- This datatype serves as the common interface for the buffer-by-buffer
-- execution of a 'BuildStep' by 'buildStepToCIOS'. Typical users of this
-- interface are 'ciosToLazyByteString' or iteratee-style libraries like
-- @enumerator@.
data ChunkIOStream a =
Finished Buffer a
-- ^ The partially filled last buffer together with the result.
| Yield1 S.ByteString (IO (ChunkIOStream a))
-- ^ Yield a /non-empty/ strict 'S.ByteString'.
-- | A smart constructor for yielding one chunk that ignores the chunk if
-- it is empty.
{-# INLINE yield1 #-}
yield1 :: S.ByteString -> IO (ChunkIOStream a) -> IO (ChunkIOStream a)
yield1 bs cios | S.null bs = cios
| otherwise = return $ Yield1 bs cios
-- | Convert a @'ChunkIOStream' ()@ to a lazy 'L.ByteString' using
-- 'unsafeDupablePerformIO'.
{-# INLINE ciosUnitToLazyByteString #-}
ciosUnitToLazyByteString :: AllocationStrategy
-> L.ByteString -> ChunkIOStream () -> L.ByteString
ciosUnitToLazyByteString strategy k = go
where
go (Finished buf _) = trimmedChunkFromBuffer strategy buf k
go (Yield1 bs io) = L.Chunk bs $ unsafeDupablePerformIO (go <$> io)
-- | Convert a 'ChunkIOStream' to a lazy tuple of the result and the written
-- 'L.ByteString' using 'unsafeDupablePerformIO'.
{-# INLINE ciosToLazyByteString #-}
ciosToLazyByteString :: AllocationStrategy
-> (a -> (b, L.ByteString))
-> ChunkIOStream a
-> (b, L.ByteString)
ciosToLazyByteString strategy k =
go
where
go (Finished buf x) =
second (trimmedChunkFromBuffer strategy buf) $ k x
go (Yield1 bs io) = second (L.Chunk bs) $ unsafeDupablePerformIO (go <$> io)
------------------------------------------------------------------------------
-- Build signals
------------------------------------------------------------------------------
-- | 'BuildStep's may be called *multiple times* and they must not raise an
-- async. exception.
type BuildStep a = BufferRange -> IO (BuildSignal a)
-- | 'BuildSignal's abstract signals to the caller of a 'BuildStep'. There are
-- three signals: 'done', 'bufferFull', and 'insertChunk'.
data BuildSignal a =
Done {-# UNPACK #-} !(Ptr Word8) a
| BufferFull
{-# UNPACK #-} !Int
{-# UNPACK #-} !(Ptr Word8)
(BuildStep a)
| InsertChunk
{-# UNPACK #-} !(Ptr Word8)
S.ByteString
(BuildStep a)
-- | Signal that the current 'BuildStep' is done and has computed a value.
{-# INLINE done #-}
done :: Ptr Word8 -- ^ Next free byte in current 'BufferRange'
-> a -- ^ Computed value
-> BuildSignal a
done = Done
-- | Signal that the current buffer is full.
{-# INLINE bufferFull #-}
bufferFull :: Int
-- ^ Minimal size of next 'BufferRange'.
-> Ptr Word8
-- ^ Next free byte in current 'BufferRange'.
-> BuildStep a
-- ^ 'BuildStep' to run on the next 'BufferRange'. This 'BuildStep'
-- may assume that it is called with a 'BufferRange' of at least the
-- required minimal size; i.e., the caller of this 'BuildStep' must
-- guarantee this.
-> BuildSignal a
bufferFull = BufferFull
-- | Signal that a 'S.ByteString' chunk should be inserted directly.
{-# INLINE insertChunk #-}
insertChunk :: Ptr Word8
-- ^ Next free byte in current 'BufferRange'
-> S.ByteString
-- ^ Chunk to insert.
-> BuildStep a
-- ^ 'BuildStep' to run on next 'BufferRange'
-> BuildSignal a
insertChunk op bs = InsertChunk op bs
-- | Fill a 'BufferRange' using a 'BuildStep'.
{-# INLINE fillWithBuildStep #-}
fillWithBuildStep
:: BuildStep a
-- ^ Build step to use for filling the 'BufferRange'.
-> (Ptr Word8 -> a -> IO b)
-- ^ Handling the 'done' signal
-> (Ptr Word8 -> Int -> BuildStep a -> IO b)
-- ^ Handling the 'bufferFull' signal
-> (Ptr Word8 -> S.ByteString -> BuildStep a -> IO b)
-- ^ Handling the 'insertChunk' signal
-> BufferRange
-- ^ Buffer range to fill.
-> IO b
-- ^ Value computed while filling this 'BufferRange'.
fillWithBuildStep step fDone fFull fChunk !br = do
signal <- step br
case signal of
Done op x -> fDone op x
BufferFull minSize op nextStep -> fFull op minSize nextStep
InsertChunk op bs nextStep -> fChunk op bs nextStep
------------------------------------------------------------------------------
-- The 'Builder' monoid
------------------------------------------------------------------------------
-- | 'Builder's denote sequences of bytes.
-- They are 'Monoid's where
-- 'mempty' is the zero-length sequence and
-- 'mappend' is concatenation, which runs in /O(1)/.
newtype Builder = Builder (forall r. BuildStep r -> BuildStep r)
-- | Construct a 'Builder'. In contrast to 'BuildStep's, 'Builder's are
-- referentially transparent.
{-# INLINE builder #-}
builder :: (forall r. BuildStep r -> BuildStep r)
-- ^ A function that fills a 'BufferRange', calls the continuation with
-- the updated 'BufferRange' once it's done, and signals its caller how
-- to proceed using 'done', 'bufferFull', or 'insertChunk'.
--
-- This function must be referentially transparent; i.e., calling it
-- multiple times with equally sized 'BufferRange's must result in the
-- same sequence of bytes being written. If you need mutable state,
-- then you must allocate it anew upon each call of this function.
-- Moreover, this function must call the continuation once it's done.
-- Otherwise, concatenation of 'Builder's does not work. Finally, this
-- function must write to all bytes that it claims it has written.
-- Otherwise, the resulting 'Builder' is not guaranteed to be
-- referentially transparent and sensitive data might leak.
-> Builder
builder = Builder
-- | The final build step that returns the 'done' signal.
finalBuildStep :: BuildStep ()
finalBuildStep !(BufferRange op _) = return $ Done op ()
-- | Run a 'Builder' with the 'finalBuildStep'.
{-# INLINE runBuilder #-}
runBuilder :: Builder -- ^ 'Builder' to run
-> BuildStep () -- ^ 'BuildStep' that writes the byte stream of this
-- 'Builder' and signals 'done' upon completion.
runBuilder b = runBuilderWith b finalBuildStep
-- | Run a 'Builder'.
{-# INLINE runBuilderWith #-}
runBuilderWith :: Builder -- ^ 'Builder' to run
-> BuildStep a -- ^ Continuation 'BuildStep'
-> BuildStep a
runBuilderWith (Builder b) = b
-- | The 'Builder' denoting a zero-length sequence of bytes. This function is
-- only exported for use in rewriting rules. Use 'mempty' otherwise.
{-# INLINE[1] empty #-}
empty :: Builder
empty = Builder (\cont -> (\range -> cont range))
-- This eta expansion (hopefully) allows GHC to worker-wrapper the
-- 'BufferRange' in the 'empty' base case of loops (since
-- worker-wrapper requires (TODO: verify this) that all paths match
-- against the wrapped argument).
-- | Concatenate two 'Builder's. This function is only exported for use in rewriting
-- rules. Use 'mappend' otherwise.
{-# INLINE[1] append #-}
append :: Builder -> Builder -> Builder
append (Builder b1) (Builder b2) = Builder $ b1 . b2
#if MIN_VERSION_base(4,9,0)
instance Semigroup Builder where
{-# INLINE (<>) #-}
(<>) = append
#endif
instance Monoid Builder where
{-# INLINE mempty #-}
mempty = empty
{-# INLINE mappend #-}
#if MIN_VERSION_base(4,9,0)
mappend = (<>)
#else
mappend = append
#endif
{-# INLINE mconcat #-}
mconcat = foldr mappend mempty
-- | Flush the current buffer. This introduces a chunk boundary.
{-# INLINE flush #-}
flush :: Builder
flush = builder step
where
step k !(BufferRange op _) = return $ insertChunk op S.empty k
------------------------------------------------------------------------------
-- Put
------------------------------------------------------------------------------
-- | A 'Put' action denotes a computation of a value that writes a stream of
-- bytes as a side-effect. 'Put's are strict in their side-effect; i.e., the
-- stream of bytes will always be written before the computed value is
-- returned.
--
-- 'Put's are a generalization of 'Builder's. The typical use case is the
-- implementation of an encoding that might fail (e.g., an interface to the
-- 'zlib' compression library or the conversion from Base64 encoded data to
-- 8-bit data). For a 'Builder', the only way to handle and report such a
-- failure is to ignore it or call 'error'. In contrast, 'Put' actions are
-- expressive enough to allow reporting and handling such a failure in a pure
-- fashion.
--
-- @'Put' ()@ actions are isomorphic to 'Builder's. The functions 'putBuilder'
-- and 'fromPut' convert between these two types. Where possible, you should
-- use 'Builder's, as sequencing them is slightly cheaper than sequencing
-- 'Put's because they do not carry around a computed value.
newtype Put a = Put { unPut :: forall r. (a -> BuildStep r) -> BuildStep r }
-- | Construct a 'Put' action. In contrast to 'BuildStep's, 'Put's are
-- referentially transparent in the sense that sequencing the same 'Put'
-- multiple times yields every time the same value with the same side-effect.
{-# INLINE put #-}
put :: (forall r. (a -> BuildStep r) -> BuildStep r)
-- ^ A function that fills a 'BufferRange', calls the continuation with
-- the updated 'BufferRange' and its computed value once it's done, and
-- signals its caller how to proceed using 'done', 'bufferFull', or
-- 'insertChunk' signals.
--
-- This function must be referentially transparent; i.e., calling it
-- multiple times with equally sized 'BufferRange's must result in the
-- same sequence of bytes being written and the same value being
-- computed. If you need mutable state, then you must allocate it anew
-- upon each call of this function. Moroever, this function must call
-- the continuation once its done. Otherwise, monadic sequencing of
-- 'Put's does not work. Finally, this function must write to all bytes
-- that it claims it has written. Otherwise, the resulting 'Put' is
-- not guaranteed to be referentially transparent and sensitive data
-- might leak.
-> Put a
put = Put
-- | Run a 'Put'.
{-# INLINE runPut #-}
runPut :: Put a -- ^ Put to run
-> BuildStep a -- ^ 'BuildStep' that first writes the byte stream of
-- this 'Put' and then yields the computed value using
-- the 'done' signal.
runPut (Put p) = p $ \x (BufferRange op _) -> return $ Done op x
instance Functor Put where
fmap f p = Put $ \k -> unPut p (\x -> k (f x))
{-# INLINE fmap #-}
-- | Synonym for '<*' from 'Applicative'; used in rewriting rules.
{-# INLINE[1] ap_l #-}
ap_l :: Put a -> Put b -> Put a
ap_l (Put a) (Put b) = Put $ \k -> a (\a' -> b (\_ -> k a'))
-- | Synonym for '*>' from 'Applicative' and '>>' from 'Monad'; used in
-- rewriting rules.
{-# INLINE[1] ap_r #-}
ap_r :: Put a -> Put b -> Put b
ap_r (Put a) (Put b) = Put $ \k -> a (\_ -> b k)
instance Applicative Put where
{-# INLINE pure #-}
pure x = Put $ \k -> k x
{-# INLINE (<*>) #-}
Put f <*> Put a = Put $ \k -> f (\f' -> a (\a' -> k (f' a')))
{-# INLINE (<*) #-}
(<*) = ap_l
{-# INLINE (*>) #-}
(*>) = ap_r
instance Monad Put where
{-# INLINE return #-}
return = pure
{-# INLINE (>>=) #-}
Put m >>= f = Put $ \k -> m (\m' -> unPut (f m') k)
{-# INLINE (>>) #-}
(>>) = (*>)
-- Conversion between Put and Builder
-------------------------------------
-- | Run a 'Builder' as a side-effect of a @'Put' ()@ action.
{-# INLINE[1] putBuilder #-}
putBuilder :: Builder -> Put ()
putBuilder (Builder b) = Put $ \k -> b (k ())
-- | Convert a @'Put' ()@ action to a 'Builder'.
{-# INLINE fromPut #-}
fromPut :: Put () -> Builder
fromPut (Put p) = Builder $ \k -> p (\_ -> k)
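-- A hedged illustration of the isomorphism mentioned in the documentation of
-- 'Put' (the name is an assumption for this example only): converting a
-- 'Builder' to a @'Put' ()@ and back yields an equivalent 'Builder'.
illustratePutIso :: Builder -> Builder
illustratePutIso = fromPut . putBuilder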
-- We rewrite consecutive uses of 'putBuilder' such that the append of the
-- involved 'Builder's is used. This can significantly improve performance,
-- when the bound-checks of the concatenated builders are fused.
-- ap_l rules
{-# RULES
"ap_l/putBuilder" forall b1 b2.
ap_l (putBuilder b1) (putBuilder b2)
= putBuilder (append b1 b2)
"ap_l/putBuilder/assoc_r" forall b1 b2 (p :: Put a).
ap_l (putBuilder b1) (ap_l (putBuilder b2) p)
= ap_l (putBuilder (append b1 b2)) p
"ap_l/putBuilder/assoc_l" forall (p :: Put a) b1 b2.
ap_l (ap_l p (putBuilder b1)) (putBuilder b2)
= ap_l p (putBuilder (append b1 b2))
#-}
-- ap_r rules
{-# RULES
"ap_r/putBuilder" forall b1 b2.
ap_r (putBuilder b1) (putBuilder b2)
= putBuilder (append b1 b2)
"ap_r/putBuilder/assoc_r" forall b1 b2 (p :: Put a).
ap_r (putBuilder b1) (ap_r (putBuilder b2) p)
= ap_r (putBuilder (append b1 b2)) p
"ap_r/putBuilder/assoc_l" forall (p :: Put a) b1 b2.
ap_r (ap_r p (putBuilder b1)) (putBuilder b2)
= ap_r p (putBuilder (append b1 b2))
#-}
-- combined ap_l/ap_r rules
{-# RULES
"ap_l/ap_r/putBuilder/assoc_r" forall b1 b2 (p :: Put a).
ap_l (putBuilder b1) (ap_r (putBuilder b2) p)
= ap_l (putBuilder (append b1 b2)) p
"ap_r/ap_l/putBuilder/assoc_r" forall b1 b2 (p :: Put a).
ap_r (putBuilder b1) (ap_l (putBuilder b2) p)
= ap_l (putBuilder (append b1 b2)) p
"ap_l/ap_r/putBuilder/assoc_l" forall (p :: Put a) b1 b2.
ap_l (ap_r p (putBuilder b1)) (putBuilder b2)
= ap_r p (putBuilder (append b1 b2))
"ap_r/ap_l/putBuilder/assoc_l" forall (p :: Put a) b1 b2.
ap_r (ap_l p (putBuilder b1)) (putBuilder b2)
= ap_r p (putBuilder (append b1 b2))
#-}
-- Lifting IO actions
---------------------
{-
-- | Lift an 'IO' action to a 'Put' action.
{-# INLINE putLiftIO #-}
putLiftIO :: IO a -> Put a
putLiftIO io = put $ \k br -> io >>= (`k` br)
-}
------------------------------------------------------------------------------
-- Executing a Put directly on a buffered Handle
------------------------------------------------------------------------------
-- | Run a 'Put' action redirecting the produced output to a 'Handle'.
--
-- The output is buffered using the 'Handle's associated buffer. If this
-- buffer is too small to execute one step of the 'Put' action, then
-- it is replaced with a large enough buffer.
hPut :: forall a. Handle -> Put a -> IO a
#if __GLASGOW_HASKELL__ >= 611
hPut h p = do
fillHandle 1 (runPut p)
where
fillHandle :: Int -> BuildStep a -> IO a
fillHandle !minFree step = do
next <- wantWritableHandle "hPut" h fillHandle_
next
where
-- | We need to return an inner IO action that is executed outside
-- the lock taken on the Handle for two reasons:
--
-- 1. GHC.IO.Handle.Internals mentions in "Note [async]" that
-- we should never do any side-effecting operations before
-- an interruptible operation that may raise an async. exception
-- as long as we are inside 'wantWritableHandle' and the like.
-- We possibly run the interruptible 'flushWriteBuffer' right at
-- the start of 'fillHandle', hence entering it a second time is
-- not safe, as it could lead to a 'BuildStep' being run twice.
--
-- FIXME (SM): Adapt this function or at least its documentation,
-- as it is OK to run a 'BuildStep' twice. We dropped this
-- requirement in favor of being able to use
-- 'unsafeDupablePerformIO' and the speed improvement that it
-- brings.
--
-- 2. We use the 'S.hPut' function to also write to the handle.
-- This function tries to take the same lock taken by
-- 'wantWritableHandle'. Therefore, we cannot call 'S.hPut'
-- inside 'wantWritableHandle'.
--
fillHandle_ :: Handle__ -> IO (IO a)
fillHandle_ h_ = do
makeSpace =<< readIORef refBuf
fillBuffer =<< readIORef refBuf
where
refBuf = haByteBuffer h_
freeSpace buf = IO.bufSize buf - IO.bufR buf
makeSpace buf
| IO.bufSize buf < minFree = do
flushWriteBuffer h_
s <- IO.bufState <$> readIORef refBuf
IO.newByteBuffer minFree s >>= writeIORef refBuf
| freeSpace buf < minFree = flushWriteBuffer h_
| otherwise =
#if __GLASGOW_HASKELL__ >= 613
return ()
#else
-- required for ghc-6.12
flushWriteBuffer h_
#endif
fillBuffer buf
| freeSpace buf < minFree =
error $ unlines
[ "Data.ByteString.Builder.Internal.hPut: internal error."
, " Not enough space after flush."
, " required: " ++ show minFree
, " free: " ++ show (freeSpace buf)
]
| otherwise = do
let !br = BufferRange op (pBuf `plusPtr` IO.bufSize buf)
res <- fillWithBuildStep step doneH fullH insertChunkH br
touchForeignPtr fpBuf
return res
where
fpBuf = IO.bufRaw buf
pBuf = unsafeForeignPtrToPtr fpBuf
op = pBuf `plusPtr` IO.bufR buf
{-# INLINE updateBufR #-}
updateBufR op' = do
let !off' = op' `minusPtr` pBuf
!buf' = buf {IO.bufR = off'}
writeIORef refBuf buf'
doneH op' x = do
updateBufR op'
-- We must flush if this Handle is set to NoBuffering.
-- If it is set to LineBuffering, be conservative and
-- flush anyway (we didn't check for newlines in the data).
-- Flushing must happen outside this 'wantWritableHandle'
-- due to the possible async. exception.
case haBufferMode h_ of
BlockBuffering _ -> return $ return x
_line_or_no_buffering -> return $ hFlush h >> return x
fullH op' minSize nextStep = do
updateBufR op'
return $ fillHandle minSize nextStep
-- 'fillHandle' will flush the buffer (provided there is
-- really less than 'minSize' space left) before executing
-- the 'nextStep'.
insertChunkH op' bs nextStep = do
updateBufR op'
return $ do
S.hPut h bs
fillHandle 1 nextStep
#else
hPut h p =
go =<< buildStepToCIOS strategy (runPut p)
where
strategy = untrimmedStrategy L.smallChunkSize L.defaultChunkSize
go (Finished buf x) = S.hPut h (byteStringFromBuffer buf) >> return x
go (Yield1 bs io) = S.hPut h bs >> io >>= go
#endif
-- | Execute a 'Put' and return the computed result and the bytes
-- written during the computation as a lazy 'L.ByteString'.
--
-- This function is strict in the computed result and lazy in the writing of
-- the bytes. For example, given
--
-- @
--infinitePut = sequence_ (repeat (putBuilder (word8 1))) >> return 0
-- @
--
-- evaluating the expression
--
-- @
--fst $ putToLazyByteString infinitePut
-- @
--
-- does not terminate, while evaluating the expression
--
-- @
--L.head $ snd $ putToLazyByteString infinitePut
-- @
--
-- does terminate and yields the value @1 :: Word8@.
--
-- An illustrative example for these strictness properties is the
-- implementation of Base64 decoding (<http://en.wikipedia.org/wiki/Base64>).
--
-- @
--type DecodingState = ...
--
--decodeBase64 :: 'S.ByteString' -> DecodingState -> 'Put' (Maybe DecodingState)
--decodeBase64 = ...
-- @
--
-- The above function takes a strict 'S.ByteString' supposed to represent
-- Base64 encoded data and the current decoding state.
-- It writes the decoded bytes as the side-effect of the 'Put' and returns the
-- new decoding state, if the decoding of all data in the 'S.ByteString' was
-- successful. Checking whether the strict 'S.ByteString' represents Base64
-- encoded data and the actual decoding are fused. This makes the common case,
-- where all data represents Base64 encoded data, more efficient. It also
-- implies that all data must be decoded before the final decoding
-- state can be returned. 'Put's are intended for implementing such fused
-- checking and decoding/encoding, which is reflected in their strictness
-- properties.
{-# NOINLINE putToLazyByteString #-}
putToLazyByteString
:: Put a -- ^ 'Put' to execute
-> (a, L.ByteString) -- ^ Result and lazy 'L.ByteString'
-- written as its side-effect
putToLazyByteString = putToLazyByteStringWith
(safeStrategy L.smallChunkSize L.defaultChunkSize) (\x -> (x, L.Empty))
-- | Execute a 'Put' with a buffer-allocation strategy and a continuation. For
-- example, 'putToLazyByteString' is implemented as follows.
--
-- @
--putToLazyByteString = 'putToLazyByteStringWith'
-- ('safeStrategy' 'L.smallChunkSize' 'L.defaultChunkSize') (\x -> (x, L.empty))
-- @
--
{-# INLINE putToLazyByteStringWith #-}
putToLazyByteStringWith
:: AllocationStrategy
-- ^ Buffer allocation strategy to use
-> (a -> (b, L.ByteString))
-- ^ Continuation to use for computing the final result and the tail of
-- its side-effect (the written bytes).
-> Put a
-- ^ 'Put' to execute
-> (b, L.ByteString)
-- ^ Resulting lazy 'L.ByteString'
putToLazyByteStringWith strategy k p =
ciosToLazyByteString strategy k $ unsafeDupablePerformIO $
buildStepToCIOS strategy (runPut p)
------------------------------------------------------------------------------
-- ByteString insertion / controlling chunk boundaries
------------------------------------------------------------------------------
-- Raw memory
-------------
-- | Ensure that there are at least 'n' free bytes for the following 'Builder'.
{-# INLINE ensureFree #-}
ensureFree :: Int -> Builder
ensureFree minFree =
builder step
where
step k br@(BufferRange op ope)
| ope `minusPtr` op < minFree = return $ bufferFull minFree op k
| otherwise = k br
-- | Copy the bytes from a 'BufferRange' into the output stream.
wrappedBytesCopyStep :: BufferRange -- ^ Input 'BufferRange'.
-> BuildStep a -> BuildStep a
wrappedBytesCopyStep !(BufferRange ip0 ipe) k =
go ip0
where
go !ip !(BufferRange op ope)
| inpRemaining <= outRemaining = do
copyBytes op ip inpRemaining
let !br' = BufferRange (op `plusPtr` inpRemaining) ope
k br'
| otherwise = do
copyBytes op ip outRemaining
let !ip' = ip `plusPtr` outRemaining
return $ bufferFull 1 ope (go ip')
where
outRemaining = ope `minusPtr` op
inpRemaining = ipe `minusPtr` ip
-- Strict ByteStrings
------------------------------------------------------------------------------
-- | Construct a 'Builder' that copies the strict 'S.ByteString', if it is
-- smaller than the threshold, and inserts it directly otherwise.
--
-- For example, @byteStringThreshold 1024@ copies strict 'S.ByteString's whose size
-- is less than or equal to 1kb, and inserts them directly otherwise. This implies
-- that the average chunk-size of the generated lazy 'L.ByteString' may be as
-- low as 513 bytes, as there could always be just a single byte between the
-- directly inserted 1025-byte strict 'S.ByteString's.
--
{-# INLINE byteStringThreshold #-}
byteStringThreshold :: Int -> S.ByteString -> Builder
byteStringThreshold maxCopySize =
\bs -> builder $ step bs
where
step !bs@(S.PS _ _ len) !k br@(BufferRange !op _)
| len <= maxCopySize = byteStringCopyStep bs k br
| otherwise = return $ insertChunk op bs k
-- | Construct a 'Builder' that copies the strict 'S.ByteString'.
--
-- Use this function to create 'Builder's from smallish (@<= 4kb@)
-- 'S.ByteString's or if you need to guarantee that the 'S.ByteString' is not
-- shared with the chunks generated by the 'Builder'.
--
{-# INLINE byteStringCopy #-}
byteStringCopy :: S.ByteString -> Builder
byteStringCopy = \bs -> builder $ byteStringCopyStep bs
{-# INLINE byteStringCopyStep #-}
byteStringCopyStep :: S.ByteString -> BuildStep a -> BuildStep a
byteStringCopyStep (S.PS ifp ioff isize) !k0 br0@(BufferRange op ope)
-- Ensure that the common case is not recursive and therefore yields
-- better code.
| op' <= ope = do copyBytes op ip isize
touchForeignPtr ifp
k0 (BufferRange op' ope)
| otherwise = do wrappedBytesCopyStep (BufferRange ip ipe) k br0
where
op' = op `plusPtr` isize
ip = unsafeForeignPtrToPtr ifp `plusPtr` ioff
ipe = ip `plusPtr` isize
k br = do touchForeignPtr ifp -- input consumed: OK to release here
k0 br
-- | Construct a 'Builder' that always inserts the strict 'S.ByteString'
-- directly as a chunk.
--
-- This implies flushing the output buffer, even if it contains just
-- a single byte. You should therefore use 'byteStringInsert' only for large
-- (@> 8kb@) 'S.ByteString's. Otherwise, the generated chunks are too
-- fragmented to be processed efficiently afterwards.
--
{-# INLINE byteStringInsert #-}
byteStringInsert :: S.ByteString -> Builder
byteStringInsert =
\bs -> builder $ \k (BufferRange op _) -> return $ insertChunk op bs k
-- Short bytestrings
------------------------------------------------------------------------------
-- | Construct a 'Builder' that copies the 'SH.ShortByteString'.
--
{-# INLINE shortByteString #-}
shortByteString :: Sh.ShortByteString -> Builder
shortByteString = \sbs -> builder $ shortByteStringCopyStep sbs
-- | Copy the bytes from a 'SH.ShortByteString' into the output stream.
{-# INLINE shortByteStringCopyStep #-}
shortByteStringCopyStep :: Sh.ShortByteString -- ^ Input 'SH.ShortByteString'.
-> BuildStep a -> BuildStep a
shortByteStringCopyStep !sbs k =
go 0 (Sh.length sbs)
where
go !ip !ipe !(BufferRange op ope)
| inpRemaining <= outRemaining = do
Sh.copyToPtr sbs ip op inpRemaining
let !br' = BufferRange (op `plusPtr` inpRemaining) ope
k br'
| otherwise = do
Sh.copyToPtr sbs ip op outRemaining
let !ip' = ip + outRemaining
return $ bufferFull 1 ope (go ip' ipe)
where
outRemaining = ope `minusPtr` op
inpRemaining = ipe - ip
-- Lazy bytestrings
------------------------------------------------------------------------------
-- | Construct a 'Builder' that uses the thresholding strategy of 'byteStringThreshold'
-- for each chunk of the lazy 'L.ByteString'.
--
{-# INLINE lazyByteStringThreshold #-}
lazyByteStringThreshold :: Int -> L.ByteString -> Builder
lazyByteStringThreshold maxCopySize =
L.foldrChunks (\bs b -> byteStringThreshold maxCopySize bs `mappend` b) mempty
-- TODO: We could do better here. Currently, a Large, Small, Large sequence leads to
-- an unnecessary copy of the 'Small' chunk.
-- | Construct a 'Builder' that copies the lazy 'L.ByteString'.
--
{-# INLINE lazyByteStringCopy #-}
lazyByteStringCopy :: L.ByteString -> Builder
lazyByteStringCopy =
L.foldrChunks (\bs b -> byteStringCopy bs `mappend` b) mempty
-- | Construct a 'Builder' that inserts all chunks of the lazy 'L.ByteString'
-- directly.
--
{-# INLINE lazyByteStringInsert #-}
lazyByteStringInsert :: L.ByteString -> Builder
lazyByteStringInsert =
L.foldrChunks (\bs b -> byteStringInsert bs `mappend` b) mempty
-- | Create a 'Builder' denoting the same sequence of bytes as a strict
-- 'S.ByteString'.
-- The 'Builder' inserts large 'S.ByteString's directly, but copies small ones
-- to ensure that the generated chunks are large on average.
--
{-# INLINE byteString #-}
byteString :: S.ByteString -> Builder
byteString = byteStringThreshold maximalCopySize
-- | Create a 'Builder' denoting the same sequence of bytes as a lazy
-- 'L.ByteString'.
-- The 'Builder' inserts large chunks of the lazy 'L.ByteString' directly,
-- but copies small ones to ensure that the generated chunks are large on
-- average.
--
{-# INLINE lazyByteString #-}
lazyByteString :: L.ByteString -> Builder
lazyByteString = lazyByteStringThreshold maximalCopySize
-- FIXME: also insert the small chunk for [large,small,large] directly.
-- Perhaps it makes even sense to concatenate the small chunks in
-- [large,small,small,small,large] and insert them directly afterwards to avoid
-- unnecessary buffer spilling. Hmm, but that uncontrollably increases latency
-- => no good!
-- | The maximal size of a 'S.ByteString' that is copied.
-- @2 * 'L.smallChunkSize'@ to guarantee that on average a chunk is of
-- 'L.smallChunkSize'.
maximalCopySize :: Int
maximalCopySize = 2 * L.smallChunkSize
------------------------------------------------------------------------------
-- Builder execution
------------------------------------------------------------------------------
-- | A buffer allocation strategy for executing 'Builder's.
-- The strategy
--
-- > 'AllocationStrategy' firstBufSize bufSize trim
--
-- states that the first buffer is of size @firstBufSize@, all following buffers
-- are of size @bufSize@, and a buffer of size @n@ filled with @k@ bytes should
-- be trimmed iff @trim k n@ is 'True'.
data AllocationStrategy = AllocationStrategy
(Maybe (Buffer, Int) -> IO Buffer)
{-# UNPACK #-} !Int
(Int -> Int -> Bool)
-- | Create a custom allocation strategy. See the code for 'safeStrategy' and
-- 'untrimmedStrategy' for examples.
{-# INLINE customStrategy #-}
customStrategy
:: (Maybe (Buffer, Int) -> IO Buffer)
-- ^ Buffer allocation function. If 'Nothing' is given, then a new first
-- buffer should be allocated. If @'Just' (oldBuf, minSize)@ is given,
-- then a buffer with minimal size 'minSize' must be returned. The
-- strategy may reuse the 'oldBuf', if it can guarantee that this is
-- referentially transparent and 'oldBuf' is large enough.
-> Int
-- ^ Default buffer size.
-> (Int -> Int -> Bool)
-- ^ A predicate @trim used allocated@ returning 'True', if the buffer
-- should be trimmed before it is returned.
-> AllocationStrategy
customStrategy = AllocationStrategy
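-- A minimal sketch of a custom strategy (the name and the 4096-byte figure are
-- assumptions chosen for this example, not recommendations): never trim, and
-- always allocate a fresh buffer of at least 4096 bytes.
exampleCustomStrategy :: AllocationStrategy
exampleCustomStrategy = customStrategy alloc 4096 (\_ _ -> False)
  where
    alloc Nothing             = newBuffer 4096
    alloc (Just (_, minSize)) = newBuffer (max 4096 minSize)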
-- | Sanitize a buffer size; i.e., make it at least the size of an 'Int'.
{-# INLINE sanitize #-}
sanitize :: Int -> Int
sanitize = max (sizeOf (undefined :: Int))
-- | Use this strategy for generating lazy 'L.ByteString's whose chunks are
-- discarded right after they are generated. For example, if you just generate
-- them to write them to a network socket.
{-# INLINE untrimmedStrategy #-}
untrimmedStrategy :: Int -- ^ Size of the first buffer
-> Int -- ^ Size of successive buffers
-> AllocationStrategy
-- ^ An allocation strategy that does not trim any of the
-- filled buffers before converting them to chunks
untrimmedStrategy firstSize bufSize =
AllocationStrategy nextBuffer (sanitize bufSize) (\_ _ -> False)
where
{-# INLINE nextBuffer #-}
nextBuffer Nothing = newBuffer $ sanitize firstSize
nextBuffer (Just (_, minSize)) = newBuffer minSize
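-- A hedged usage sketch (the name is an assumption for this example): render a
-- 'Builder' into chunks that are written out immediately, e.g. to a socket,
-- where trimming the final chunk would only cost an extra copy.
exampleUntrimmedRun :: Builder -> L.ByteString
exampleUntrimmedRun =
  toLazyByteStringWith (untrimmedStrategy L.smallChunkSize L.defaultChunkSize) L.Empty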
-- | Use this strategy for generating lazy 'L.ByteString's whose chunks are
-- likely to survive one garbage collection. This strategy trims buffers
-- that are filled less than half in order to avoid spilling too much memory.
{-# INLINE safeStrategy #-}
safeStrategy :: Int -- ^ Size of first buffer
-> Int -- ^ Size of successive buffers
-> AllocationStrategy
-- ^ An allocation strategy that guarantees that at least half
-- of the allocated memory is used for live data
safeStrategy firstSize bufSize =
AllocationStrategy nextBuffer (sanitize bufSize) trim
where
trim used size = 2 * used < size
{-# INLINE nextBuffer #-}
nextBuffer Nothing = newBuffer $ sanitize firstSize
nextBuffer (Just (_, minSize)) = newBuffer minSize
-- | /Heavy inlining./ Execute a 'Builder' with custom execution parameters.
--
-- This function is inlined despite its heavy code-size to allow fusing with
-- the allocation strategy. For example, the default 'Builder' execution
-- function 'toLazyByteString' is defined as follows.
--
-- @
-- {-\# NOINLINE toLazyByteString \#-}
-- toLazyByteString =
-- toLazyByteStringWith ('safeStrategy' 'L.smallChunkSize' 'L.defaultChunkSize') L.empty
-- @
--
-- where @L.empty@ is the zero-length lazy 'L.ByteString'.
--
-- In most cases, the parameters used by 'toLazyByteString' give good
-- performance. A sub-performing case of 'toLazyByteString' is executing short
-- (<128 bytes) 'Builder's. In this case, the allocation overhead for the first
-- 4kb buffer and the trimming cost dominate the cost of executing the
-- 'Builder'. You can avoid this problem using
--
-- >toLazyByteStringWith (safeStrategy 128 smallChunkSize) L.empty
--
-- This reduces the allocation and trimming overhead, as all generated
-- 'L.ByteString's fit into the first buffer and there is no trimming
-- required, if more than 64 bytes and less than 128 bytes are written.
--
{-# INLINE toLazyByteStringWith #-}
toLazyByteStringWith
:: AllocationStrategy
-- ^ Buffer allocation strategy to use
-> L.ByteString
-- ^ Lazy 'L.ByteString' to use as the tail of the generated lazy
-- 'L.ByteString'
-> Builder
-- ^ 'Builder' to execute
-> L.ByteString
-- ^ Resulting lazy 'L.ByteString'
toLazyByteStringWith strategy k b =
ciosUnitToLazyByteString strategy k $ unsafeDupablePerformIO $
buildStepToCIOS strategy (runBuilder b)
-- | Convert a 'BuildStep' to a 'ChunkIOStream' stream by executing it on
-- 'Buffer's allocated according to the given 'AllocationStrategy'.
{-# INLINE buildStepToCIOS #-}
buildStepToCIOS
:: AllocationStrategy -- ^ Buffer allocation strategy to use
-> BuildStep a -- ^ 'BuildStep' to execute
-> IO (ChunkIOStream a)
buildStepToCIOS !(AllocationStrategy nextBuffer bufSize trim) =
\step -> nextBuffer Nothing >>= fill step
where
fill !step !buf@(Buffer fpbuf br@(BufferRange _ pe)) = do
res <- fillWithBuildStep step doneH fullH insertChunkH br
touchForeignPtr fpbuf
return res
where
pbuf = unsafeForeignPtrToPtr fpbuf
doneH op' x = return $
Finished (Buffer fpbuf (BufferRange op' pe)) x
fullH op' minSize nextStep =
wrapChunk op' $ const $
nextBuffer (Just (buf, max minSize bufSize)) >>= fill nextStep
insertChunkH op' bs nextStep =
wrapChunk op' $ \isEmpty -> yield1 bs $
-- Checking for empty case avoids allocating 'n-1' empty
-- buffers for 'n' insertChunkH right after each other.
if isEmpty
then fill nextStep buf
else do buf' <- nextBuffer (Just (buf, bufSize))
fill nextStep buf'
-- Wrap and yield a chunk, trimming it if necessary
{-# INLINE wrapChunk #-}
wrapChunk !op' mkCIOS
| chunkSize == 0 = mkCIOS True
| trim chunkSize size = do
bs <- S.create chunkSize $ \pbuf' ->
copyBytes pbuf' pbuf chunkSize
-- FIXME: We could reuse the trimmed buffer here.
return $ Yield1 bs (mkCIOS False)
| otherwise =
return $ Yield1 (S.PS fpbuf 0 chunkSize) (mkCIOS False)
where
chunkSize = op' `minusPtr` pbuf
size = pe `minusPtr` pbuf
| CloudI/CloudI | src/api/haskell/external/bytestring-0.10.10.0/Data/ByteString/Builder/Internal.hs | mit | 43,928 | 0 | 21 | 10,675 | 5,530 | 3,073 | 2,457 | 445 | 3 |
module Main where
import Test.MiniUnitTest
main :: IO ()
main = tests
| bagl/takusen-oracle | Test/Main.hs | bsd-3-clause | 80 | 0 | 6 | 21 | 24 | 14 | 10 | 4 | 1 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
{-| Binary instances for the core datatypes -}
module Idris.Core.Binary where
import Control.Applicative ((<*>), (<$>))
import Control.Monad (liftM2)
import Control.DeepSeq (($!!))
import Data.Binary
import Data.Vector.Binary
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Idris.Core.TT
instance Binary ErrorReportPart where
put (TextPart msg) = do putWord8 0 ; put msg
put (NamePart n) = do putWord8 1 ; put n
put (TermPart t) = do putWord8 2 ; put t
put (SubReport ps) = do putWord8 3 ; put ps
put (RawPart r) = do putWord8 4 ; put r
get = do i <- getWord8
case i of
0 -> fmap TextPart get
1 -> fmap NamePart get
2 -> fmap TermPart get
3 -> fmap SubReport get
4 -> fmap RawPart get
_ -> error "Corrupted binary data for ErrorReportPart"
instance Binary Provenance where
put ExpectedType = putWord8 0
put (SourceTerm t) = do putWord8 1
put t
put InferredVal = putWord8 2
put GivenVal = putWord8 3
put (TooManyArgs t) = do putWord8 4
put t
get = do i <- getWord8
case i of
0 -> return ExpectedType
1 -> do x1 <- get
return (SourceTerm x1)
2 -> return InferredVal
3 -> return GivenVal
4 -> do x1 <- get
return (TooManyArgs x1)
_ -> error "Corrupted binary data for Provenance"
instance Binary UConstraint where
put (ULT x1 x2) = putWord8 0 >> put x1 >> put x2
put (ULE x1 x2) = putWord8 1 >> put x1 >> put x2
get = do i <- getWord8
case i of
0 -> ULT <$> get <*> get
1 -> ULE <$> get <*> get
_ -> error "Corrupted binary data for UConstraint"
instance Binary ConstraintFC where
put (ConstraintFC x1 x2) = putWord8 0 >> put x1 >> put x2
get = do i <- getWord8
case i of
0 -> liftM2 ConstraintFC get get
_ -> error "Corrupted binary data for ConstraintFC"
instance Binary a => Binary (Err' a) where
put (Msg str) = do putWord8 0
put str
put (InternalMsg str) = do putWord8 1
put str
put (CantUnify x y z e ctxt i) = do putWord8 2
put x
put y
put z
put e
put ctxt
put i
put (InfiniteUnify n t ctxt) = do putWord8 3
put n
put t
put ctxt
put (CantConvert x y ctxt) = do putWord8 4
put x
put y
put ctxt
put (CantSolveGoal x ctxt) = do putWord8 5
put x
put ctxt
put (UnifyScope n1 n2 x ctxt) = do putWord8 6
put n1
put n2
put x
put ctxt
put (CantInferType str) = do putWord8 7
put str
put (NonFunctionType t1 t2) = do putWord8 8
put t1
put t2
put (NotEquality t1 t2) = do putWord8 9
put t1
put t2
put (TooManyArguments n) = do putWord8 10
put n
put (CantIntroduce t) = do putWord8 11
put t
put (NoSuchVariable n) = do putWord8 12
put n
put (NoTypeDecl n) = do putWord8 13
put n
put (NotInjective x y z) = do putWord8 14
put x
put y
put z
put (CantResolve _ t) = do putWord8 15
put t
put (CantResolveAlts ns) = do putWord8 16
put ns
put (IncompleteTerm t) = do putWord8 17
put t
put (UniverseError x1 x2 x3 x4 x5) = do putWord8 18
put x1
put x2
put x3
put x4
put x5
put (UniqueError u n) = do putWord8 19
put u
put n
put (UniqueKindError u n) = do putWord8 20
put u
put n
put ProgramLineComment = putWord8 21
put (Inaccessible n) = do putWord8 22
put n
put (NonCollapsiblePostulate n) = do putWord8 23
put n
put (AlreadyDefined n) = do putWord8 24
put n
put (ProofSearchFail e) = do putWord8 25
put e
put (NoRewriting t) = do putWord8 26
put t
put (At fc e) = do putWord8 27
put fc
put e
put (Elaborating str n e) = do putWord8 28
put str
put n
put e
put (ElaboratingArg n1 n2 ns e) = do putWord8 29
put n1
put n2
put ns
put e
put (ProviderError str) = do putWord8 30
put str
put (LoadingFailed str e) = do putWord8 31
put str
put e
put (ReflectionError parts e) = do putWord8 32
put parts
put e
put (ReflectionFailed str e) = do putWord8 33
put str
put e
put (WithFnType t) = do putWord8 34
put t
put (CantMatch t) = do putWord8 35
put t
put (ElabScriptDebug x1 x2 x3) = do putWord8 36
put x1
put x2
put x3
put (NoEliminator s t) = do putWord8 37
put s
put t
put (InvalidTCArg n t) = do putWord8 38
put n
put t
put (ElabScriptStuck x1) = do putWord8 39
put x1
put (UnknownImplicit n f) = do putWord8 40
put n
put f
put (NoValidAlts ns) = do putWord8 41
put ns
get = do i <- getWord8
case i of
0 -> fmap Msg get
1 -> fmap InternalMsg get
2 -> do x <- get ; y <- get ; z <- get ; e <- get ; ctxt <- get ; i <- get
return $ CantUnify x y z e ctxt i
3 -> do x <- get ; y <- get ; z <- get
return $ InfiniteUnify x y z
4 -> do x <- get ; y <- get ; z <- get
return $ CantConvert x y z
5 -> do x <- get ; y <- get
return $ CantSolveGoal x y
6 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ UnifyScope w x y z
7 -> fmap CantInferType get
8 -> do x <- get ; y <- get
return $ NonFunctionType x y
9 -> do x <- get ; y <- get
return $ NotEquality x y
10 -> fmap TooManyArguments get
11 -> fmap CantIntroduce get
12 -> fmap NoSuchVariable get
13 -> fmap NoTypeDecl get
14 -> do x <- get ; y <- get ; z <- get
return $ NotInjective x y z
15 -> fmap (CantResolve False) get
16 -> fmap CantResolveAlts get
17 -> fmap IncompleteTerm get
18 -> UniverseError <$> get <*> get <*> get <*> get <*> get
19 -> do x <- get ; y <- get
return $ UniqueError x y
20 -> do x <- get ; y <- get
return $ UniqueKindError x y
21 -> return ProgramLineComment
22 -> fmap Inaccessible get
23 -> fmap NonCollapsiblePostulate get
24 -> fmap AlreadyDefined get
25 -> fmap ProofSearchFail get
26 -> fmap NoRewriting get
27 -> do x <- get ; y <- get
return $ At x y
28 -> do x <- get ; y <- get ; z <- get
return $ Elaborating x y z
29 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ ElaboratingArg w x y z
30 -> fmap ProviderError get
31 -> do x <- get ; y <- get
return $ LoadingFailed x y
32 -> do x <- get ; y <- get
return $ ReflectionError x y
33 -> do x <- get ; y <- get
return $ ReflectionFailed x y
34 -> fmap WithFnType get
35 -> fmap CantMatch get
36 -> do x1 <- get
x2 <- get
x3 <- get
return (ElabScriptDebug x1 x2 x3)
37 -> do x1 <- get
x2 <- get
return (NoEliminator x1 x2)
38 -> do x1 <- get
x2 <- get
return (InvalidTCArg x1 x2)
39 -> do x1 <- get
return (ElabScriptStuck x1)
40 -> do x <- get ; y <- get
return $ UnknownImplicit x y
41 -> fmap NoValidAlts get
_ -> error "Corrupted binary data for Err'"
----- Generated by 'derive'
instance Binary FC where
put x =
case x of
(FC x1 (x2, x3) (x4, x5)) -> do putWord8 0
put x1
put (x2 * 65536 + x3)
put (x4 * 65536 + x5)
NoFC -> putWord8 1
FileFC x1 -> do putWord8 2
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2x3 <- get
x4x5 <- get
return (FC x1 (x2x3 `div` 65536, x2x3 `mod` 65536) (x4x5 `div` 65536, x4x5 `mod` 65536))
1 -> return NoFC
2 -> do x1 <- get
return (FileFC x1)
_ -> error "Corrupted binary data for FC"
instance Binary Name where
put x
= case x of
UN x1 -> do putWord8 0
put x1
NS x1 x2 -> do putWord8 1
put x1
put x2
MN x1 x2 -> do putWord8 2
put x1
put x2
NErased -> putWord8 3
SN x1 -> do putWord8 4
put x1
SymRef x1 -> do putWord8 5
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (UN x1)
1 -> do x1 <- get
x2 <- get
return (NS x1 x2)
2 -> do x1 <- get
x2 <- get
return (MN x1 x2)
3 -> return NErased
4 -> do x1 <- get
return (SN x1)
5 -> do x1 <- get
return (SymRef x1)
_ -> error "Corrupted binary data for Name"
instance Binary SpecialName where
put x
= case x of
WhereN x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
InstanceN x1 x2 -> do putWord8 1
put x1
put x2
ParentN x1 x2 -> do putWord8 2
put x1
put x2
MethodN x1 -> do putWord8 3
put x1
CaseN x1 -> do putWord8 4; put x1
ElimN x1 -> do putWord8 5; put x1
InstanceCtorN x1 -> do putWord8 6; put x1
WithN x1 x2 -> do putWord8 7
put x1
put x2
MetaN x1 x2 -> do putWord8 8
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (WhereN x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (InstanceN x1 x2)
2 -> do x1 <- get
x2 <- get
return (ParentN x1 x2)
3 -> do x1 <- get
return (MethodN x1)
4 -> do x1 <- get
return (CaseN x1)
5 -> do x1 <- get
return (ElimN x1)
6 -> do x1 <- get
return (InstanceCtorN x1)
7 -> do x1 <- get
x2 <- get
return (WithN x1 x2)
8 -> do x1 <- get
x2 <- get
return (MetaN x1 x2)
_ -> error "Corrupted binary data for SpecialName"
instance Binary Const where
put x
= case x of
I x1 -> do putWord8 0
put x1
BI x1 -> do putWord8 1
put x1
Fl x1 -> do putWord8 2
put x1
Ch x1 -> do putWord8 3
put x1
Str x1 -> do putWord8 4
put x1
B8 x1 -> putWord8 5 >> put x1
B16 x1 -> putWord8 6 >> put x1
B32 x1 -> putWord8 7 >> put x1
B64 x1 -> putWord8 8 >> put x1
(AType (ATInt ITNative)) -> putWord8 9
(AType (ATInt ITBig)) -> putWord8 10
(AType ATFloat) -> putWord8 11
(AType (ATInt ITChar)) -> putWord8 12
StrType -> putWord8 13
Forgot -> putWord8 15
(AType (ATInt (ITFixed ity))) -> putWord8 (fromIntegral (16 + fromEnum ity)) -- 16-19 inclusive
VoidType -> putWord8 27
WorldType -> putWord8 28
TheWorld -> putWord8 29
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (I x1)
1 -> do x1 <- get
return (BI x1)
2 -> do x1 <- get
return (Fl x1)
3 -> do x1 <- get
return (Ch x1)
4 -> do x1 <- get
return (Str x1)
5 -> fmap B8 get
6 -> fmap B16 get
7 -> fmap B32 get
8 -> fmap B64 get
9 -> return (AType (ATInt ITNative))
10 -> return (AType (ATInt ITBig))
11 -> return (AType ATFloat)
12 -> return (AType (ATInt ITChar))
13 -> return StrType
15 -> return Forgot
16 -> return (AType (ATInt (ITFixed IT8)))
17 -> return (AType (ATInt (ITFixed IT16)))
18 -> return (AType (ATInt (ITFixed IT32)))
19 -> return (AType (ATInt (ITFixed IT64)))
27 -> return VoidType
28 -> return WorldType
29 -> return TheWorld
_ -> error "Corrupted binary data for Const"
instance Binary Raw where
put x
= case x of
Var x1 -> do putWord8 0
put x1
RBind x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
RApp x1 x2 -> do putWord8 2
put x1
put x2
RType -> putWord8 3
RConstant x1 -> do putWord8 4
put x1
RForce x1 -> do putWord8 5
put x1
RUType x1 -> do putWord8 6
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Var x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (RBind x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (RApp x1 x2)
3 -> return RType
4 -> do x1 <- get
return (RConstant x1)
5 -> do x1 <- get
return (RForce x1)
6 -> do x1 <- get
return (RUType x1)
_ -> error "Corrupted binary data for Raw"
instance Binary ImplicitInfo where
put x
= case x of
Impl x1 -> put x1
get
= do x1 <- get
return (Impl x1)
instance (Binary b) => Binary (Binder b) where
put x
= case x of
Lam x1 -> do putWord8 0
put x1
Pi x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
Let x1 x2 -> do putWord8 2
put x1
put x2
NLet x1 x2 -> do putWord8 3
put x1
put x2
Hole x1 -> do putWord8 4
put x1
GHole x1 x2 x3 -> do putWord8 5
put x1
put x2
put x3
Guess x1 x2 -> do putWord8 6
put x1
put x2
PVar x1 -> do putWord8 7
put x1
PVTy x1 -> do putWord8 8
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Lam x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (Pi x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (Let x1 x2)
3 -> do x1 <- get
x2 <- get
return (NLet x1 x2)
4 -> do x1 <- get
return (Hole x1)
5 -> do x1 <- get
x2 <- get
x3 <- get
return (GHole x1 x2 x3)
6 -> do x1 <- get
x2 <- get
return (Guess x1 x2)
7 -> do x1 <- get
return (PVar x1)
8 -> do x1 <- get
return (PVTy x1)
_ -> error "Corrupted binary data for Binder"
instance Binary Universe where
put x = case x of
UniqueType -> putWord8 0
AllTypes -> putWord8 1
NullType -> putWord8 2
get = do i <- getWord8
case i of
0 -> return UniqueType
1 -> return AllTypes
2 -> return NullType
_ -> error "Corrupted binary data for Universe"
instance Binary NameType where
put x
= case x of
Bound -> putWord8 0
Ref -> putWord8 1
DCon x1 x2 x3 -> do putWord8 2
put (x1 * 65536 + x2)
put x3
TCon x1 x2 -> do putWord8 3
put (x1 * 65536 + x2)
get
= do i <- getWord8
case i of
0 -> return Bound
1 -> return Ref
2 -> do x1x2 <- get
x3 <- get
return (DCon (x1x2 `div` 65536) (x1x2 `mod` 65536) x3)
3 -> do x1x2 <- get
return (TCon (x1x2 `div` 65536) (x1x2 `mod` 65536))
_ -> error "Corrupted binary data for NameType"
-- record concrete levels only, for now
instance Binary UExp where
put x = case x of
UVar t -> do putWord8 0
put ((-1) :: Int) -- TMP HACK!
UVal t -> do putWord8 1
put t
get = do i <- getWord8
case i of
0 -> do x1 <- get
return (UVar x1)
1 -> do x1 <- get
return (UVal x1)
_ -> error "Corrupted binary data for UExp"
instance {- (Binary n) => -} Binary (TT Name) where
put x
= {-# SCC "putTT" #-}
case x of
P x1 x2 x3 -> do putWord8 0
put x1
put x2
-- put x3
V x1 -> if (x1 >= 0 && x1 < 256)
then do putWord8 1
putWord8 (toEnum (x1 + 1))
else do putWord8 9
put x1
Bind x1 x2 x3 -> do putWord8 2
put x1
put x2
put x3
App _ x1 x2 -> do putWord8 3
put x1
put x2
Constant x1 -> do putWord8 4
put x1
Proj x1 x2 -> do putWord8 5
put x1
putWord8 (toEnum (x2 + 1))
Erased -> putWord8 6
TType x1 -> do putWord8 7
put x1
Impossible -> putWord8 8
UType x1 -> do putWord8 10
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
-- x3 <- get
return (P x1 x2 Erased)
1 -> do x1 <- getWord8
return (V ((fromEnum x1) - 1))
2 -> do x1 <- get
x2 <- get
x3 <- get
return (Bind x1 x2 x3)
3 -> do x1 <- get
x2 <- get
return (App Complete x1 x2)
4 -> do x1 <- get
return (Constant x1)
5 -> do x1 <- get
x2 <- getWord8
return (Proj x1 ((fromEnum x2)-1))
6 -> return Erased
7 -> do x1 <- get
return (TType x1)
8 -> return Impossible
9 -> do x1 <- get
return (V x1)
10 -> do x1 <- get
return (UType x1)
_ -> error "Corrupted binary data for TT"
| Enamex/Idris-dev | src/Idris/Core/Binary.hs | bsd-3-clause | 25,367 | 0 | 19 | 15,090 | 7,394 | 3,271 | 4,123 | 618 | 0 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module DB where
import Data.SafeCopy
import Servant.Server.Auth.Token.Acid.Schema as A
-- | Application global state for acid-state
data DB = DB {
dbAuth :: A.Model -- ^ Storage for Auth state
, dbCustom :: () -- ^ Demo of custom state
}
-- | Generation of the initial state
newDB :: DB
newDB = DB {
dbAuth = A.newModel
, dbCustom = ()
}
instance HasModelRead DB where
askModel = dbAuth
instance HasModelWrite DB where
putModel db m = db { dbAuth = m }
deriveSafeCopy 0 'base ''DB
A.deriveQueries ''DB
A.makeModelAcidic ''DB
| ivan-m/servant-auth-token | example/acid/src/DB.hs | bsd-3-clause | 594 | 0 | 9 | 117 | 150 | 87 | 63 | -1 | -1 |
{-# LANGUAGE DoRec #-}
-- | Make sure this program runs without leaking memory
import FRP.Sodium
import Control.Applicative
import Control.Exception
import System.Timeout
data Source = Source { unSource :: Reactive (Behaviour (Int, Int), Event Source) }
verbose = False
main = do
(et, pushT) <- sync $ newEvent
t <- sync $ hold 0 et
let etens = (`div` 10) <$> et
tens <- sync $ hold 0 etens
let changeTens = filterJust $ snapshot (\new old ->
if new /= old
then Just new
else Nothing) etens tens
oout <- sync $ do
let newSource = (\tens -> Source $ do
let out = ((,) tens) <$> t
return (out, newSource)
) <$> changeTens
initPair = (((,) 0) <$> t, newSource)
rec
bPair <- hold initPair eSwitch
let eSwitch = execute $ unSource <$> switchE (snd <$> bPair)
return (fst <$> bPair)
out <- sync $ switch oout
kill <- sync $ listen (value out) $ \x ->
if verbose then print x else (evaluate x >> return ())
timeout 4000000 $ mapM_ (sync . pushT) [0..]
kill
| kevintvh/sodium | haskell/examples/tests/memory-test-2.hs | bsd-3-clause | 1,162 | 0 | 26 | 391 | 428 | 217 | 211 | 31 | 3 |
module PLpatt.Tools where
import PLpatt.Sign
import PLpatt.AS_BASIC_PLpatt
import qualified MMT.Tools as Generic
import Data.Maybe
bool_from_pt :: Generic.Tree -> Maybe Bool'
bool_from_pt x = case x of
(Generic.Application n Nothing args )
| n == "equiv" && length args == 2 ->
Just (Equiv (fromJust (bool_from_pt (head args)))
(fromJust (bool_from_pt (args !! 1))))
| n == "impl" && length args == 2 ->
Just (Impl (fromJust (bool_from_pt (head args)))
(fromJust (bool_from_pt (args !! 1))))
| n == "not" && length args == 1 ->
Just (Not (fromJust (bool_from_pt (head args))))
| n == "or" && length args == 2 ->
Just (Or (fromJust (bool_from_pt (head args)))
(fromJust (bool_from_pt (args !! 1))))
| n == "and" && length args == 2 ->
Just (And (fromJust (bool_from_pt (head args)))
(fromJust (bool_from_pt (args !! 1))))
| n == "false" && null args -> Just False'
| n == "true" && null args -> Just True'
| otherwise -> Nothing
(Generic.Application n (Just (pat, inst)) args) -> Nothing
(Generic.Bind _n _v _s ) -> Nothing
(Generic.Tbind _n _a _v _s ) -> Nothing
(Generic.Variable _n ) -> Nothing
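-- A hedged example of the translation above; the parse tree below is an
-- assumption constructed purely for illustration.
exampleBool :: Maybe Bool'
exampleBool =
    bool_from_pt (Generic.Application "and" Nothing
                    [ Generic.Application "true" Nothing []
                    , Generic.Application "false" Nothing [] ])
-- evaluates to Just (And True' False')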
decl_from_pt :: Generic.Decl -> Maybe Decl
decl_from_pt d = case d of
(Generic.Decl pname iname args)
| pname == "prop" && null args -> Just (Prop_decl (Prop iname))
| pname == "dot" && length args == 1 ->
Just (Dot_decl (Dot iname (fromJust (bool_from_pt (head args)))))
| otherwise -> Nothing
sign_from_pt :: Generic.Sign -> Sigs
sign_from_pt (Generic.Sign sg) = Sigs (map (fromJust . decl_from_pt) sg)
axiom_from_pt :: Generic.Tree -> Bool'
axiom_from_pt ax = fromJust (bool_from_pt ax)
theo_from_pt :: Generic.Theo -> Theo
theo_from_pt th = Theo { sign = sign_from_pt (Generic.sign th),
axioms = map axiom_from_pt (Generic.axioms th) }
| keithodulaigh/Hets | PLpatt/Tools.hs | gpl-2.0 | 1,961 | 0 | 19 | 505 | 820 | 404 | 416 | 43 | 5 |
{-# LANGUAGE BangPatterns #-}
import System.Directory
import System.FilePath
import Control.Concurrent.Async
import System.Environment
import Data.List hiding (find)
import Control.Exception (finally)
import Data.Maybe (isJust)
import Control.Concurrent.MVar
import Data.IORef
import GHC.Conc (getNumCapabilities)
-- <<main
main = do
[n,s,d] <- getArgs
sem <- newNBSem (read n)
find sem s d >>= print
-- >>
-- <<find
find :: NBSem -> String -> FilePath -> IO (Maybe FilePath)
find sem s d = do
fs <- getDirectoryContents d
let fs' = sort $ filter (`notElem` [".",".."]) fs
if any (== s) fs'
then return (Just (d </> s))
else do
let ps = map (d </>) fs' -- <1>
foldr (subfind sem s) dowait ps [] -- <2>
where
dowait as = loop (reverse as) -- <3>
loop [] = return Nothing
loop (a:as) = do -- <4>
r <- wait a -- <5>
case r of
Nothing -> loop as -- <6>
Just a -> return (Just a) -- <7>
-- >>
-- <<subfind
subfind :: NBSem -> String -> FilePath
-> ([Async (Maybe FilePath)] -> IO (Maybe FilePath))
-> [Async (Maybe FilePath)] -> IO (Maybe FilePath)
subfind sem s p inner asyncs = do
isdir <- doesDirectoryExist p
if not isdir
then inner asyncs
else do
q <- tryAcquireNBSem sem -- <1>
if q
then do
let dofind = find sem s p `finally` releaseNBSem sem -- <2>
withAsync dofind $ \a -> inner (a:asyncs)
else do
r <- find sem s p -- <3>
case r of
Nothing -> inner asyncs
Just _ -> return r
-- >>
-- <<NBSem
newtype NBSem = NBSem (MVar Int)
newNBSem :: Int -> IO NBSem
newNBSem i = do
m <- newMVar i
return (NBSem m)
tryAcquireNBSem :: NBSem -> IO Bool
tryAcquireNBSem (NBSem m) =
modifyMVar m $ \i ->
if i == 0
then return (i, False)
else let !z = i-1 in return (z, True)
releaseNBSem :: NBSem -> IO ()
releaseNBSem (NBSem m) =
modifyMVar m $ \i ->
let !z = i+1 in return (z, ())
-- >>
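-- A hedged usage sketch (not part of the original program): bound the number of
-- concurrently searched subtrees by the number of capabilities instead of a
-- command-line argument.
findWithCapabilities :: String -> FilePath -> IO (Maybe FilePath)
findWithCapabilities s d = do
  n <- getNumCapabilities
  sem <- newNBSem n
  find sem s d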
| prt2121/haskell-practice | parconc/findpar2.hs | apache-2.0 | 2,127 | 0 | 17 | 689 | 828 | 421 | 407 | 64 | 4 |
module PackageTests.PreProcess.Check (suite) where
import PackageTests.PackageTester
(PackageSpec(..), SuiteConfig, assertBuildSucceeded, cabal_build)
import System.FilePath
import Test.Tasty.HUnit
suite :: SuiteConfig -> Assertion
suite config = do
let spec = PackageSpec
{ directory = "PackageTests" </> "PreProcess"
, distPref = Nothing
, configOpts = ["--enable-tests", "--enable-benchmarks"]
}
result <- cabal_build config spec
assertBuildSucceeded result
| enolan/cabal | Cabal/tests/PackageTests/PreProcess/Check.hs | bsd-3-clause | 527 | 0 | 12 | 116 | 120 | 69 | 51 | 13 | 1 |
{-# LANGUAGE PolyKinds, GADTs #-}
module T7328 where
data Proxy a
class Foo a where
foo :: a ~ f i => Proxy (Foo f)
| ghc-android/ghc | testsuite/tests/polykinds/T7328.hs | bsd-3-clause | 123 | 0 | 10 | 32 | 44 | 23 | 21 | -1 | -1 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, CPP, GADTs, TypeFamilies, OverloadedStrings, FlexibleContexts, EmptyDataDecls, FlexibleInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module PersistUniqueTest where
import Init
-- mpsGeneric = False is due to a bug, or at least a missing feature, in mkKeyTypeDec in TH.hs
#if WITH_NOSQL
mkPersist persistSettings { mpsGeneric = False } [persistUpperCase|
#else
share [mkPersist persistSettings { mpsGeneric = False }, mkMigrate "migration"] [persistLowerCase|
#endif
Fo
foo Int
bar Int
Primary foo
UniqueBar bar
deriving Eq Show
|]
#ifdef WITH_NOSQL
cleanDB :: (MonadIO m, PersistQuery backend, PersistEntityBackend Fo ~ backend) => ReaderT backend m ()
cleanDB = do
deleteWhere ([] :: [Filter Fo])
db :: Action IO () -> Assertion
db = db' cleanDB
#endif
specs :: Spec
specs = describe "custom primary key" $ do
#ifdef WITH_NOSQL
return ()
#else
it "getBy" $ db $ do
let b = 5
k <- insert $ Fo 3 b
Just vk <- get k
Just vu <- getBy (UniqueBar b)
vu @== Entity k vk
it "insertUniqueEntity" $ db $ do
let fo = Fo 3 5
Just (Entity _ insertedFoValue) <- insertUniqueEntity fo
Nothing <- insertUniqueEntity fo
fo @== insertedFoValue
#endif
| psibi/persistent | persistent-test/src/PersistUniqueTest.hs | mit | 1,271 | 0 | 10 | 267 | 149 | 82 | 67 | 17 | 1 |
{-# LANGUAGE ScopedTypeVariables, Rank2Types #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- Serialized values
module GHCJS.Prim.TH.Serialized ( Serialized
, fromSerialized
, toSerialized
, serializeWithData
, deserializeWithData
) where
import Data.Binary
import Data.Bits
import Data.Data
import Data.Typeable.Internal
-- | Represents a serialized value of a particular type. Attempts can be made to deserialize it at certain types
data Serialized = Serialized TypeRep [Word8]
instance Binary Serialized where
put (Serialized the_type bytes) =
put the_type >> put bytes
get = Serialized <$> get <*> get
instance Binary TyCon where
put (TyCon _ p m n) = put p >> put m >> put n
get = mkTyCon3 <$> get <*> get <*> get
instance Binary TypeRep where
put type_rep =
let (ty_con, child_type_reps) = splitTyConApp type_rep
in put ty_con >> put child_type_reps
get = mkTyConApp <$> get <*> get
-- | Put a Typeable value that we are able to actually turn into bytes into a 'Serialized' value ready for deserialization later
toSerialized :: Typeable a => (a -> [Word8]) -> a -> Serialized
toSerialized serialize what = Serialized (typeOf what) (serialize what)
-- | If the 'Serialized' value contains something of the given type, then use the specified deserializer to return @Just@ that.
-- Otherwise return @Nothing@.
fromSerialized :: forall a. Typeable a => ([Word8] -> a) -> Serialized -> Maybe a
fromSerialized deserialize (Serialized the_type bytes)
| the_type == typeOf (undefined :: a) = Just (deserialize bytes)
| otherwise = Nothing
-- | Force the contents of the Serialized value so we know it doesn't contain any bottoms
seqSerialized :: Serialized -> ()
seqSerialized (Serialized the_type bytes) = the_type `seq` bytes `seqList` ()
-- | Use a 'Data' instance to implement a serialization scheme dual to that of 'deserializeWithData'
serializeWithData :: Data a => a -> [Word8]
serializeWithData what = serializeWithData' what []
serializeWithData' :: Data a => a -> [Word8] -> [Word8]
serializeWithData' what = fst $ gfoldl (\(before, a_to_b) a -> (before . serializeWithData' a, a_to_b a))
(\x -> (serializeConstr (constrRep (toConstr what)), x))
what
-- | Use a 'Data' instance to implement a deserialization scheme dual to that of 'serializeWithData'
deserializeWithData :: Data a => [Word8] -> a
deserializeWithData = snd . deserializeWithData'
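-- A hedged round-trip illustration (the name and the example value are
-- assumptions): serialising via the 'Data' instance and deserialising at the
-- same type recovers the original value.
exampleRoundTrip :: Bool
exampleRoundTrip =
  deserializeWithData (serializeWithData (42 :: Int)) == (42 :: Int)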
deserializeWithData' :: forall a. Data a => [Word8] -> ([Word8], a)
deserializeWithData' bytes = deserializeConstr bytes $ \constr_rep bytes ->
gunfold (\(bytes, b_to_r) -> let (bytes', b) = deserializeWithData' bytes in (bytes', b_to_r b))
(\x -> (bytes, x))
(repConstr (dataTypeOf (undefined :: a)) constr_rep)
serializeConstr :: ConstrRep -> [Word8] -> [Word8]
serializeConstr (AlgConstr ix) = serializeWord8 1 . serializeInt ix
serializeConstr (IntConstr i) = serializeWord8 2 . serializeInteger i
serializeConstr (FloatConstr r) = serializeWord8 3 . serializeRational r
serializeConstr (CharConstr c) = serializeWord8 4 . serializeChar c
deserializeConstr :: [Word8] -> (ConstrRep -> [Word8] -> a) -> a
deserializeConstr bytes k = deserializeWord8 bytes $ \constr_ix bytes ->
case constr_ix of
1 -> deserializeInt bytes $ \ix -> k (AlgConstr ix)
2 -> deserializeInteger bytes $ \i -> k (IntConstr i)
3 -> deserializeRational bytes $ \r -> k (FloatConstr r)
4 -> deserializeChar bytes $ \c -> k (CharConstr c)
x -> error $ "deserializeConstr: unrecognised serialized constructor type " ++ show x ++ " in context " ++ show bytes
serializeFixedWidthNum :: forall a. (Num a, Integral a, FiniteBits a) => a -> [Word8] -> [Word8]
serializeFixedWidthNum what = go (finiteBitSize what) what
where
go :: Int -> a -> [Word8] -> [Word8]
go size current rest
| size <= 0 = rest
| otherwise = fromIntegral (current .&. 255) : go (size - 8) (current `shiftR` 8) rest
deserializeFixedWidthNum :: forall a b. (Num a, Integral a, FiniteBits a) => [Word8] -> (a -> [Word8] -> b) -> b
deserializeFixedWidthNum bytes k = go (finiteBitSize (undefined :: a)) bytes k
where
go :: Int -> [Word8] -> (a -> [Word8] -> b) -> b
go size bytes k
| size <= 0 = k 0 bytes
| otherwise = case bytes of
(byte:bytes) -> go (size - 8) bytes (\x -> k ((x `shiftL` 8) .|. fromIntegral byte))
[] -> error "deserializeFixedWidthNum: unexpected end of stream"
serializeEnum :: (Enum a) => a -> [Word8] -> [Word8]
serializeEnum = serializeInt . fromEnum
deserializeEnum :: Enum a => [Word8] -> (a -> [Word8] -> b) -> b
deserializeEnum bytes k = deserializeInt bytes (k . toEnum)
serializeWord8 :: Word8 -> [Word8] -> [Word8]
serializeWord8 x = (x:)
deserializeWord8 :: [Word8] -> (Word8 -> [Word8] -> a) -> a
deserializeWord8 (byte:bytes) k = k byte bytes
deserializeWord8 [] _ = error "deserializeWord8: unexpected end of stream"
serializeInt :: Int -> [Word8] -> [Word8]
serializeInt = serializeFixedWidthNum
deserializeInt :: [Word8] -> (Int -> [Word8] -> a) -> a
deserializeInt = deserializeFixedWidthNum
serializeRational :: (Real a) => a -> [Word8] -> [Word8]
serializeRational = serializeString . show . toRational
deserializeRational :: (Fractional a) => [Word8] -> (a -> [Word8] -> b) -> b
deserializeRational bytes k = deserializeString bytes (k . fromRational . read)
serializeInteger :: Integer -> [Word8] -> [Word8]
serializeInteger = serializeString . show
deserializeInteger :: [Word8] -> (Integer -> [Word8] -> a) -> a
deserializeInteger bytes k = deserializeString bytes (k . read)
serializeChar :: Char -> [Word8] -> [Word8]
serializeChar = serializeString . show
deserializeChar :: [Word8] -> (Char -> [Word8] -> a) -> a
deserializeChar bytes k = deserializeString bytes (k . read)
serializeString :: String -> [Word8] -> [Word8]
serializeString = serializeList serializeEnum
deserializeString :: [Word8] -> (String -> [Word8] -> a) -> a
deserializeString = deserializeList deserializeEnum
serializeList :: (a -> [Word8] -> [Word8]) -> [a] -> [Word8] -> [Word8]
serializeList serialize_element xs = serializeInt (length xs) . foldr (.) id (map serialize_element xs)
deserializeList :: forall a b. (forall c. [Word8] -> (a -> [Word8] -> c) -> c)
-> [Word8] -> ([a] -> [Word8] -> b) -> b
deserializeList deserialize_element bytes k = deserializeInt bytes $ \len bytes -> go len bytes k
where
go :: Int -> [Word8] -> ([a] -> [Word8] -> b) -> b
go len bytes k
| len <= 0 = k [] bytes
| otherwise = deserialize_element bytes (\elt bytes -> go (len - 1) bytes (k . (elt:)))
seqList :: [a] -> b -> b
seqList [] b = b
seqList (x:xs) b = x `seq` seqList xs b
| forked-upstream-packages-for-ghcjs/ghcjs | ghcjs-prim/src/GHCJS/Prim/TH/Serialized.hs | mit | 7,315 | 0 | 18 | 1,873 | 2,385 | 1,267 | 1,118 | 112 | 5 |
{-# LANGUAGE StandaloneDeriving, UndecidableInstances #-}
module Data.Term.Types where
import Data.Binding
import Data.Name
import Data.Typing
import qualified Data.Map as Map
import qualified Data.Set as Set
type Result a = Either String a
type Context term = Map.Map Name term
type Inferer term = Context term -> Result term
type Checker term = term -> Context term -> Result term
data Term f = Term { freeVariables :: Set.Set Name, typeOf :: Checker (Term f), out :: Typing (Binding f) (Term f) }
data Unification f = Unification (Set.Set Name) (Typing (Binding f) (Unification f)) | Conflict (Term f) (Term f)
expected :: Functor f => Unification f -> Term f
expected (Conflict expected _) = expected
expected (Unification freeVariables out) = Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") (expected <$> out)
actual :: Functor f => Unification f -> Term f
actual (Conflict _ actual) = actual
actual (Unification freeVariables out) = Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") (actual <$> out)
unified :: Traversable f => Unification f -> Maybe (Term f)
unified (Conflict _ _) = Nothing
unified (Unification freeVariables out) = do
out <- mapM unified out
return $ Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") out
into :: Functor f => Term f -> Unification f
into term = Unification (freeVariables term) $ into <$> out term
instance Eq (f (Term f)) => Eq (Term f) where
a == b = freeVariables a == freeVariables b && out a == out b
deriving instance (Eq (Term f), Eq (f (Unification f))) => Eq (Unification f)
deriving instance (Show (Term f), Show (f (Unification f))) => Show (Unification f)
| antitypical/Surface | src/Data/Term/Types.hs | mit | 1,752 | 0 | 11 | 305 | 697 | 354 | 343 | -1 | -1 |
-- Conduit
-- ref: https://wiki.haskell.org/Conduit
-- conduit-lib: https://www.schoolofhaskell.com/user/snoyberg/library-documentation/conduit-overview
{-
Streaming data library
Collection of libraries that share the same underlying data structures
Alternative to lazy I/O
Promises: deterministic resource management (memory, file descriptors)
-}
-- example
import Conduit
import Control.Monad.Trans.Resource
import qualified Data.Conduit.Binary as CB
import Data.Word8 (toUpper)
main :: IO ()
main = runResourceT
$ CB.sourceFile "input.txt"
$= omapCE toUpper
$= takeCE 500
$$ CB.sinkFile "output.txt"
{-
Core datatype: data ConduitM i o m r
Each conduit has:
Upstream (i): stream of incoming values
Downstream (o): stream of outgoing values
Monad (m): conduits are monad transformers
Result value (r): just like all monads
Producer ignores its upstream
Consumer ignores its downstream
Source has no upstream
Sink has no downstream
Conduit has both upstream and downstream
Producer unifies to Source and Conduit
Consumer unifies to Conduit and Sink
-}
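-- A minimal runnable sketch of those pieces (added illustration; yieldMany,
-- mapC and sumC are assumed to come from the conduit-combinators "Conduit"
-- module imported above): a Source of Ints, a Conduit doubling them, and a
-- Sink summing them up (evaluates to 110).
pipelineExample :: IO Int
pipelineExample = yieldMany [1 .. 10 :: Int] $= mapC (* 2) $$ sumC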
-- vs. Pipes
-- ref: https://twanvl.nl/blog/haskell/conduits-vs-pipes | Airtnp/Freshman_Simple_Haskell_Lib | Idioms/Conduit.hs | mit | 1,278 | 0 | 10 | 300 | 85 | 50 | 35 | 10 | 1 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
( widgetFile
, PersistConfig
, staticDir
) where
import Prelude
import Language.Haskell.TH.Syntax
import Database.Persist.Postgresql (PostgresConf)
import Yesod.Default.Util
import Settings.Development
import Data.Default (def)
import Text.Hamlet
-- | Which Persistent backend this site is using.
type PersistConfig = PostgresConf
-- Static settings below. Changing these requires a recompile.
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
{ wfsHamletSettings = defaultHamletSettings
{ hamletNewlines = AlwaysNewlines
}
}
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if development then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
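-- For example (illustrative; "homepage" is a hypothetical template name),
-- $(widgetFile "homepage") picks up templates/homepage.hamlet plus any matching
-- .cassius/.lucius/.julius files; the 'development' flag above decides whether
-- they are reloaded from disk at runtime or baked in at compile time.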
| fpco/schoolofhaskell.com | src/Settings.hs | mit | 1,468 | 0 | 8 | 286 | 148 | 96 | 52 | 22 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
-- | Strings normalized according to Normalization Form Compatibility
-- Decomposition.
module Data.Text.Normal.NFKD (
Normal,
fromText, toText
) where
import Control.Arrow (first)
import Control.DeepSeq
import Data.Data
import Data.Monoid
import Data.String
import Data.Text (Text)
import Data.Text.ICU.Normalize
-- | Normalized text.
newtype Normal = Normal
{ -- | Convert 'Normal' to 'Text'. This function just unwraps the newtype, so there is zero runtime cost.
toText :: Text } deriving (Eq, Ord, Data, Typeable)
-- | Convert 'Text' efficiently to 'Normal'.
fromText :: Text -> Normal
fromText t = Normal $ case quickCheck NFKD t of
Nothing | isNormalized NFKD t -> t
| otherwise -> normalize NFKD t
Just False -> normalize NFKD t
Just True -> t
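-- A small usage sketch (added illustration; 'ligatureExample' is not part of
-- the original module): under NFKD the compatibility ligature U+FB01 decomposes
-- to the two code points "fi", so both spellings yield the same 'Normal' value.
ligatureExample :: Bool
ligatureExample = (fromString "\xFB01" :: Normal) == fromString "fi"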
instance Show Normal where
show = show . toText
instance Read Normal where
readsPrec i = map (first fromText) . readsPrec i
instance Monoid Normal where
mappend (Normal t1) (Normal t2) = Normal $ t1 <> t2
mempty = Normal mempty
instance IsString Normal where
fromString = fromText . fromString
instance NFData Normal where
rnf = rnf . toText
| pikajude/text-normal | src/Data/Text/Normal/NFKD.hs | mit | 1,226 | 0 | 12 | 276 | 317 | 170 | 147 | 30 | 3 |
{-# LANGUAGE DataKinds, FlexibleContexts, TypeOperators #-}
module Geometry where
import Control.Applicative
import Data.Foldable (fold, foldMap)
import Data.Vinyl
import Graphics.GLUtil
import Graphics.Rendering.OpenGL hiding (normal, normalize, light, Normal, Color)
import Linear
import Graphics.VinylGL
import System.FilePath ((</>))
type Pos = "vertexPos" ::: V3 GLfloat
type Normal = "vertexNormal" ::: V3 GLfloat
type Color = "vertexColor" ::: V3 GLfloat
pos :: Pos
pos = Field
normal :: Normal
normal = Field
col :: Color
col = Field
-- The 2D corners of a square.
square :: [V2 GLfloat]
square = V2 <$> [-1,1] <*> [1,-1]
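-- (evaluates to [V2 (-1) 1, V2 (-1) (-1), V2 1 1, V2 1 (-1)])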
-- The 3D faces of a cube.
front,back,left,right,top,bottom :: [V3 GLfloat]
front = map (\(V2 x y) -> V3 x y 1) square
back = map (\(V2 x y) -> V3 (-x) y (-1)) square
left = map (\(V2 z y) -> V3 (-1) y z) square
right = map (\(V2 z y) -> V3 1 y (-z)) square
top = map (\(V2 x z) -> V3 x 1 (-z)) square
bottom = map (\(V2 x z) -> V3 x (-1) z) square
-- Cube face vertices paired with normal vectors.
pts :: [PlainRec [Pos,Normal]]
pts = fold [ map (setNorm z) front
, map (setNorm $ -z) back
, map (setNorm $ -x) left
, map (setNorm x) right
, map (setNorm y) top
, map (setNorm $ -y) bottom ]
where [x,y,z] = basis
setNorm v p = (pos =: p <+> normal =: v)
-- Color the front vertices a dark blue, the back a light beige.
colorize :: PlainRec [Pos,Normal] -> PlainRec [Pos,Normal,Color]
colorize pt = pt <+> col =: c
where c | view (rLens pos._z) pt > 0 = V3 8.235294e-2 0.20392157 0.3137255
| otherwise = V3 0.95686275 0.8392157 0.7372549
-- Indices into the vertex array for each face.
inds :: [Word32]
inds = take 36 $ foldMap (flip map faceInds . (+)) [0,4..]
where faceInds = [0,1,2,2,1,3]
-- For rendering a cube, we'll need a ModelView matrix, and a
-- ProjectionModelView matrix.
type CamInfo = PlainRec ["cam" ::: M44 GLfloat, "proj" ::: M44 GLfloat]
cube :: (i <: CamInfo) => IO (i -> IO ())
cube = do s <- simpleShaderProgram ("etc"</>"poly.vert") ("etc"</>"poly.frag")
vb <- bufferVertices (map colorize pts)
eb <- makeBuffer ElementArrayBuffer inds
vao <- makeVAO $
do currentProgram $= Just (program s)
setUniforms s (light =: normalize (V3 0 0 1))
enableVertices' s vb
bindVertices vb
bindBuffer ElementArrayBuffer $= Just eb
let ss = setUniforms s
return $ \appInfo -> withVAO vao $
do currentProgram $= Just (program s)
ss (cast appInfo :: CamInfo)
drawIndexedTris 12
where light :: "lightDir" ::: V3 GLfloat
light = Field
-- We don't use normal vectors with the ground, so we just need a
-- single composite projection matrix.
type ProjInfo = PlainRec '["proj" ::: M44 GLfloat]
-- Ground texture from:
-- http://www.texturehd.com/data/media/21/Wood_floor_boards.jpg
ground :: (i <: ProjInfo) => IO (i -> IO ())
ground = do Right t <- readTexture $ "art"</>"Wood_floor_boards.png"
generateMipmap' Texture2D
s <- simpleShaderProgram ("etc"</>"ground.vert") ("etc"</>"ground.frag")
vb <- bufferVertices . map ((pos =:) . scale3D) $
V2 <$> [-1,1] <*> [-1,1]
vao <- makeVAO $
do currentProgram $= Just (program s)
enableVertices' s vb
bindVertices vb
setUniforms s (tex =: 0)
textureBinding Texture2D $= Just t
textureFilter Texture2D $=
((Linear', Just Linear'), Linear')
texture2DWrap $= (Repeated, Repeat)
let ss = setUniforms s
return $ \appInfo -> withVAO vao $
do currentProgram $= Just (program s)
ss (cast appInfo :: ProjInfo)
withTextures2D [t] $ drawArrays TriangleStrip 0 4
where scale3D :: V2 GLfloat -> V3 GLfloat
scale3D = (\(V2 x z) -> V3 x (-1.01) z) . (3*^)
tex :: "tex" ::: GLint
tex = Field
| spetz911/progames | vinyl-gl-master/examples/src/Geometry.hs | mit | 4,209 | 0 | 16 | 1,262 | 1,486 | 776 | 710 | 87 | 1 |
module Classifier (
Classifier(..),
Metadata(..),
StoredClassifier(..),
buildClassifier,
classifySequence,
classifySequenceMulti,
classifySequenceAll,
leafOTU) where
import Data.Tree
import qualified Data.Map.Strict as M
import qualified Data.List as L
import Data.Binary (Binary, put, get, Get)
import Data.Ord
import Data.Tuple.Select
import Data.Word
import MlgscTypes
import Alignment
import NucModel
import PepModel
import PWMModel (PWMModel(..), scoreSeq, cladeName)
-- When storing a Classifier to disk, we add some metadata. It may be queried
-- with mlgsc_dump.
data StoredClassifier = StoredClassifier {
classifier :: Classifier
    , metadata :: Metadata
}
instance Binary StoredClassifier where
put storedCls = do
put $ classifier storedCls
put $ metadata storedCls
get = do
cls <- get :: Get Classifier
md <- get :: Get Metadata
return $ StoredClassifier cls md
data Metadata = Metadata {
cmdLine :: String
, checksum :: Word32
}
instance Binary Metadata where
put md = do
put $ cmdLine md
put $ checksum md
get = do
cmdLine <- get :: Get String
checksum <- get :: Get Word32
return $ Metadata cmdLine checksum
data Classifier = PWMClassifier (Tree PWMModel) ScaleFactor
deriving (Show, Eq)
instance Binary Classifier where
put (PWMClassifier modTree scaleFactor) = do
put modTree
put scaleFactor
get = do
modTree <- get :: Get (Tree PWMModel)
scaleFactor <- get :: Get ScaleFactor
return $ PWMClassifier modTree scaleFactor
buildClassifier :: Molecule -> SmallProb -> ScaleFactor ->
AlnMap -> OTUTree -> Classifier
buildClassifier mol smallProb scale alnMap otuTree
= case mol of
DNA -> buildNucClassifier smallProb scale alnMap otuTree
Prot -> buildPepClassifier smallProb scale alnMap otuTree
-- TODO: these two are almost identical: refactor and pass the alignment-to-model
-- function as a parameter in the case clause of buildClassifier above.
buildNucClassifier :: SmallProb -> ScaleFactor -> AlnMap -> OTUTree
-> Classifier
buildNucClassifier smallprob scale map otuTree =
PWMClassifier cladeModTree scale
where cladeModTree = fmap NucPWMModel modTree
modTree = fmap (\(name, aln) ->
alnToNucModel smallprob scale name aln)
treeOfNamedAlns
treeOfNamedAlns = mergeNamedAlns treeOfLeafNamedAlns
treeOfLeafNamedAlns =
fmap (\k -> (k, M.findWithDefault [] k map)) otuTree
buildPepClassifier :: SmallProb -> ScaleFactor -> AlnMap -> OTUTree
-> Classifier
buildPepClassifier smallprob scale map otuTree =
PWMClassifier cladeModTree scale
where cladeModTree = fmap PepPWMModel modTree
modTree = fmap (\(name, aln) ->
alnToPepModel smallprob scale name aln)
treeOfNamedAlns
treeOfNamedAlns = mergeNamedAlns treeOfLeafNamedAlns
treeOfLeafNamedAlns =
fmap (\k -> (k, M.findWithDefault [] k map)) otuTree
-- The Int parameter is the log_10(ER) cutoff (the support value of nodes in the
-- path in the default output).
classifySequence :: Classifier -> Int -> Sequence -> Trail
classifySequence (PWMClassifier modTree scale) log10ERcutoff seq =
chooseSubtree modTree scale log10ERcutoff seq
chooseSubtree :: Tree PWMModel -> ScaleFactor -> Int -> Sequence -> Trail
chooseSubtree (Node _ []) _ _ _ = []
-- single-kid-node case - there is no meaningful ER to speak of so I just use
-- 1000 - could be optimized :-)
chooseSubtree (Node model [kid]) scale cutoff seq
= PWMStep (cladeName $ rootLabel kid) kidScore 0 1000
: chooseSubtree kid scale cutoff seq
where kidScore = scoreSeq (rootLabel kid) seq
chooseSubtree (Node model kids) scale cutoff seq
| diff < (round scale * cutoff) = []
| otherwise = PWMStep bestKidName bestKidScore
sndBestKidScore log10ER
: chooseSubtree bestKid scale cutoff seq
where diff = bestKidScore - sndBestKidScore
bestKidName = cladeName $ rootLabel bestKid
(bestKid, Down bestKidScore) = orderedKids !! 0
(sndBestKid, Down sndBestKidScore) = orderedKids !! 1
orderedKids = L.sortBy (comparing snd) $ zip kids (map Down scores)
scores = map (flip scoreSeq seq . rootLabel) kids
log10ER = log10evidenceRatio (round scale) bestKidScore sndBestKidScore
-- Intended mainly for debugging, as it makes it possible to see a
-- query's score at every node of the tree, and therefore allows identifying
-- where the classifier chooses the wrong branch. The recursion starts at the
-- root (rather than at its children), so we get rid of the Trail's head (hence
-- the call to map tail).
classifySequenceAll :: Classifier -> Sequence -> [Trail]
classifySequenceAll (PWMClassifier modTree scale) seq =
map tail $ walkSubtrees modTree scale seq bestScore
where bestScore = maximum $ map (flip scoreSeq seq . rootLabel) (subForest modTree)
walkSubtrees :: Tree PWMModel -> ScaleFactor -> Sequence -> Score -> [Trail]
walkSubtrees (Node model []) scale seq bestScore = [[PWMStep name score (-1) log10ER]]
where name = cladeName model
score = scoreSeq model seq
log10ER = log10evidenceRatio (round scale) bestScore score
walkSubtrees (Node model kids) scale seq bestScore =
map (thisstep:) $ concat $ map (\kid -> walkSubtrees kid scale seq bestKidScore) kids
where thisstep = PWMStep (cladeName model) score (-1) log10ER
score = scoreSeq model seq
log10ER = log10evidenceRatio (round scale) bestScore score
bestKidScore = maximum kidsScores
kidsScores = map (flip scoreSeq seq . rootLabel) kids
classifySequenceMulti :: Classifier -> Int -> Sequence -> [Trail]
classifySequenceMulti (PWMClassifier modTree scale) log10ERcutoff seq =
map tail $ chooseSubtrees modTree scale log10ERcutoff seq bestScore
where bestScore = maximum $ map (flip scoreSeq seq . rootLabel) (subForest modTree)
chooseSubtrees :: Tree PWMModel -> ScaleFactor -> Int -> Sequence -> Score -> [Trail]
chooseSubtrees (Node model []) scale _ seq bestScore = [[PWMStep name score (-1) log10ER]]
where name = cladeName model
score = scoreSeq model seq
log10ER = log10evidenceRatio (round scale) score bestScore
chooseSubtrees (Node model kids) scale cutoff seq bestNonTiedScore =
map (thisstep:) $ concat $ map (\kid -> chooseSubtrees kid scale cutoff seq bestNonTiedKidsScore) tiedKids
where thisstep = PWMStep (cladeName model) score (-1) log10ER
score = scoreSeq model seq
log10ER = log10evidenceRatio (round scale) score bestNonTiedScore
bestKidScore = maximum kidsScores
kidsScores = map (flip scoreSeq seq . rootLabel) kids
kidlog10ERs = map (log10evidenceRatio (round scale) bestKidScore) kidsScores
tiedKids = L.map sel1 tiedKids_tpl
(tiedKids_tpl, otherKids_tpl) = L.partition (\(_,_,er) -> er <= cutoff') $ zip3 kids kidsScores kidlog10ERs
cutoff' = fromIntegral cutoff
bestNonTiedKidsScore = case otherKids_tpl of
[] -> sel2 $ L.minimumBy (comparing sel2) tiedKids_tpl
otherwise -> sel2 $ L.maximumBy (comparing sel2) otherKids_tpl
paths :: OTUTree -> [[OTUName]]
paths (Node name []) = [[name]]
paths (Node name kids) = map (name:) $ foldl1 (++) $ map paths kids
-- finds the (first) object in a list that maximizes some metric m (think score
-- of a sequence according to a model), returns that object and its index in
-- the list, as well as the best score and second-best score themselves. Not
-- efficient, but should be ok for short lists.
-- TODO: if we no longer need the indices, this is way too complicated.
bestByExtended :: Ord b => [a] -> (a -> b) -> (a, Int, b, b)
bestByExtended objs m = (bestObj, bestNdx, bestMetricValue, secondBestMetricValue)
where sorted = L.sortBy (flip compare) metricValues
metricValues = map m objs
bestMetricValue = sorted !! 0
secondBestMetricValue = sorted !! 1
bestNdx = head $ L.elemIndices bestMetricValue metricValues
bestObj = objs !! bestNdx
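-- For example: bestByExtended [3,1,4,1,5] id == (5, 4, 5, 4)
-- (the best object 5 sits at index 4, with metric 5 and runner-up metric 4).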
-- Produces a new tree in which each node's data is the concatenation of its
-- children's data. Meant to be called on a Tree (CladeName, Alignment) whose inner
-- nodes carry empty alignments. To see it in action, do
-- putStrLn $ drawTree $ fmap show $ mergeNamedAlns treeOfLeafNamedAlns
-- in GHCi.
mergeNamedAlns :: Tree (CladeName, Alignment) -> Tree (CladeName, Alignment)
mergeNamedAlns leaf@(Node _ []) = leaf
mergeNamedAlns (Node (name,_) kids) = Node (name,mergedKidAlns) mergedKids
where mergedKids = L.map mergeNamedAlns kids
mergedKidAlns = concatMap (snd . rootLabel) mergedKids
leafOTU :: Trail -> OTUName
leafOTU trail = otuName $ last trail
-- Computes the base-10 log of the evidence ratio, i.e. log_10 (exp(delta-AIC /
-- 2)), except that I use delta-AIC' (in which the factor 2 is dropped, so I
-- avoid having to multiply by 2 only to divide by 2 again just after).
log10evidenceRatio :: Int -> Int -> Int -> Double
log10evidenceRatio scaleFactor bestScore secondBestScore = logBase 10 er
where l_min = scoreTologLikelihood scaleFactor bestScore
l_sec = scoreTologLikelihood scaleFactor secondBestScore
er = exp(deltaAIC' l_min l_sec)
-- Converts a model score (which is a scaled, rounded log-likelihood (log base
-- 10)) to a log-likelihood (log base e, i.e. ln). To do this, we _divide_ by
-- the scale factor to get an unscaled log10-likelihood, and then divide by
-- log10(e) to get a ln-based likelihood.
scoreTologLikelihood :: Int -> Int -> Double
scoreTologLikelihood scaleFactor score = log10Likelihood / logBase 10 e
where log10Likelihood = fromIntegral score / fromIntegral scaleFactor
e = exp 1.0
-- Computes the difference in AIC of two log-likelihoods, taking into account
-- that the number of parameters k is in our case the same in any two models,
-- and this cancels out, i.e. delta AIC = AIC1 - AIC2 = 2k -2 ln (L_1) - (2k -
-- 2 ln(L_2)) = -2 (ln (L_1) - ln (L_2)). Since the arguments are already _log_
-- likelihoods, the expression simplifies to -2 (l_1 - l_2), where l_1 =
-- ln(L_1), etc. I also drop the constant 2, since we'd be dividing by 2 right
-- away in evidenceRatio anyway.
deltaAIC' :: Double -> Double -> Double
deltaAIC' l1 l2 = l1 - l2
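-- Worked example (added for illustration): with a scale factor of 1000, a best
-- score of -2000 and a runner-up score of -2300 correspond to log10-likelihoods
-- of -2.0 and -2.3, so log10evidenceRatio 1000 (-2000) (-2300) ~= 0.3, i.e. the
-- best clade is about 10^0.3 ~= 2 times better supported than the runner-up.
-- In general log10 ER = (bestScore - secondBestScore) / scaleFactor.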
| tjunier/mlgsc | src/Classifier.hs | mit | 10,879 | 0 | 13 | 2,716 | 2,541 | 1,331 | 1,210 | 170 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-- | SWF Decider logic.
--
module Network.AWS.Wolf.Decide
( decide
, decideMain
) where
import Data.Aeson
import Data.Time
import Data.UUID
import Data.UUID.V4
import Network.AWS.SWF
import Network.AWS.Wolf.Ctx
import Network.AWS.Wolf.File
import Network.AWS.Wolf.Prelude
import Network.AWS.Wolf.SWF
import Network.AWS.Wolf.Types
-- | Successful end of workflow.
--
end :: MonadAmazonDecision c m => Maybe Text -> m Decision
end input = do
traceInfo "end" mempty
pure $ completeWork input
-- | Next activity in workflow to run.
--
next :: MonadAmazonDecision c m => Maybe Text -> Maybe Text -> Task -> m Decision
next input priority t = do
uid <- liftIO $ toText <$> nextRandom
traceInfo "next" [ "uid" .= uid, "task" .= t ]
pure $ scheduleWork uid (t ^. tName) (t ^. tVersion) (t ^. tQueue) input priority
-- | Failed activity, stop the workflow.
--
failed :: MonadAmazonDecision c m => m Decision
failed = do
traceInfo "failed" mempty
pure failWork
-- | Completed activity, start the next activity.
--
completed :: MonadAmazonDecision c m => HistoryEvent -> m Decision
completed he = do
traceInfo "completed" mempty
hes <- view adcEvents
(input, priority, name) <- maybeThrowIO' "No Completed Information" $ do
atcea <- he ^. heActivityTaskCompletedEventAttributes
he' <- flip find hes $ (== atcea ^. atceaScheduledEventId) . view heEventId
atsea <- he' ^. heActivityTaskScheduledEventAttributes
pure (atcea ^. atceaResult, atsea ^. atseaTaskPriority, atsea ^. atseaActivityType . atName)
p <- view adcPlan
maybe (end input) (next input priority) $
tailMay (flip dropWhile (p ^. pTasks) $ (/= name) . view tName) >>= headMay
-- | Beginning of workflow, start the first activity.
--
begin :: MonadAmazonDecision c m => HistoryEvent -> m Decision
begin he = do
traceInfo "begin" mempty
(input, priority) <- maybeThrowIO' "No Start Information" $ do
wesea <- he ^. heWorkflowExecutionStartedEventAttributes
pure (wesea ^. weseaInput, wesea ^. weseaTaskPriority)
p <- view adcPlan
maybe (end input) (next input priority) $ headMay (p ^. pTasks)
-- | Schedule workflow based on historical events.
--
schedule :: MonadAmazonDecision c m => m Decision
schedule = do
traceInfo "schedule" mempty
hes <- view adcEvents
f hes >>=
maybeThrowIO' "No Select Information"
where
f [] = pure Nothing
f (he:hes) =
case he ^. heEventType of
WorkflowExecutionStarted -> Just <$> begin he
ActivityTaskCompleted -> Just <$> completed he
ActivityTaskFailed -> Just <$> failed
_et -> f hes
-- | Decider logic - poll for decisions, make decisions.
--
decide :: MonadConf c m => Plan -> m ()
decide p =
preConfCtx [ "label" .= LabelDecide ] $ do
let queue = p ^. pStart . tQueue
runAmazonWorkCtx queue $ do
traceInfo "poll" mempty
t0 <- liftIO getCurrentTime
(token, hes) <- pollDecision
t1 <- liftIO getCurrentTime
statsIncrement "wolf.decide.poll.count" [ "queue" =. queue ]
statsHistogram "wolf.decide.poll.elapsed" (realToFrac (diffUTCTime t1 t0) :: Double) [ "queue" =. queue ]
maybe_ token $ \token' ->
runAmazonDecisionCtx p hes $ do
traceInfo "start" mempty
t2 <- liftIO getCurrentTime
schedule >>=
completeDecision token'
t3 <- liftIO getCurrentTime
traceInfo "finish" mempty
statsIncrement "wolf.decide.decision.count" [ "queue" =. queue ]
statsHistogram "wolf.decide.decision.elapsed" (realToFrac (diffUTCTime t3 t2) :: Double) [ "queue" =. queue ]
-- | Run decider from main with config file.
--
decideMain :: MonadControl m => FilePath -> FilePath -> Maybe Text -> m ()
decideMain cf pf domain =
runCtx $ runTop $ do
conf <- readYaml cf
let conf' = override cDomain domain conf
runConfCtx conf' $ do
plans <- readYaml pf
runConcurrent $
forever . decide <$> plans
| swift-nav/wolf | src/Network/AWS/Wolf/Decide.hs | mit | 4,107 | 0 | 20 | 931 | 1,201 | 588 | 613 | -1 | -1 |
-- Copyright (c) 2011 Alexander Poluektov (alexander.poluektov@gmail.com)
--
-- Use, modification and distribution are subject to the MIT license
-- (See accompanying file MIT-LICENSE)
import Distribution.Simple
main = defaultMain
| apoluektov/domino | Setup.hs | mit | 232 | 0 | 4 | 30 | 15 | 10 | 5 | 2 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE CPP #-}
module GenTM5Data (
instantiateDoc
) where
import Data.Aeson
import Data.Aeson.TH
import Data.Text (Text)
import qualified Data.Text as T
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.Hashable (Hashable)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.List as L
import GenTM5Parser (getDoc)
import qualified GenTM5Parser as P
import Prelude hiding (read)
import qualified Prelude as Pre (read)
import Control.Applicative
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Monad.Trans.State
import Control.Monad.Trans.Reader
import Data.Maybe
import Data.Either
import System.Exit (exitFailure)
import System.IO.Unsafe (unsafeDupablePerformIO)
import System.IO (hPutStrLn, stderr)
import Text.Printf
import Control.Lens
-- DBG --
import System.IO.Unsafe
unsafePeek :: (Show a) => a -> a
unsafePeek showMe = unsafePerformIO $ print showMe >> return showMe
-- DBG --
data TM5ConcreteTransition = TM5CTrans {
read :: Text
, to_state :: Text
, write :: Text
, action :: Text
} deriving (Show,Eq)
$(deriveJSON defaultOptions ''TM5ConcreteTransition)
data TM5Machine = TM5 {
name :: Text
, alphabet :: [Text]
, blank :: Text
, states :: [Text]
, initial :: Text
, finals :: [Text]
, transitions :: HashMap Text [TM5ConcreteTransition]
} deriving (Show)
$(deriveJSON defaultOptions ''TM5Machine)
-- Rich transitions: embed instantiation metadata alongside the serializable structure.
data RichCTransition = RCTrans {
cTransRCT :: TM5ConcreteTransition -- toJSON
, skellNameRCT :: Text -- template name, i.e. HM key to children template trans.
, paramsRCT :: [Text] -- resolved params for this trans., used by children
} deriving (Show,Eq)
data StateInstance = SI {
nameSI :: Text
, paramsSI :: [Text]
} deriving (Show)
exitError :: String -> a
exitError s = unsafeDupablePerformIO $ hPutStrLn stderr s >> exitFailure >> return undefined
lookupOrDie :: (?deathMessage :: ShowS, Show k, Hashable k, Eq k) => k -> HashMap k v -> v
lookupOrDie key hm = HM.lookupDefault (exitError $ ?deathMessage $ show key) key hm
getPlaceHolder = getDoc ^. P.templatePatterns ^. P.inheritedNth
getRCPOf = getDoc ^. P.templatePatterns ^. P.reciprocal
getSameAsRead = getDoc ^. P.templatePatterns ^. P.readPat
getSameAsState = getDoc ^. P.templatePatterns ^. P.currentState
getGlobAny = getDoc ^. P.alphabet ^. P.globAnyInput
getGlobFree = getDoc ^. P.alphabet ^. P.globFreeSymbols
getFreeSyms = getDoc ^. P.alphabet ^. P.freeSymbols
getRCPFree = getDoc ^. P.alphabet ^. P.freeSymbolsRCP
getCollection = getDoc ^. P.alphabet ^. P.collection
getExhaustiveSet = Set.fromList$ getCollection ++ getRCPFree
makeState :: StateInstance -- The current concrete state and its params (for recursive transitions)
          -> StateInstance -- A template state to instantiate with concrete params.
          -> StateInstance -- The resulting concrete name and list of concrete params.
makeState currentState (SI templName params) =
if templName == getSameAsState then currentState else
let bits = T.splitOn getPlaceHolder templName :: [Text]
in case bits of
[single] -> SI single []
_ -> SI (T.concat$ L.concat$ L.transpose$ [bits, params]) params
#define CALLJUSTORDIE(tailInfo, param) (let ?dbgInfo = ?dbgInfo ++ tailInfo in justOrDie param)
justOrDie :: (?dbgInfo :: String) => Maybe a -> a
justOrDie = fromMaybe (let ?dbgInfo = "justOrDie: DYING -> " ++ ?dbgInfo in exitError ?dbgInfo)
-- Takes a selector
-- The selector can be either (rcp to Nth sym) "~~%%N" or "%%N" (Nth sym)
-- Returns the index data borne by the selector,
-- as either Left iRcp or Right i.
-- Non-selector or malformed selector will raise a deadly exception
-- through the use of fromJust.
indexFromSelector :: (?dbgInfo :: String) => Text -> Either Int Int
indexFromSelector sel =
let stripRcp = T.stripPrefix getRCPOf sel
-- ?dbgInfo = ?dbgInfo ++ "; indexFromSelector"
doRead = (Pre.read :: String -> Int)
. T.unpack . CALLJUSTORDIE("; indexFromSelector",)
. (T.stripPrefix getPlaceHolder)
in case stripRcp of
Just t -> Left$ doRead t
Nothing -> Right$ doRead sel
-- Provided a bare, literal sym from the `freeSyms` set.
-- May throw, provided a non-bare, non-freeSym symbol.
resolveRCP :: (?dbgInfo :: String) => Text -> Text
resolveRCP t =
let ?dbgInfo = ?dbgInfo ++ "; resolveRCP"
in case elemIndex t getFreeSyms of
Just i -> getRCPFree !! i
Nothing -> getFreeSyms !! justOrDie (elemIndex t getRCPFree)
-- From state instance params, a selector string: return the targeted parameter.
-- A malformed selector,
-- An improper (OOB index) selector,
-- A bad freeSyms <=> RCP mapping
-- will raise a deadly exception.
paramFromSelector :: (?dbgInfo :: String) => ([Text], Text) -> Text
paramFromSelector (params, sel) =
let ?dbgInfo = ?dbgInfo ++ "; paramFromSelector" in
case indexFromSelector sel of
Right i -> params !! i
Left i -> resolveRCP (params !! i)
-- CAVEAT: 'action' field is %%'ed for return_* states !
-- CAVEAT²: 'action' field is %%'ed with non-symbols !
--
--
-- Draw obtained syms from the pool !
-- Update env: => accum' <- ((accum `union` gotSyms) `inter` pool)
-- pool' <- (pool `diff` (pool `inter` accum'))
-- => Env pool' accum'
-- Starting and reference pools must include reciprocal-free-syms !
data Env = Env { availablePool :: Set Text, stateParams :: [Text], readEntry :: Text }
gatherSyms :: [Text] -> State Env [Text]
gatherSyms [] = return []
gatherSyms (sym:ls) = do
e@(Env pool params readEnt) <- get
let gotSyms =
case sym of
sym | sym == getGlobFree -> Set.fromList getFreeSyms -- Globbed...
| sym == getGlobAny -> Set.fromList getCollection -- ...categories.
_ -> let isRCP = T.isInfixOf getRCPOf sym -- Single static|template
rcpStripped = spliceOut getRCPOf sym
in let resolve = let ?dbgInfo = "sym:" ++ T.unpack sym in resolveSelector isRCP rcpStripped
in Set.singleton$ runReader resolve e
let pool' = \uSyms -> pool `Set.difference` uSyms
in let updateEnv uSyms (Env lp pms r) = Env (pool' uSyms) pms r
in modify (updateEnv gotSyms)
let gathered = Set.toList$ pool `Set.intersection` gotSyms
in return . (++) gathered =<< gatherSyms ls
where -- -- -- -- -- -- -- Helpers -- -- -- -- -- -- --
spliceOut intron txt = T.concat$ T.splitOn intron txt
resolveSelector :: (?dbgInfo :: String) => Bool -> Text -> Reader Env Text
resolveSelector isRcp remainder = do
readEnt <- asks readEntry
params <- asks stateParams
let morphRcpM dbg t = return $ if isRcp
then
let ?dbgInfo = ?dbgInfo ++ dbg++ "->gatherSyms:RCP: [" ++ T.unpack t ++ "]" in resolveRCP t
else id t
in do
case remainder of
rem
| T.isInfixOf getPlaceHolder rem ->
let dbgInfo = printf "gatherSyms:Straight: %s" (T.unpack rem)
in morphRcpM dbgInfo$ paramFromSelector (params, remainder)
| T.isInfixOf getSameAsRead rem -> let dbgInfo = printf "READPATTERN: readEnt: %s, rem: %s" (T.unpack readEnt) (T.unpack rem) in morphRcpM dbgInfo readEnt
| otherwise -> let dbgInfo = "I AM OTHERWISE" in morphRcpM dbgInfo rem
instantiateTrans :: [(Text, Text)] -- Template IO couples
-> StateT (Set Text) -- Allowed Sym pool to draw from
(Reader (StateInstance, P.M5Transition)) -- reference concrete state, template transition
[RichCTransition] -- Resulting concrete transitions
instantiateTrans [] = return []
instantiateTrans ((is,os):lio) = do
(curSt@(SI parentState concreteParams)
, P.M5Trans _ skToSt tpms act) <- lift ask
symPool <- get
let (iConcreteSyms, Env iRemPool _ _) = runState (gatherSyms [is])
(Env symPool concreteParams is)
let resolveSyms = \el poo rs -> evalState (gatherSyms el)
(Env poo concreteParams rs)
collection = getExhaustiveSet
oConcreteSyms = concat$ resolveSyms [os] collection <$> iConcreteSyms
pConcretePrms = resolveSyms tpms collection <$> iConcreteSyms
cActs = if T.isInfixOf getPlaceHolder act
then (\ps -> let ?dbgInfo = "instantiateTrans" in paramFromSelector (ps,act)) <$> pConcretePrms
else repeat act
lsStates = makeState curSt <$> SI skToSt <$> pConcretePrms
in let serialCTrans = zipWith4 TM5CTrans
iConcreteSyms
(nameSI <$> lsStates)
oConcreteSyms
cActs
in let cTrans = zipWith3 RCTrans
serialCTrans
(repeat skToSt)
(paramsSI <$> lsStates)
in do
-- return $ seq (unsafePeek curSt) ()
put iRemPool
instantiateTrans lio
>>= return . (++) (filter (\tr -> to_state (cTransRCT tr) /= parentState) cTrans)
-- ForEach I:O couple
-- comprehend template I:O couples
-- instantiate State:
-- comprehend template States
-- makeTransition
type WIPTransitions = HashMap Text [RichCTransition]
makeTransitions :: StateInstance -- Previous concrete state, whence the transition is starting from.
-> [P.M5Transition] -- Associated template transitions
-> State (Set Text) -- Track consumed symbols as a State
WIPTransitions -- fold resulting concrete transition maps.
makeTransitions si@(SI parentState pParams) lSkellTr = foldM instantiateCondense HM.empty lSkellTr
where
instantiateCondense :: WIPTransitions
-> P.M5Transition
-> State (Set Text) WIPTransitions
instantiateCondense accuHM skellTr = do
pool <- get
let foldingLRCTrM hm v = return$ HM.insertWith (flip (++)) parentState v hm
let saneSkellTr = if skellTr ^. P.toStatePattern == getSameAsState
then set P.toStatePattern parentState . set P.toStateParams pParams$ skellTr
else skellTr
let iol = P._inputOutput saneSkellTr
in let (lRichTr,remPool) =
flip runReader (si, saneSkellTr)
$ runStateT (instantiateTrans iol) pool
:: ([RichCTransition], Set Text)
in do
put remPool
foldM foldingLRCTrM accuHM ((:[]) <$> lRichTr)
type ConcreteTransitions = HashMap Text [TM5ConcreteTransition]
dispatchInstantiation :: [(Text,[RichCTransition])]
-> StateT WIPTransitions
(State (Set Text)) -- final states instances
ConcreteTransitions
dispatchInstantiation [] = return . HM.map (\lrct -> cTransRCT <$> lrct) =<< get
dispatchInstantiation ((cState, lRCTr):ls) = do
moreTasks <- mapM stateFold lRCTr >>= return . concat
dispatchInstantiation (ls ++ moreTasks)
where -----------------------------------------------------------------------------
skellHM = getDoc ^. P.transitions
fetchSkTr el =
let ?deathMessage = (++) "dispatchInstantiation: could not find state: "
in lookupOrDie skellKey skellHM
where
skellKey = skellNameRCT el
stateFold :: RichCTransition
-> StateT WIPTransitions (State (Set Text)) [(Text,[RichCTransition])]
stateFold = \el -> do
let skSt = skellNameRCT el
let callMkTrans = \si -> \lSkTr -> makeTransitions si lSkTr `evalState` getExhaustiveSet
let hmRich = callMkTrans (SI cState$ paramsRCT el) (fetchSkTr el)
when (skSt `elem` P._finalStates getDoc)$
lift$ modify (Set.insert skSt)
modify (HM.unionWith (\l r -> union (nub l) r) hmRich)
return$ HM.toList hmRich
data LocalEnv = LEnv {
_curCState :: Text
, _curCParams :: [Text]
, _curSkellState :: Text
, _curRead :: Text
, _usedSyms :: [Text]
, _transSyms :: [Text]
}
$(makeLenses ''LocalEnv)
data MetaEnv = MEnv {
_finals :: Set Text
, _wipTrans :: WIPTransitions
}
$(makeLenses ''MetaEnv)
resolveTemplateSym :: Text
                   -> Reader LocalEnv
                      [Text]
resolveTemplateSym tSym = do
    used <- asks _usedSyms
    params <- asks _curCParams
    readEnt <- asks _curRead
    let inter l = (used \\ Set.toList getExhaustiveSet) `intersect` l
    return$ case () of
      _ | tSym == getGlobAny -> inter getCollection
        | tSym == getGlobFree -> inter getFreeSyms
        | T.isInfixOf getPlaceHolder tSym ->
            inter$ (let ?dbgInfo = "resolveTemplateSym" in paramFromSelector (params, tSym)) : []
        | otherwise -> case T.breakOn getSameAsRead tSym of
            (a,b) | not$ T.null b -> inter$ resolveStatic (T.concat [a, readEnt]) : []
                  | otherwise -> inter$ resolveStatic a : []
    where
        resolveStatic sym = let ?dbgInfo = "resolveTemplateSym" in
            if T.isInfixOf getRCPOf sym then resolveRCP sym else sym
-- depth first...
rebootMakeTrans :: StateT MetaEnv
(Reader LocalEnv)
()
rebootMakeTrans = do
    -- placeholder body; the intended algorithm is sketched in the comments below
    return ()
-- ((Initialization))
-- ForEach starting state template
-- ForEach (R,W)
-- log . instantiate: templateTrans -> (curState,R,W,params) -> WipConcreteTransition
-- ((Recursion))
-- given WipConcreteTransitions :
-- ForEach concrete state
-- ForEach (curState,R,W,params,toTemplate)
-- if instantiate (toTemplate,r,w,params) NotMemberOf (log:)WipConcreteTransitions:
-- log . instantiate: templateTrans -> (curState,R,W,params) -> WipConcreteTransition
instantiateDoc :: TM5Machine
instantiateDoc =
let dm = (++) "instantiateDoc: could not find state: "
doc = getDoc
alphaDoc = doc ^. P.alphabet
iniState = doc ^. P.initialState
staticFinals = Set.fromList$ doc ^. P.finalStates
skellHM = doc ^. P.transitions
iniTrans = let ?deathMessage = dm in lookupOrDie iniState skellHM
--bootstrapInstance = HM.map (\lrct -> cTransRCT<$>lrct)$ evalState
bootstrapInstance = evalState
(makeTransitions (SI iniState []) iniTrans)
getExhaustiveSet
(concreteTrans, concreteFinals) =
-- (,) bootstrapInstance []
((dispatchInstantiation$ HM.toList bootstrapInstance) `evalStateT` bootstrapInstance)
`runState` staticFinals
in TM5
"UniversalMachine"
(getCollection ++ getRCPFree)
(alphaDoc ^. P.hostBlank)
(HM.keys concreteTrans)
iniState
(Set.toList concreteFinals)
concreteTrans
| range12/there-is-no-B-side | tools/generators/GuestEncoder/GenTM5Data.hs | mit | 15,569 | 23 | 26 | 4,508 | 3,704 | 1,961 | 1,743 | -1 | -1 |
{-|
Module : Collapse
Description : Collapses all multi-clause functions into single-body ones
-}
module Latro.Collapse where
import Control.Monad.Except
import Latro.Ast
import Latro.Compiler
import Latro.Errors
collectFunDefs :: RawId -> [RawAst Exp] -> ([RawAst FunDef], [RawAst Exp])
collectFunDefs _ [] = ([], [])
collectFunDefs id (eFunDef@(ExpFunDef funDef@(FunDefFun _ fid _ _)) : es)
| id == fid =
let (funDefs, es') = collectFunDefs id es
in (funDef : funDefs, es')
| otherwise = ([], eFunDef : es)
collectFunDefs _ es = ([], es)
collapseBindingExp :: RawId -> [RawAst Exp] -> Collapsed (RawAst Exp, [RawAst Exp])
collapseBindingExp id (e@(ExpTopLevelAssign _ (PatExpId _ pid) _) : es)
| id == pid = return (e, es)
| otherwise = throwError $ ErrNoBindingAfterTyAnn id
collapseBindingExp id (e@(ExpAssign _ (PatExpId _ pid) _) : es)
| id == pid = return (e, es)
| otherwise = throwError $ ErrNoBindingAfterTyAnn id
collapseBindingExp id (e@(ExpFunDef (FunDefFun p fid _ _)) : es)
| id == fid = do
let (funDefs, es') = collectFunDefs fid (e : es)
eFunDef <- collapse $ ExpFunDefClauses p fid funDefs
case funDefs of
[] -> throwError $ ErrNoBindingAfterTyAnn fid
_ -> return (eFunDef, es')
| otherwise = throwError $ ErrNoBindingAfterTyAnn id
collapseBindingExp id _ = throwError $ ErrNoBindingAfterTyAnn id
collapse :: RawAst Exp -> Collapsed (RawAst Exp)
collapse (ExpAssign p patE e) = do
e' <- collapse e
return $ ExpAssign p patE e'
collapse (ExpFunDef (FunDefFun p id argPatEs bodyE)) = do
bodyE' <- collapse bodyE
return $ ExpFunDef $ FunDefFun p id argPatEs bodyE'
collapse (ExpModule p id bodyEs) = do
bodyEs' <- collapseEs bodyEs
return $ ExpModule p id bodyEs'
collapse (ExpBegin p bodyEs) = do
bodyEs' <- collapseEs bodyEs
return $ ExpBegin p bodyEs'
collapse (ExpFunDefClauses ap aid funDefs) = do
funDefs' <- mapM collapseFunDef funDefs
return $ ExpFunDefClauses ap aid funDefs'
collapse e = return e
collapseFunDef :: RawAst FunDef -> Collapsed (RawAst FunDef)
collapseFunDef (FunDefFun p id patE bodyE) = do
bodyE' <- collapse bodyE
return $ FunDefFun p id patE bodyE'
collapseEs :: [RawAst Exp] -> Collapsed [RawAst Exp]
collapseEs [] = return []
collapseEs (ExpTopLevelTyAnn tyAnn@(TyAnn _ aid _ _ _) : es) = do
(e, es') <- collapseBindingExp aid es
es'' <- collapseEs es'
return (ExpWithAnn tyAnn e : es'')
collapseEs (ExpTyAnn tyAnn@(TyAnn _ aid _ _ _) : es) = do
(e, es') <- collapseBindingExp aid es
es'' <- collapseEs es'
return (ExpWithAnn tyAnn e : es'')
collapseEs (ExpFunDef (FunDefFun p fid argPatEs bodyE) : es) = do
bodyE' <- collapse bodyE
let (funDefs, es') = collectFunDefs fid es
funDef = FunDefFun p fid argPatEs bodyE'
eFunDef = ExpFunDefClauses p fid (funDef : funDefs)
es'' <- collapseEs es'
return (eFunDef : es'')
collapseEs (ExpBegin p bodyEs : es) = do
bodyEs' <- collapseEs bodyEs
es' <- collapseEs es
return (ExpBegin p bodyEs' : es')
collapseEs (ExpProtoImp p synTy protoId straints bodyEs : es) = do
bodyEs' <- collapseEs bodyEs
es' <- collapseEs es
return (ExpProtoImp p synTy protoId straints bodyEs' : es')
collapseEs (e : es) = do
e' <- collapse e
es' <- collapseEs es
return (e' : es')
type Collapsed a = CompilerPass CompilerEnv a
runCollapseFunClauses :: RawAst CompUnit -> Collapsed (RawAst CompUnit)
runCollapseFunClauses (CompUnit pos exps) = do
exps' <- collapseEs exps
return $ CompUnit pos exps'
| Zoetermeer/latro | src/Latro/Collapse.hs | mit | 3,533 | 0 | 13 | 718 | 1,443 | 698 | 745 | 84 | 2 |
-- Copyright (C) 2013 Jorge Aparicio
import Data.Maybe (mapMaybe)
main :: IO()
main
= print
. head
. filter isPalindrome
. mapMaybe fst
$ iterate next (Nothing, nums)
where nums = [[x * y | y <- [x,x-1..start]] | x <- [end,end-1..start]]
start = 100 :: Int
end = 999
next :: Integral a => (Maybe a, [[a]]) -> (Maybe a, [[a]])
next (_, [[]]) = (Nothing, [[]])
next (_, (x:xs):[]) = (Just x, [xs])
next (_, x@(xh:xt):y@(yh:_):zs)
| xh > yh = (Just xh, xt:y:zs)
| otherwise = (h, x:zs')
where (h, zs') = next (Nothing, y:zs)
isPalindrome :: (Integral a, Show a) => a -> Bool
isPalindrome n = s == reverse s
where s = show n
| japaric/eulermark | problems/0/0/4/004.hs | mit | 669 | 0 | 12 | 168 | 409 | 224 | 185 | 21 | 1 |
-- SYNTAX TEST "source.haskell"
module Intro where
-- <- keyword.other
-- ^^^^^ support.other.module
-- ^^^^^ keyword.other
-- ^^^^^^^^^^^^^^^ meta.declaration.module
import Language.Haskell.Liquid.Prelude (liquidAssert)
-- ^^^^^^^^^^^^ meta.declaration.exports entity.name.function
-- <- keyword.other
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ support.other.module
zero' :: Int
zero' = 0
{-@ zero' :: {v: Int | 0 <= v} @-}
-- ^^^^^^^^^^^^^^^^^ liquid.type
-- ^^ keyword.operator
-- ^ constant.numeric
-- ^^^ entity.name.type
-- ^^^^^ entity.name.function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation
-- <- block.liquidhaskell
{-@ zero'' :: {v: Int | (0 <= v && v < 100) } @-}
-- ^^^^^^ entity.name.function
-- ^^^ entity.name.type
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ liquid.type
-- ^^ keyword.operator
-- ^ keyword.operator
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation
-- <- block.liquidhaskell
zero'' :: Int
-- <- meta.function.type-declaration
zero'' = 0
-- <- identifier
{-@ zero''' :: {v: Int | ((v mod 2) = 0) } @-}
zero''' :: Int
-- <- meta.function.type-declaration
zero''' = 0
-- <- identifier
{-@ zero'''' :: {v: Int | v = 0 } @-}
zero'''' :: Int
zero'''' = 0
{-@ zero :: {v: Int | ((0 <= v) && ((v mod 2) = 0) && (v < 100)) } @-}
zero :: Int
zero = 0
{-@ error' :: {v: String | false } -> a @-}
error' :: String -> a
error' = error
{-@ lAssert :: {v:Bool | (Prop v)} -> a -> a @-}
lAssert :: Bool -> a -> a
lAssert True x = x
lAssert False _ = error' "lAssert failure"
divide :: Int -> Int -> Int
divide n 0 = error' "divide by zero"
divide n d = n `div` d
{-@ divide :: Int -> {v: Int | v != 0 } -> Int @-}
{-@ divide' :: Int -> {v:Int | v /= 0} -> Int @-}
divide' :: Int -> Int -> Int
divide' n 0 = error' "divide by zero"
divide' n d = lAssert (d /= 0) $ n `div` d
abz :: Int -> Int
abz n | 0 < n = n
| otherwise = 0 - n
{-@ abz :: Int -> {v: Int | 0 <= v } @-}
-- ^^^^^^^^^^^^^^^^^^ liquid.type
-- ^^^ entity.name.type
-- ^^^ entity.name.function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation
-- <- block.liquidhaskell
{-@ truncate :: Int -> Int -> Int @-}
truncate i max
| i' <= max' = i
| otherwise = max' * (i `divide` i')
where i' = abz i
max' = abz max
{-@ truncate' :: Int -> Int -> Int @-}
truncate' i max
| i' <= max' = i
| otherwise = lAssert (i' /= 0) $ max' * (i `divide` i')
where i' = abz i
max' = abz max
{-@ truncate'' :: Int -> Int -> Int @-}
truncate'' i max
| i' <= max' = i
| otherwise = liquidAssert (i' /= 0) $ max' * (i `divide` i')
where i' = abz i
max' = abz max
{-@ listAssoc :: x:List a -> y:List a -> z:List a
-> {(append x (append y z)) == (append (append x y) z) } @-}
-- ^^^^^^ ^ ^^^^^^ ^ ^ ^^^^^^ ^^^^^^ ^ ^ ^ identifier
{-@ type Something = SomethingElse @-}
-- <- meta.declaration.type
{-@ instance Something where
-- <- meta.declaration.instance
asd = instance
-- <- identifier
@-}
-- >> =source.haskell
| atom-haskell/language-haskell | spec/fixture/liquidhaskell.hs | mit | 3,345 | 0 | 10 | 968 | 512 | 291 | 221 | 41 | 1 |
class YesNo a where
yesno :: a -> Bool
instance YesNo Int where
yesno 0 = False
yesno _ = True
instance YesNo [a] where
yesno [] = False
yesno _ = True
instance YesNo Bool where
yesno = id
instance YesNo (Maybe a) where
yesno Nothing = False
yesno (Just _) = True
yesnoIf :: (YesNo y) => y -> a -> a -> a
yesnoIf yesnoVal yesResult noResult =
if yesno yesnoVal then yesResult else noResult
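-- Example uses (added for illustration):
--   yesnoIf []         "yes" "no" == "no"
--   yesnoIf (Just 'a') "yes" "no" == "yes"   -- any 'Just' counts as yes
yesnoExample :: String
yesnoExample = yesnoIf (Just (0 :: Int)) "yes" "no"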
-- yesno :: [a] -> Bool
-- yesno [] = False
-- yesno _ = True
-- yesno :: Maybe a -> Bool
-- yesno (Just _) = True
-- yesno Nothing = False | NickAger/LearningHaskell | CIS194/typeclassexperiments.hs | mit | 576 | 0 | 8 | 158 | 176 | 93 | 83 | 16 | 2 |
-- student ID 1: a00368770
-- student ID 2: a01273613
-- Problem 1: student average
-- Function that returns the average grade of the student with the given ID
promedio :: Integer -> [(Integer, [Char], [Double])] -> Double
promedio _ [] = 0.0 -- base case
-- compute the average
promedio mat1 ((mat2, _, parcialista) :resto) =
    if mat1 == mat2 then -- compute this student's average
        sum parcialista /
        fromIntegral (length parcialista)
    else promedio mat1 resto -- keep searching through the remaining records
-- Problem 2: leaves
-- Function that returns all the leaves of a binary tree
data BinTree a = Empty | Node (BinTree a) a (BinTree a) deriving (Eq, Show)
hojas :: BinTree a -> [a]
hojas Empty = [] -- base case
hojas (Node left current right) = [current]++hojas left++hojas right -- add the element to the list
-- while children keep being found, search the left and right subtrees
-- until only the children are returned
-- Problem 3: binariza
-- Function that replaces odd numbers with 0 and even numbers with 1
binariza :: Integer -> [(Char, [Integer])]
-- The Greek letter λ is replaced by the backslash "\";
-- the dot "." is replaced by ->.
-- For example, λx.x² is written in Haskell as: \x->x^2 or, equivalently, \x->x*x.
quad = \x->x*x
expr = \x->x^2+2*x+3
raiz = \x->(sqrt x)
map (f x = x*x) [1..10] -- <interactive>:1:9: parse error on input ‘=’
map (\x->x*x) [1..10]
f x = x*x
f [1..10]
fac n = if n==1 then 1 else (n*fac(n-1))
map (\x->(fac x)) [1..10]
{- To load a module
in the interpreter: ':module List'
-}
deleteBy (\x y -> y*x == 48) 6 [6,8,10,12] --fail
let p x y = if x `mod` y == 0 then True else False
let remove y list = [x | x <- list, not (p x y)]
Prelude> remove 4 [4..19]
[5,6,7,9,10,11,13,14,15,17,18,19]
[ x | x <- [1..4], y <- [x..5], (x+y) `mod` 2 == 0 ]
| tonussi/freezing-dubstep | pratica-03/Intro.hs | mit | 783 | 11 | 9 | 168 | 399 | 225 | 174 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-} -- allows "forall t. Moment t"
import Network.Socket
import System.IO
import System.Exit
import Control.Concurrent
import Control.Monad
import Data.Maybe
import Data.Word
import qualified Data.Map.Strict as Map
import Graphics.UI.Gtk as Gtk hiding (Event)
import Graphics.UI.Gtk.Builder
import Reactive.Util
import Reactive.Banana
import Reactive.Banana.Gtk
import IrcServer as S
import IrcMessage as M
data MsgPart = MsgIcon String | MsgText [TextTag] String
main :: IO ()
main = do
initGUI
bldWnd <- builderNew
builderAddFromFile bldWnd "dirc.glade"
bld <- builderNew
builderAddFromFile bld "server.glade"
dlg <- builderGetObject bldWnd castToWindow "main-dialog"
closeBtn <- builderGetObject bldWnd castToButton "close-button"
msgTabs <- builderGetObject bldWnd castToNotebook "message-tabs"
msgPage <- builderGetObject bld castToBox "server-page"
msgTxt <- builderGetObject bld castToTextView "message-text"
notebookAppendPage msgTabs msgPage "Server"
buffer <- textViewGetBuffer msgTxt
tagTbl <- textBufferGetTagTable buffer
fontTag <- textTagNew Nothing
set fontTag [ textTagFont := "Courier 12" ]
textTagTableAdd tagTbl fontTag
motdTag <- textTagNew Nothing
set motdTag [ textTagParagraphBackground := "yellow", textTagWeight := 700 ]
textTagTableAdd tagTbl motdTag
boldTag <- textTagNew Nothing
set boldTag [ textTagWeight := 700 ]
textTagTableAdd tagTbl boldTag
ulTag <- textTagNew Nothing
set ulTag [ textTagUnderline := UnderlineSingle ]
textTagTableAdd tagTbl ulTag
italicTag <- textTagNew Nothing
set italicTag [ textTagStyle := StyleItalic ]
textTagTableAdd tagTbl italicTag
let colors = [ "white", "black", "navy", "green", "red", "brown"
, "purple", "olive drab", "yellow", "lime green", "turquoise4"
, "cyan1", "blue", "magenta", "gray55", "gray90", "white"
]
let insertTag attr num name map = do tag <- textTagNew Nothing
set tag [ attr := name ]
textTagTableAdd tagTbl tag
return $ Map.insert num tag map
let mkColorMap f = foldM (\m (k, v) -> f k v m) Map.empty $ zip [0..] colors
fgColorMap <- mkColorMap $ insertTag textTagForeground
bgColorMap <- mkColorMap $ insertTag textTagBackground
exit <- newEmptyMVar
esmsg <- newAddHandler
esquit <- newAddHandler
let handler = tryEvent $ do liftIO $ fire esquit ()
closeBtn `on` Gtk.buttonReleaseEvent $ handler
dlg `on` Gtk.deleteEvent $ handler
let toMsg :: [Int] -> [TextTag] -> [IrcText] -> [MsgPart]
toMsg [] tags (Text text:ms) = (MsgText tags text:toMsg [] tags ms)
toMsg (fgc:bgc:[]) tags (Text text:ms) = let fgTag = maybeToList $ Map.lookup fgc fgColorMap
bgTag = maybeToList $ Map.lookup bgc bgColorMap
in (MsgText (concat [fgTag, bgTag, tags]) text:toMsg [fgc, bgc] tags ms)
toMsg colors tags (Bold:ms) = toMsg colors (boldTag:tags) ms
toMsg colors tags (Underlined:ms) = toMsg colors (ulTag:tags) ms
toMsg colors tags (Italic:ms) = toMsg colors (italicTag:tags) ms
toMsg colors tags (Reset:ms) = toMsg colors [fontTag] ms
toMsg (_:bgc:[]) tags (Foreground c:ms) = toMsg (c:bgc:[]) tags ms
toMsg (fgc:_:[]) tags (Background c:ms) = toMsg (fgc:c:[]) tags ms
toMsg (fgc:bgc:[]) tags (Reverse:ms) = toMsg (bgc:fgc:[]) tags ms
toMsg colors tags [] = []
let handleMsg msg = postGUIAsync $ do
case msg of
(Notice sender target text) -> insertMsg (MsgIcon "icon-info.svg":toMsg [1, 0] [fontTag] text)
(Generic sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text
(Welcome sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text
(YourHost sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text
(Created sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text
(MotD sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] text
(MotDStart sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] [Text " "]
(MotDEnd sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] [Text " "]
(Channel sender target channel _ topic) -> insertMsg $ MsgText [fontTag] (channel ++ ": "):toMsg [1, 0] [fontTag] topic
msg -> putStrLn $ show msg
insertMsg msg = do
m <- textBufferGetInsert buffer
i <- textBufferGetIterAtMark buffer m
case msg of
(MsgIcon icon:ms) -> do b <- pixbufNewFromFile icon
textBufferInsertPixbuf buffer i b
textBufferInsertAtCursor buffer " "
insertMsg ms
(MsgText tags text:ms) -> do o <- textIterGetOffset i
textBufferInsertAtCursor buffer text
i1 <- textBufferGetIterAtOffset buffer o
i2 <- textBufferGetIterAtMark buffer m
let applyTags (t:ts) = do textBufferApplyTag buffer t i1 i2
applyTags ts
applyTags [] = do return ()
applyTags tags
insertMsg ms
[] -> textBufferInsertAtCursor buffer "\n"
handleQuit = putMVar exit ExitSuccess
network <- compile $ setupNetwork (esmsg, esquit) handleMsg handleQuit
actuate network
sChan <- newChan
S.startServer "irc.freenode.net" 6665 esmsg sChan
--S.startServer "irc.dal.net" 7000 esmsg sChan
writeChan sChan M.Nick { nickname = "dbanerjee1979" }
writeChan sChan M.User { username = "guest", modeMask = 0, realname = "Joe" }
writeChan sChan M.Join { channel = "#haskell" }
widgetShowAll dlg
forkOS mainGUI
signal <- takeMVar exit
postGUIAsync mainQuit
exitWith signal
setupNetwork :: forall t. Frameworks t => (EventSource Message, EventSource ()) -> (Message -> IO ()) -> IO () -> Moment t ()
setupNetwork (esmsg, esquit) handleMsg handleQuit = do
emsg <- fromAddHandler (addHandler esmsg)
equit <- fromAddHandler (addHandler esquit)
reactimate $ handleMsg <$> emsg
reactimate $ handleQuit <$ equit
| dbanerjee1979/dirc | Dirc.hs | mit | 7,122 | 0 | 23 | 2,461 | 2,158 | 1,064 | 1,094 | 125 | 22 |
{-
see Chapter 17 of the Haskell 2010 Language Report
-}
module Data.Complex where
| evilcandybag/JSHC | hslib/Data/Complex.hs | mit | 86 | 0 | 3 | 16 | 7 | 5 | 2 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Network.JsonRpc.Server
import System.IO (BufferMode (LineBuffering), hSetBuffering, stdout)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Control.Monad (forM_, when)
import Control.Monad.Trans (liftIO)
import Control.Monad.Except (throwError)
import Control.Monad.Reader (ReaderT, ask, runReaderT)
import Control.Concurrent.MVar (MVar, newMVar, modifyMVar)
main = do
hSetBuffering stdout LineBuffering
contents <- B.getContents
count <- newMVar 0
forM_ (B.lines contents) $ \request -> do
response <- runReaderT (call methods request) count
B.putStrLn $ fromMaybe "" response
type Server = ReaderT (MVar Integer) IO
methods :: [Method Server]
methods = [add, printSequence, increment]
add = toMethod "add" f (Required "x" :+: Required "y" :+: ())
where f :: Double -> Double -> RpcResult Server Double
f x y = liftIO $ return (x + y)
printSequence = toMethod "print_sequence" f params
where params = Required "string" :+:
Optional "count" 1 :+:
Optional "separator" ',' :+: ()
f :: String -> Int -> Char -> RpcResult Server ()
f str count sep = do
when (count < 0) $ throwError negativeCount
liftIO $ print $ intercalate [sep] $ replicate count str
negativeCount = rpcError (-32000) "negative count"
increment = toMethod "increment_and_get_count" f ()
where f :: RpcResult Server Integer
f = ask >>= \count -> liftIO $ modifyMVar count inc
where inc x = return (x + 1, x + 1)
| grayjay/json-rpc-server | demo/Demo.hs | mit | 1,690 | 0 | 14 | 398 | 560 | 297 | 263 | 38 | 1 |
sommeDeXaY x y = if x > y
then 0
else x + sommeDeXaY (x+1) y
somme :: [Int] -> Int
somme [] = 0
somme (x:xs) = x + somme xs
last' :: [a] -> a
last' xs = head (reverse xs)
init' :: [a] -> [a]
init' xs = reverse (tail (reverse xs))
-- Function !!
(!!!) :: [a] -> Int -> a
(!!!) [] n = error "Index too large"
(!!!) (x:xs) n = if (n == 0)
then x
else (!!!) xs (n -1)
-- Function ++
plus' :: [a] -> [a] -> [a]
plus' [] ys = ys
plus' (x:xs) ys = x:(plus' xs ys)
-- Function concat
concate' :: [[a]] -> [a]
concate' [] = []
concate' [[]] = []
concate' (xs:xss) = xs ++ concate' xss
| Debaerdm/L3-MIAGE | Programmation Fonctionnel/TP/TP1/sommeDeXaY.hs | mit | 594 | 0 | 9 | 151 | 366 | 199 | 167 | 22 | 2 |
module IdealGas
( idealgas, idealgas_of_veff, n, kT, of_effective_potential )
where
import Expression
nQ :: Expression RealSpace
nQ = (mass*kT/2/pi)**1.5
where mass = var "mH2O" "m_{H_2O}" (18.01528 * gpermol) -- uses molecular weight of water
gpermol = var "gpermol" "\\frac{\\textrm{g}}{\\textrm{mol}}" 1822.8885
idealgas :: Expression Scalar
idealgas = "Fideal" === integrate (kT*n*(log(n/nQ) - 1))
of_effective_potential :: Expression Scalar -> Expression Scalar
of_effective_potential = substitute (r_var "veff") (r_var "x") .
substitute (r_var "x") (exp (- r_var "veff" / kT))
idealgas_of_veff :: Expression Scalar
idealgas_of_veff = "ideal" === integrate (-n_of_veff*(veff + kT*(1 + log(nQ))))
where veff = r_var "Veff"
n_of_veff = exp(-veff/kT)
kT :: Type a => Expression a
kT = s_tex "kT" "kT"
n :: Expression RealSpace
n = "n" === r_var "x"
| droundy/deft | src/haskell/IdealGas.hs | gpl-2.0 | 918 | 1 | 14 | 187 | 333 | 174 | 159 | 20 | 1 |
--
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables,PatternGuards,TupleSections,ViewPatterns,FlexibleContexts #-}
module Vm.Actions
( configureServiceVm
, startServiceVm
, trashUnusedServiceVms
, createVm, CreateVmPms(..), defaultCreateVmPms
, removeVm
, startVm
, startVmInternal
, rebootVm
, sleepVm
, resumeFromSleep
, hibernateVm
, shutdownVm
, shutdownVmIfSafe
, forceShutdownVm
, forceShutdownVmIfSafe
, pauseVm
, unpauseVm
, switchVm
, reallySwitchVm
, switchGraphicsFallback
, bindVmPcis
, loginToVm
, addNicToVm, addDefaultNicToVm, removeNicFromVm
, addDiskToVm, addDefaultDiskToVm, removeDiskFromVm, addVhdDiskToVm, addPhyDiskToVm
, addVmFirewallRule, deleteVmFirewallRule, applyVmFirewallRules, unapplyVmFirewallRules
, applyVmBackendShift
, disconnectFrontVifs
, createAndAddDiskToVm
, createVhd
, exportVmSwitcherInfo
, modifyVmNic
, modifyVmDisk
, modifyVmPciPtRules
, mountVmDisk
, unmountVmDisk
, generateCryptoKeyIn
, generateCryptoKey
, parallelVmExec
, parallelVmExecByType
, parallelVmExecInStages
, suspendToFile, resumeFromFile
, getMeasureFailAction, setMeasureFailAction
, vmSuspendImageStatePath
, runEventScript
, changeVmNicNetwork
, removeVmEnvIso
-- property accessors
, setVmType
, setVmWiredNetwork, setVmWirelessNetwork, setVmGpu, setVmCd
, setVmSeamlessTraffic
, setVmStartOnBoot, setVmHiddenInSwitcher, setVmHiddenInUi, setVmMemory, setVmName
, setVmImagePath, setVmSlot, setVmPvAddons, setVmPvAddonsVersion
, setVmTimeOffset, setVmCryptoUser, setVmCryptoKeyDirs, setVmAutoS3Wake
, setVmNotify, setVmHvm, setVmPae, setVmApic, setVmViridian, setVmNx, setVmSound, setVmDisplay
, setVmBoot, setVmCmdLine, setVmKernel, setVmInitrd, setVmAcpiPt, setVmVcpus, setVmCoresPerSocket
, setVmKernelExtract
, setVmInitrdExtract
, setVmMemoryStaticMax
, setVmMemoryMin
, setVmVideoram, setVmPassthroughMmio, setVmPassthroughIo, setVmFlaskLabel
, setVmAmtPt, setVmHap, setVmSmbiosPt, setVmDescription
, setVmExtraXenvm, setVmExtraHvm
, setVmStartOnBootPriority, setVmKeepAlive, setVmProvidesNetworkBackend
, setVmProvidesGraphicsFallback, setVmShutdownPriority, setVmSeamlessId
, setVmStartFromSuspendImage, setVmQemuDmPath, setVmQemuDmTimeout, setVmTrackDependencies
, setVmSeamlessMouseLeft, setVmSeamlessMouseRight, setVmOs, setVmControlPlatformPowerState
, setVmOemAcpiFeatures, setVmUsbEnabled, setVmUsbControl, setVmStubdom, setVmCpuid
, setVmGreedyPcibackBind
, setVmRunPostCreate, setVmRunPreDelete, setVmRunOnStateChange, setVmRunOnAcpiStateChange
, setVmRunPreBoot
, setVmRunInsteadofStart
, setVmUsbGrabDevices
, setVmNativeExperience, setVmShowSwitcher, setVmWirelessControl
, setVmXciCpuidSignature
, setVmS3Mode
, setVmS4Mode
, setVmVsnd
, setVmRealm
, setVmSyncUuid
, setVmIcbinnPath
, setVmOvfTransportIso
, setVmDownloadProgress
, setVmReady
, setVmProvidesDefaultNetworkBackend
, setVmVkbd
, setVmVfb
, setVmV4V
, setVmRestrictDisplayDepth
, setVmRestrictDisplayRes
, setVmPreserveOnReboot
, setVmBootSentinel
, setVmHpet
, setVmTimerMode
, setVmNestedHvm
, setVmSerial
, setVmAutolockCdDrives
, EventHookFailMode(..)
) where
import Prelude hiding (catch, mapM, mapM_)
import Data.Char
import Data.List
import Data.Maybe
import Data.Bits
import Data.String
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map as M
import qualified Data.Set as Set
import Text.Printf
import Control.Arrow
import Control.Monad
import Control.Monad.Error hiding (liftIO)
import Control.Monad.Reader
import Control.Applicative
import Control.Concurrent
import qualified Control.Exception as E
import System.FilePath.Posix
import System.Posix.Files
import System.Process
import System.Exit
import System.IO
import System.Timeout
import Data.Time
import Directory
import System.Directory (createDirectoryIfMissing)
import qualified Data.Foldable
import Tools.Log
import Tools.XenStore
import Tools.Misc
import Tools.Process
import Tools.Text
import Tools.IfM
import Tools.FreezeIOM
import Tools.Future
import Tools.File (fileSha1Sum)
import Tools.Apptool
import Vm.Types
import Vm.Config
import Vm.ConfigWriter
import Vm.Dm
import Vm.Queries
import Vm.QueriesM
import Vm.Pci
import Vm.Utility
import Vm.Policies
import Vm.Templates
import Vm.Monad
import Vm.Monitor
import Vm.State
import Vm.DomainCore
import {-# SOURCE #-} Vm.React
import qualified Vm.V4VFirewall as Firewall
import Vm.Balloon
import XenMgr.Rpc
import qualified XenMgr.Connect.Xenvm as Xenvm
import qualified XenMgr.Connect.GuestRpcAgent as RpcAgent
import XenMgr.Connect.NetworkDaemon
import XenMgr.Connect.Xenvm ( resumeFromSleep, resumeFromFile, suspendToFile )
import XenMgr.Connect.InputDaemon
import XenMgr.Config
import XenMgr.Errors
import XenMgr.Db
import XenMgr.Host
import XenMgr.Diskmgr
import XenMgr.Notify
import XenMgr.XM
import XenMgr.CdLock
import {-# SOURCE #-} XenMgr.PowerManagement
import Rpc.Autogen.XenmgrNotify
import XenMgr.Expose.ObjectPaths
data EventHook
= EventScript FilePath
| EventRpc (Maybe Uuid) RpcCall
deriving (Show)
data EventHookFailMode
= HardFail
| ContinueOnFail
deriving (Eq, Show)
vmSuspendImageStatePath :: Uuid -> String
vmSuspendImageStatePath uuid = "/xenmgr/service-vm-snapshot/" ++ show uuid ++ "/state"
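-- Values observed on this xenstore node in this module (illustrative summary; the "snapshot"
-- value is expected to be written by the service vm side itself):
--   "start"             - booting a service vm that has a suspend image configured
--   "start-no-snapshot" - booting a service vm without a suspend image
--   "snapshot"          - a memory image snapshot is being requested
--   "snapshot-done"     - the snapshot has been taken
--   "resume"            - the vm is about to be resumed from its suspend image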
startServiceVm :: Uuid -> XM ()
startServiceVm uuid = xmContext >>= \xm -> liftRpc $
isRunning uuid >>= \r ->
case r of
False -> do info $ "starting service vm " ++ show uuid
file <- getVmStartFromSuspendImage uuid
context <- rpcGetContext
if (not . null $ file)
then liftIO $ do
xsWrite (vmSuspendImageStatePath uuid) "start"
void . forkIO . xsWaitFor (vmSuspendImageStatePath uuid) $
do status <- (rpc context $ snapshot_request xm file)
case status of
Right rv -> return rv
Left err -> warn (show err) >> return True
else liftIO $ do
xsWrite (vmSuspendImageStatePath uuid) "start-no-snapshot"
runXM xm (startVm uuid)
True -> info $ "service vm " ++ show uuid ++ " already running"
where
snapshot_request xm file =
do state <- liftIO $ xsRead (vmSuspendImageStatePath uuid)
debug $ "service vm " ++ show uuid ++ " suspend image state = " ++ show state
if state /= Just "snapshot"
then return False -- continue waiting
else do
info $ "service vm " ++ show uuid ++ " requested a memory image snapshot"
-- take a vm snapshot
info $ "taking memory image snapshot for service vm " ++ show uuid
suspendToFile uuid file
info $ "DONE taking memory image snapshot for service vm " ++ show uuid
liftIO $ xsWrite (vmSuspendImageStatePath uuid) "snapshot-done"
                    -- double start, TODO: maybe won't be necessary
runXM xm (startVm uuid)
-- finished waiting on this watch
return True
parseEventHooks :: String -> [EventHook]
parseEventHooks = catMaybes . map parseEventHook . split ';'
parseEventHook :: String -> Maybe EventHook
parseEventHook s = parse s where
parse "" = Nothing
parse s | "rpc:" `isPrefixOf` s = rpc (drop 4 s) -- assume rpc call
parse s = return (EventScript s) -- assume script file
rpc s = do
let kv = keyvals (filter (not . isSpace) s)
objpath = fromMaybe "/" ("objpath" `M.lookup` kv)
vm = fromString <$> "vm" `M.lookup` kv
dest <- "destination" `M.lookup` kv
interface <- "interface" `M.lookup` kv
member <- "member" `M.lookup` kv
return . EventRpc vm $ RpcCall (fromString dest) (fromString objpath) (fromString interface) (fromString member) []
keyvals = M.fromList . catMaybes . map one . split ',' where
one x = case split '=' x of
[k,v] -> Just (k,dequote v)
_ -> Nothing
dequote (x:xs) | is_quote x = reverse (dequote $ reverse xs) where is_quote x = x == '\'' || x =='"'
dequote xs = xs
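-- Illustrative examples of the hook syntax parsed above (destination/interface names are
-- hypothetical); several hooks may be chained with ';':
--   "/usr/lib/xenmgr/my-hook.sh"
--       -> EventScript "/usr/lib/xenmgr/my-hook.sh"
--   "rpc:destination=com.example.Agent,interface=com.example.Agent,member=Notify"
--       -> EventRpc Nothing (objpath defaults to "/"; the 'vm' and 'objpath' keys are optional)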
runEventScript :: EventHookFailMode -> Uuid -> (Uuid -> Rpc (Maybe String)) -> [String] -> Rpc Bool
runEventScript failmode vm get_hooks args =
run_hooks =<< get_hooks vm where
quote x = "\"" ++ x ++ "\""
run_hooks Nothing = return False
run_hooks (Just str) = (catchScriptErrors (mapM_ run_hook . parseEventHooks $ str)) >> return True
run_hook (EventScript file) = liftIO $ run_file file
run_hook (EventRpc vm call) = void $ run_rpc vm (setCallArgs call args)
run_rpc vm rpc =
do info $ "event-rpc " ++ show rpc
case vm of
Nothing -> rpcCall rpc
Just uuid -> do
domid <- getDomainID uuid
let fail = error $ "required VM " ++ show uuid ++ " is not running"
call domid = rpcWithDomain (fromIntegral domid) (rpcCall rpc)
maybe fail call domid
run_file path =
do info $ "event-script " ++ show path ++ " " ++ (intercalate " ") (map quote args)
exec path
return ()
exec path =
-- stderr redirected to warn, stdout to info
runInteractiveProcess path args Nothing Nothing >>= \ (_, stdout, stderr, h) ->
do hSetBuffering stdout NoBuffering
hSetBuffering stderr NoBuffering
contents_mv <- newEmptyMVar
forkIO $ consume_lines stderr warn >> return ()
forkIO $ putMVar contents_mv =<< consume_lines stdout info
contents <- takeMVar contents_mv
-- force evaluation of contents
exitCode <- contents `seq` waitForProcess h
case exitCode of
ExitSuccess -> return $ contents
_ -> error $ "event-script: " ++ path ++ " FAILED."
consume_lines h feed = go h [] where
go h ls = continue =<< E.try (hGetLine h)
where continue :: Either E.SomeException String -> IO String
continue (Right l) = feed l >> go h (l:ls)
continue (Left _) = return . unlines . reverse $ ls
catchScriptErrors run = run `catchError` reportErrors
reportErrors e = warn (show e) >> when (failmode == HardFail) (throwError e)
setCallArgs c args = c { callArgs = map toVariant args }
configureServiceVm :: String -> Rpc (Maybe Uuid)
configureServiceVm tag =
do overwrite_settings <- appOverwriteServiceVmSettings tag
template <- liftIO $ getServiceVmTemplate tag
case getUuidInTemplate template of
Nothing -> error "service vm template does not have uuid"
Just uuid ->
do exists <- dbExists $ "/vm/" ++ show uuid
when ( overwrite_settings || not exists ) $ exportTemplate Nothing template >> return ()
info $ "configured service vm " ++ show tag
return . Just $ uuid
-- remove service vms which are not in active templates from db
trashUnusedServiceVms :: Rpc ()
trashUnusedServiceVms =
do tags <- liftIO enumServiceVmTags
mapM_ (trash tags) =<< getVms
where
trash active uuid =
getVmType uuid >>= trash_type where
trash_type (ServiceVm tag) | not (tag `elem` active) = dbRm $ "/vm/" ++ show uuid
trash_type _ = return ()
-- Returns first empty slot in 1..9 range, or Nothing if all are in use
findEmptySlot :: Rpc (Maybe Int)
findEmptySlot =
fmap (first . inverse) . getSlots =<< getGuestVms
where
getSlots = mapM getSlot
getSlot uuid = fromMaybe 0 <$> readConfigProperty uuid vmSlot
inverse slots = [1..9] \\ slots
first = listToMaybe . sort
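-- e.g. if the guest vms currently occupy slots [1,3], this returns Just 2; with all of 1..9 taken
-- it returns Nothing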
-- Create a VM
data CreateVmPms
= CreateVmPms { cvmUuid :: Maybe Uuid
, cvmTemplate :: Maybe String
, cvmExtraJson :: String
, cvmName :: Maybe String
, cvmDescription :: Maybe String
, cvmImagePath :: Maybe String
, cvmAutoSlot :: Bool }
defaultCreateVmPms :: CreateVmPms
defaultCreateVmPms
= CreateVmPms
Nothing
Nothing
""
Nothing
Nothing
Nothing
True
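-- Callers would typically override just the fields they need via record update, e.g.
-- (values below are purely illustrative):
--   defaultCreateVmPms { cvmName = Just "my-vm", cvmDescription = Just "scratch vm" }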
-- | bind passthrough devices to pciback
-- this is typically done early (on daemon start) to prevent the devices from going into
-- use by other, non-passthrough vms
bindVmPcis :: Uuid -> Rpc ()
bindVmPcis uuid = whenM (getVmGreedyPcibackBind uuid)
(mapM_ bind =<< filter (isconfig . pciPtSource) <$> getPciPtDevices uuid)
where
bind = liftIO . pciBindPciback . devAddr . pciPtDevice
isconfig SourceConfig = True
isconfig _ = False
createVm :: Bool -> CreateVmPms -> XM Uuid
createVm unrestricted pms = do
unlessM (liftM2 (||) (return unrestricted) policyQueryVmCreation) failActionSuppressedByPolicy
info "creating new VM..."
  -- creation needs a lock around it, otherwise it produces non-unique slots now and then
xmWithVmCreationLock do_create
where
do_create = do
let extra = templateFromString (cvmExtraJson pms)
template <- mergeTemplates <$> (
case cvmTemplate pms of
Nothing -> liftIO getNewVmTemplate
Just s | null (strip s) -> return nullTemplate
Just n -> liftIO $ getAnyVmTemplate n
) <*> pure extra
let auto_allocate_slot =
cvmAutoSlot pms && isNothing (getSlotInTemplate template)
maybeSlot <- liftRpc findEmptySlot
when (auto_allocate_slot && isNothing maybeSlot) $ failTooManyVms
uuid <- case catMaybes [ cvmUuid pms, getUuidInTemplate template ] of
[] -> liftIO uuidGen
(u:_) -> return u
info $ "exporting vm template for " ++ show uuid
liftRpc (exportTemplate (Just uuid) template)
info $ "setting initial properties for " ++ show uuid
-- Give it seamless ID, equal to uuid from creation
setVmSeamlessId uuid (show uuid)
-- Give it name and slot
whenM (null <$> liftRpc (getVmName uuid)) $
saveConfigProperty uuid vmName $
fromMaybe ("name-" ++ show uuid) (cvmName pms)
when auto_allocate_slot $
let Just n = maybeSlot in
saveConfigProperty uuid vmSlot n
-- other optional settings
maybeM (cvmDescription pms) $ saveConfigProperty uuid vmDescription
maybeM (cvmImagePath pms) $ saveConfigProperty uuid vmImagePath
-- Begin monitoring
monitorAndReactVm uuid
return uuid
maybeM x f = maybe (return ()) f x
-- Remove a VM
removeVm :: Uuid -> Rpc ()
removeVm uuid =
  do -- removing a running VM is not allowed
whenM (isRunning uuid) failCannotRemoveRunning
info $ "Removing a VM " ++ show uuid
runEventScript HardFail uuid getVmRunPreDelete [uuidStr uuid]
    -- Delete VHDs, but only if this is not a managed VM
unlessM (isManagedVm uuid) removeVhds
-- remove ovffiles
liftIO $ readProcessOrDie "rm" ["-rf", "/storage/ovffiles/" ++ show uuid] ""
-- Away from database
dbRm $ "/vm/" ++ show uuid
dbRm $ "/dom-store/" ++ show uuid
-- Need to quit xenvm
  -- FIXME: cleanly stop monitoring events
  removeDefaultEvents uuid
Xenvm.quitXenvm uuid
notifyVmDeleted uuid
where
removeVhds = Data.Foldable.mapM_ (removeDiskFiles uuid) =<< getDisks uuid
removeVhd d
| not (diskShared d), diskType d == VirtualHardDisk
= do refs <- nub . map fst <$> getVhdReferences (diskPath d)
-- only remove when only this vm references the vhd
when (refs == [uuid]) $ do
let p = diskPath d
b | ".vhd" `isSuffixOf` p = take (length p - 4) p
| otherwise = p
snappaths = map (b ++) [".vhd.hib", ".snap.tmp.vhd", ".snap"]
liftIO . whenM (doesFileExist p) $ do
info $ "Removing VHD file " ++ p
removeLink p
let rmSnapshot p = liftIO . whenM (doesFileExist p) $ do
info $ "Removing snapshot file " ++ p
removeLink p
mapM_ rmSnapshot $ snappaths
removeVhd _ = return ()
-- which vms are referencing this vhd
-- TODO: handle differencing disks
getVhdReferences :: FilePath -> Rpc [(Uuid, Disk)]
getVhdReferences vhd = concat <$> (mapM (diskVhdReferences vhd) =<< getVms) where
diskVhdReferences vhd vm = zip (repeat vm) . filter (references vhd) . M.elems <$> getDisks vm where
references vhd disk = diskPath disk == vhd
startVm :: Uuid -> XM ()
startVm uuid = do
withPreCreationState uuid $ do
ran <- liftRpc $ runEventScript HardFail uuid getVmRunInsteadofStart [uuidStr uuid]
when (not ran) $ startVmInternal uuid
-- Start a VM (it might not actually start; any of the checks below can abort it)
startVmInternal :: Uuid -> XM ()
startVmInternal uuid = do
unlessM (dbExists $ "/vm/" ++ show uuid) $ error ("vm does not have a database entry: " ++ show uuid)
info $ "starting VM " ++ show uuid
config <- prepareAndCheckConfig uuid
case config of
Just c -> info ("done checks for VM " ++ show uuid) >> bootVm c
Nothing-> return ()
where
prepareAndCheckConfig uuid = do
ok <- stage1 -- early tests / dependency startup
if (not ok)
then return Nothing
else do
-- this (config gather) needs to be done after dependency startup to get correct
-- backend domids
config <- liftRpc $ getVmConfig uuid True
info $ "gathered config for VM " ++ show uuid
ok <- stage2 config
if ok then return (Just config) else return Nothing
stage1
= startupCheckVmState uuid
`followby` startupCheckHostStates uuid
`followby` startupDependencies uuid
`followby` startupExtractKernel uuid
`followby` startupMeasureVm uuid
stage2 config
= startupCheckNics config
`followby` startupCheckGraphicsConstraints config
`followby` startupCheckIntelConstraints config
`followby` startupCheckAMTConstraints config
`followby` startupCheckPCI config
`followby` startupCheckOemFeatures config
`followby` startupCheckSyncXTComfortable uuid
followby f g =
do ok <- f
if ok then g else return False
startupCheckVmState :: Uuid -> XM Bool
startupCheckVmState uuid
= do running <- liftRpc (isRunning uuid)
if running
then do warn ("request to start vm " ++ show uuid ++ " BUT it is already running")
return False
else do return True
startupCheckHostStates :: Uuid -> XM Bool
startupCheckHostStates uuid
= f =<< (,) <$> liftRpc getHostState <*> liftIO getCurrentRunLevel
where
f (_, Just 0) = badRunLevel 0
f (_, Just 6) = badRunLevel 6
f (s, _) | s /= HostIdle = badHostState s
f _ = return True
msg reason = warn ("ignoring request to start VM " ++ show uuid ++ " because of " ++ reason)
badRunLevel l = msg ("current runlevel: " ++ show l) >> return False
badHostState s = msg ("current host state: " ++ show s) >> return False
startupMeasureVm :: Uuid -> XM Bool
startupMeasureVm uuid
= do measure <- getVmMeasured uuid
if not measure
then return True
else xmRunVm uuid $ addVmDiskHashes >> checkVmDiskHashes
startupDependencies :: Uuid -> XM Bool
startupDependencies uuid
= do deps <- liftRpc $ getVmTrackDependencies uuid
if deps
then do
startDependencies uuid
checkDependencies uuid
else do
return True
where
checkDependencies uuid = liftRpc $ do
missing <- filterM (fmap not . isRunning) =<< (getVmDependencies uuid)
if (null missing)
then do return True
else do warn $ "missing dependencies: " ++ show missing
return False
startDependencies uuid =
do dependentVms <- liftRpc $ getVmDependencies uuid
unless (null dependentVms) $
info $ "vm dependencies: " ++ show dependentVms
mapM_ startVm =<< (liftRpc $ filterM (fmap not . isRunning) dependentVms)
startupExtractKernel :: Uuid -> XM Bool
startupExtractKernel uuid
= do liftRpc $ extractKernelFromPvDomain uuid
liftRpc $ extractInitrdFromPvDomain uuid
return True
startupCheckNics :: VmConfig -> XM Bool
startupCheckNics cfg
= mapM_ verify (vmcfgNics cfg) >> return True
where
verify nic
| nicdefBackendDomid nic == Nothing && (nicdefBackendUuid nic /= Nothing || nicdefBackendName nic /= Nothing)
= failNetworkDomainNotRunning
| otherwise
= return ()
startupCheckGraphicsConstraints :: VmConfig -> XM Bool
startupCheckGraphicsConstraints cfg
| (vmcfgGraphics cfg == HDX)
= hdxCount >> vtd >> return True
| otherwise
= return True
where
hdxCount = liftRpc $ do
hdx <- getRunningHDX
case vmcfgVgpuMode cfg of
Just vgpu | vgpuMaxVGpus vgpu < length hdx + 1 -> failCannotStartBecauseHdxRunning
_ -> return ()
vtd = do
hvmInfo <- liftIO getHvmInfo
let vtd = hvmDirectIOEnabled hvmInfo
      when (not vtd) $ failCannotStartHdxWithoutVtD
startupCheckAMTConstraints :: VmConfig -> XM Bool
startupCheckAMTConstraints cfg
= do whenM (liftRpc $ getVmAmtPt $ vmcfgUuid cfg) $ unique
return True
where
unique = liftRpc $
getGuestVms >>= filterM isRunning >>= filterM getVmAmtPt >>= maybe_fail
where maybe_fail [] = return ()
maybe_fail (uuid:_) = failCannotStartBecauseAmtPtRunning
startupCheckOemFeatures :: VmConfig -> XM Bool
startupCheckOemFeatures config = liftRpc $ do
let features = vmcfgOemAcpiFeatures config
when features $
getGuestVms >>= filterM isRunning >>= filterM getVmOemAcpiFeatures >>= maybe_fail
return True
where
maybe_fail [] = return ()
maybe_fail (uuid:_) = failCannotStartBecauseOemFeaturesRunning
startupCheckPCI :: VmConfig -> XM Bool
startupCheckPCI cfg = liftRpc $
do in_use <- map pciPtDevice <$> online_devices
case vm_devices `intersect` in_use of
[] -> return True
(dev:_) -> failDeviceAlreadyPassedThrough (show dev)
where
vm_devices
= map pciPtDevice (vmcfgPciPtDevices cfg)
online_devices
= concat <$> (getGuestVms >>= filterM isRunning >>= mapM getPciPtDevices)
startupCheckIntelConstraints :: VmConfig -> XM Bool
startupCheckIntelConstraints cfg
= do mapM_ verify devs
return True
where
devs = vmcfgPciPtDevices cfg
verify (PciPtDev { pciPtDevice=d }) = liftRpc $
do info <- liftIO $ pciGetInfo d
case info of
Nothing -> return ()
Just info -> do
-- intel gpu cannot be passed as secondary adapter
case (pciinfoVendor info,pciinfoClass info) of
(0x8086,0x300) -> check d
(0x8086,0x380) -> check d
_ -> return ()
where
check d =
do let boot_vga_file = devSysfsPath d </> "boot_vga"
boot_vga_exists <- liftIO $ doesFileExist boot_vga_file
when boot_vga_exists $
do contents <- chomp <$> (liftIO $ readFile boot_vga_file)
case contents of
"0" -> failGraphicsCantBePassedAsSecondaryAdapter
_ -> return ()
startupCheckSyncXTComfortable :: Uuid -> XM Bool
startupCheckSyncXTComfortable uuid
= do bg_op_ok <- liftRpc (getVmReady uuid)
-- FIXME: perform this check only for syncxt vms if still needed
-- when (not bg_op_ok) $ failVmNotReady
return True
withPreCreationState :: Uuid -> XM a -> XM a
withPreCreationState uuid f =
do s <- getVmInternalState uuid
when (s /= PreCreate) $ xmRunVm uuid $ vmEvalEvent (VmStateChange PreCreate)
f `catchError` (\e -> do
s <- getVmInternalState uuid
-- have to mop up here if something went wrong in pre-create state
-- since xenvm does not know about this state
when (s == PreCreate) $ do
xmRunVm uuid $ vmEvalEvent (VmStateChange Shutdown)
throwError e)
bootVm :: VmConfig -> XM ()
bootVm config
= do -- fire xenvm up if necessary, send configuration to xenvm
monitor <- vm_monitor <$> xmRunVm uuid vmContext
liftRpc $ do
whenM (not <$> ensureXenvm monitor config) $ do -- starts xenvm + writes config if not up
-- xenvm was already up, need to send it new config
updateXVConfig config
withPreCreationState uuid create
where
uuid = vmcfgUuid config
updateXVConfig :: VmConfig -> Rpc ()
updateXVConfig c = writeXenvmConfig c >> Xenvm.readConfig uuid
create = do
-- create environment iso
whenM (getVmOvfTransportIso uuid) . liftIO $ do
createDirectoryIfMissing True envIsoDir
generateEnvIso uuid (envIsoPath uuid)
info $ "generated ovf environment ISO " ++ (envIsoPath uuid) ++ " for VM " ++ show uuid
-- try some ballooning if we lack memory
bootstrap <- xmWithBalloonLock $ do
liftRpc $ do
balanced <-
do ok <- balanceToBoot uuid
if ok
then return True
else do
-- another attempt with minimal memory
                       -- TODO: this should work out the maximum amount of memory we can use
                       -- instead of retrying with the minimum
let config' = config { vmcfgMemoryMib = vmcfgMemoryMinMib config }
updateXVConfig config'
balanceToBoot uuid
when (not balanced) $ failNotEnoughMemory
-- Clear the hibernated property
saveConfigProperty uuid vmHibernated False
-- run custom pre boot action
liftRpc $ runEventScript HardFail uuid getVmRunPreBoot [uuidStr uuid]
-- fork xenvm vm startup in the background
bootstrap <- future $ liftRpc $ do
suspend_file <- getVmStartFromSuspendImage uuid
exists <-
if null suspend_file
then return False
else liftIO (doesFileExist suspend_file)
if not exists
then do Xenvm.startPaused uuid
else do liftIO $ xsWrite (vmSuspendImageStatePath uuid) "resume"
resumeFromFile uuid suspend_file False True
return bootstrap
-- fork vm creation phase handling in the background
phases <- future handleCreationPhases
      -- ensure bootstrap and phase handling terminate synchronously before returning (and errors get propagated)
force bootstrap
force phases
xsp domid = "/local/domain/" ++ show domid
writable domid path = do
xsWrite path ""
xsSetPermissions path [ Permission 0 []
, Permission (fromIntegral domid) [PermRead,PermWrite]]
setupCDDrives :: Uuid -> Rpc ()
setupCDDrives uuid = do
  -- enable cd autolocking if configured to do so
setVmAutolockCdDrives uuid =<< appGetAutolockCdDrives
-- make bsg device status & req nodes writable by domain
whenDomainID_ uuid $ \domid -> liftIO $ do
writable domid (xsp domid ++ "/bsgdev")
writable domid (xsp domid ++ "/bsgdev-req")
-- read drives media state
liftIO $
mapM_ updateCdDeviceMediaStatusKey =<< liftIO getHostBSGDevices
handleCreationPhases :: XM ()
handleCreationPhases = do
waitForVmInternalState uuid CreatingDevices 30
-- BEFORE DEVICE MODEL
info $ "pre-dm setup for " ++ show uuid
liftRpc $ do
twiddlePermissions uuid
exportVmSwitcherInfo uuid
setupCDDrives uuid
-- some little network plumbing
gives_network <- getVmProvidesNetworkBackend uuid
when gives_network $ whenDomainID_ uuid $ \domid -> do
liftIO $ xsWrite backendNode (show domid)
liftIO $ xsChmod backendNode "r0"
stubdom <- getVmStubdom uuid
when stubdom $ updateStubDomainID uuid
applyVmFirewallRules uuid
-- notify that v4v rules have been set up, so xenvm can unpause stubdom
whenDomainID_ uuid $ \domid -> liftIO $
xsWrite (domainXSPath domid ++ "/v4v-firewall-ready") "1"
info $ "done pre-dm setup for " ++ show uuid
waitForVmInternalState uuid Created 60
sentinel <- sentinelPath
-- allow writing to sentinel
maybe (return()) (\p -> liftIO $ xsWrite p "" >> xsChmod p "b0") sentinel
-- AFTER DOMAIN CREATION
liftRpc $ do
-- assign sticky cd drives
mapM_ (\d -> assignCdDevice d uuid) =<< getVmStickyCdDevices uuid
info $ "unpause " ++ show uuid
Xenvm.unpause uuid
-- wait for bootup services to complete if using sentinel
maybe (return()) (\p -> liftIO
. void
. timeout (10^6 * 60)
. xsWaitFor p
$ ((\v -> isJust v && v /= Just "") `fmap` xsRead p)
) sentinel
applyVmBackendShift uuid
return ()
sentinelPath = do
domid <- getDomainID uuid
case domid of
Nothing -> return Nothing
Just domid -> do
s <- getVmBootSentinel uuid
case s of
Nothing -> return Nothing
Just p -> return (Just $ domainXSPath domid ++ "/" ++ p)
--FIXME: get rid of this when/if we remove dbusbouncer from ndvm
twiddlePermissions :: Uuid -> Rpc ()
twiddlePermissions uuid =
-- make the /local/domain/<n>/vm node readable by all
-- make the snapshot node readable/writable by itself
whenDomainID_ uuid $ \domid -> let domid' = fromIntegral domid in
liftIO $ do xsSetPermissions ("/local/domain/" ++ show domid ++ "/vm") [Permission 0 [PermRead]]
xsSetPermissions ("/vm/" ++ show uuid ++ "/uuid") [Permission 0 [PermRead]]
have_path <- xsRead (vmSuspendImageStatePath uuid)
when (have_path /= Nothing) $
xsSetPermissions (vmSuspendImageStatePath uuid) [Permission 0 [], Permission domid' [PermRead,PermWrite]]
removeVmEnvIso :: Uuid -> IO ()
removeVmEnvIso uuid = whenM (doesFileExist p) (removeFile p) where p = envIsoPath uuid
-- update the backend/frontend driver paths to point to new domains,
-- needed after backend reboot
applyVmBackendShift :: Uuid -> XM ()
applyVmBackendShift bkuuid = do
target_ <- getDomainID bkuuid
case target_ of
Nothing -> warn $ printf "failed to move devices backend; domain %s does not exist" (show bkuuid)
Just target ->
do vms <- filter ((/=) bkuuid) <$> (filterM isRunning =<< getVms)
devices <- liftRpc $ filter (uses bkuuid) . concat <$> mapM getdevs vms
when (not . null $ devices) $ do
info $ printf "moving device backends for %s" (show bkuuid)
mapM_ (liftRpc . move target) devices
where
getdevs uuid = zip (repeat uuid) <$> getdevs' uuid
getdevs' uuid = whenDomainID [] uuid $ \domid -> do
-- TODO: only supporting vif,vwif devices atm
vifs <- liftIO $ getFrontDevices VIF domid
vwifs <- liftIO $ getFrontDevices VWIF domid
return (vifs ++ vwifs)
uses bkuuid (_,d) = bkuuid == dmfBackUuid d
move target (_,d) = moveBackend (dmfType d) (dmfDomid d) (dmfID d) target
disconnectFrontVifs :: Uuid -> Rpc ()
disconnectFrontVifs back_uuid =
do vms <- filter ((/=) back_uuid) <$> (filterM isRunning =<< getVms)
devices <- filter (uses back_uuid) . concat <$> mapM getdevs vms
mapM_ disconnect devices
where
getdevs uuid = zip (repeat uuid) <$> getdevs' uuid
getdevs' uuid = whenDomainID [] uuid $ \domid -> do
-- TODO: only supporting vif,vwif devices atm
vifs <- liftIO $ getFrontDevices VIF domid
vwifs <- liftIO $ getFrontDevices VWIF domid
return (vifs ++ vwifs)
uses bkuuid (_,d) = bkuuid == dmfBackUuid d
disconnect (front_uuid, dev) = do
let nid@(XbDeviceID nic_id) = dmfID dev
info $ "disconnecting nic uuid=" ++ show front_uuid ++ " id=" ++ show nic_id
Xenvm.connectVif front_uuid nid False
-- Reboot a VM
rebootVm :: Uuid -> Rpc ()
rebootVm uuid = do
info $ "rebooting VM " ++ show uuid
-- Write XENVM configuration file
writeXenvmConfig =<< getVmConfig uuid True
-- Ask xenvm kindly to reload it
Xenvm.readConfig uuid
  -- Request reboot, via the guest agent when it is running, otherwise through XENVM
use_agent <- RpcAgent.guestAgentRunning uuid
if use_agent
then RpcAgent.reboot uuid
else Xenvm.reboot uuid
shutdownVm :: Uuid -> Rpc ()
shutdownVm uuid = do
info $ "shutting down VM " ++ show uuid
acpi <- getVmAcpiState uuid
use_agent <- RpcAgent.guestAgentRunning uuid
-- if it is asleep, we need to wake it first
when (acpi == 3) $ do
info $ "resuming " ++ show uuid ++ " from S3 first.."
resumeS3AndWaitS0 uuid
info $ "resuming " ++ show uuid ++ " from S3 DONE."
if use_agent
then RpcAgent.shutdown uuid
else Xenvm.shutdown uuid
canIssueVmShutdown :: Vm Bool
canIssueVmShutdown = liftRpc . Xenvm.isXenvmUp =<< vmUuid
shutdownVmIfSafe :: Vm ()
shutdownVmIfSafe = safe =<< canIssueVmShutdown where
safe False = vmUuid >>= \uuid -> warn $ "ignoring request to shutdown VM " ++ show uuid
safe _ = liftRpc . shutdownVm =<< vmUuid
forceShutdownVm :: Uuid -> Rpc ()
forceShutdownVm uuid = do
info $ "forcibly shutting down VM " ++ show uuid
Xenvm.destroy uuid
forceShutdownVmIfSafe :: Vm ()
forceShutdownVmIfSafe = safe =<< canIssueVmShutdown where
safe False = vmUuid >>= \uuid -> warn $ "ignoring request to forcibly shutdown VM " ++ show uuid
safe _ = liftRpc . forceShutdownVm =<< vmUuid
pauseVm :: Uuid -> Rpc ()
pauseVm uuid = do
info $ "pausing VM " ++ show uuid
Xenvm.pause uuid
unpauseVm :: Uuid -> Rpc ()
unpauseVm uuid = do
info $ "unpausing VM " ++ show uuid
Xenvm.unpause uuid
assertPvAddons :: Uuid -> Rpc ()
assertPvAddons uuid = getVmPvAddons uuid >>= \addons -> when (not addons) failActionRequiresPvAddons
sleepVm :: Uuid -> Rpc ()
sleepVm uuid = do
acpi <- getVmAcpiState uuid
case acpi of
3 -> info $ show uuid ++ " is already in S3, not sending it into S3 again."
_ -> do info $ "sending VM " ++ show uuid ++ " into S3"
use_agent <- RpcAgent.guestAgentRunning uuid
if use_agent
then RpcAgent.sleep uuid
else do assertPvAddons uuid
Xenvm.sleep uuid
hibernateVm :: Uuid -> Rpc ()
hibernateVm uuid = do
info $ "sending VM " ++ show uuid ++ " into S4"
running <- isRunning uuid
when running $ do
use_agent <- RpcAgent.guestAgentRunning uuid
when (not use_agent) $ assertPvAddons uuid
acpi <- getVmAcpiState uuid
-- if it is asleep, we need to wake it first
when (acpi == 3) $ resumeS3AndWaitS0 uuid
if use_agent
then RpcAgent.hibernate uuid
else Xenvm.hibernate uuid
saveConfigProperty uuid vmHibernated True
resumeS3AndWaitS0 :: Uuid -> Rpc ()
resumeS3AndWaitS0 uuid = do
acpi <- getVmAcpiState uuid
when (acpi == 3) $ do
Xenvm.resumeFromSleep uuid
done <- Xenvm.waitForAcpiState uuid 0 (Just 30)
when (not done) $ warn $ "timeout waiting for S0 for " ++ show uuid
-- Execute an action in parallel for each of the given VMs, returning the results. If any action
-- fails, the error is reported through an exception.
parallelVmExec :: [Uuid] -> (Uuid -> Rpc a) -> Rpc [a]
parallelVmExec uuids f = do
mvars <- mapM handle uuids
results <- mvars `seq` liftIO $ mapM takeMVar mvars
mapM unwrap results
where
unwrap (Right v) = return v
unwrap (Left err) = throwError err
handle uuid = do
context <- rpcGetContext
liftIO $ do
r <- newEmptyMVar
forkIO $ do
-- execute in rpc monad
rpcStatus <- rpc context (f uuid)
-- give result or error
putMVar r rpcStatus
return r
-- Execute an action in stages, one stage per vm type; each stage is done in parallel but the stages
-- themselves are sequential
parallelVmExecByType :: [VmType] -> (Uuid -> Rpc a) -> Rpc [(Uuid, a)]
parallelVmExecByType types f =
concat <$> mapM run types
where
run t = getVmsByType t >>= \uuids ->
parallelVmExec uuids f >>= \results ->
return $ zip uuids results
-- parallel execution in explicitly specified stages (as sets of uuids). Stages are sequential, actions
-- in a stage are parallel
parallelVmExecInStages :: [[Uuid]] -> (Uuid -> Rpc a) -> Rpc [(Uuid, a)]
parallelVmExecInStages stages f =
concat <$> mapM run stages
where run uuids = parallelVmExec uuids f >>= \results ->
return $ zip uuids results
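-- For example (uuids purely illustrative), to shut down vmA first and then vmB and vmC in parallel:
--   parallelVmExecInStages [[vmA], [vmB, vmC]] shutdownVm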
-- Switch to a VM
switchVm :: MonadRpc e m => Uuid -> m Bool
switchVm uuid = whenDomainID False uuid $ \domid -> do
debug $ "Attempting to switch screen to domain " ++ show uuid
-- ensure switcher is ready
liftIO $ waitForSwitcher domid
success <- inputSwitchFocus domid
when (not success) $ warn ("switchVm: failed for uuid " ++ show uuid)
return success
-- Switch to a VM, and repeat the attempts until it succeeds
reallySwitchVm :: MonadRpc e m => Uuid -> Float -> m Bool
reallySwitchVm uuid timeout_time =
do t0 <- liftIO getCurrentTime
loop t0
where
loop t0 =
do t1 <- liftIO getCurrentTime
let d = realToFrac $ diffUTCTime t1 t0
if d >= timeout_time || d < 0
then return False
else do
r <- switchVm uuid
if r then return True else liftIO (threadDelay (2*10^5)) >> loop t0
-- Wait until switcher for given domain is ready
waitForSwitcher :: DomainID -> IO Bool
waitForSwitcher domid = do
status <-
timeout ( 10^6 * max_wait_secs ) .
xsWaitFor (notify_path domid) $
do ready_str <- xsRead (notify_path domid)
return $ ready_str == Just "true"
case status of
Just () -> return True
Nothing -> warn ("timeout while waiting for switcher " ++ show domid ++ " to become ready") >> return False
where
notify_path domid = "/local/domain/" ++ show domid ++ "/switcher/ready"
max_wait_secs = 10
-- Switch to graphics fallback vm
switchGraphicsFallback :: Rpc Bool
switchGraphicsFallback = do
vm <- getGraphicsFallbackVm
case vm of
Nothing -> return False
Just vm -> switchVm vm
-- This just sets authentication context on input daemon
loginToVm :: Uuid -> Rpc ()
loginToVm uuid = do
-- typically this is actually a HASH not a real user id
user <- readConfigProperty uuid vmCryptoUser
case user of
Nothing -> return ()
Just (uid :: String) -> inputAuthSetContextFlags uid "" (auth_FLAG_REMOTE_USER .|.
auth_FLAG_USER_HASH)
allocateDiskID :: Uuid -> Rpc DiskID
allocateDiskID uuid =
do disks <- getDisks uuid
return $ (maxID (M.keys disks)) + 1
where
maxID [] = (-1)
maxID xs = maximum xs
allocateNicID :: Uuid -> Rpc NicID
allocateNicID uuid =
getNicIds uuid >>= \ids -> return (XbDeviceID $ (maxID ids) + 1)
where
maxID [] = (-1)
maxID xs = maximum . map xbdevID $ xs
modifyDBVmFirewallRules :: Uuid -> ([Firewall.Rule] -> [Firewall.Rule]) -> Rpc ()
modifyDBVmFirewallRules uuid f = do
r <- getVmFirewallRules uuid
let r' = nub $ f r
when ( r /= r' ) $ saveConfigProperty uuid vmFirewallRules r'
addVmFirewallRule :: Uuid -> Firewall.Rule -> Rpc ()
addVmFirewallRule uuid rule = modifyDBVmFirewallRules uuid (++ [rule])
deleteVmFirewallRule :: Uuid -> Firewall.Rule -> Rpc ()
deleteVmFirewallRule uuid rule = modifyDBVmFirewallRules uuid (filter (/= rule))
getEffectiveVmFirewallRules :: Uuid -> Rpc [Firewall.Rule]
getEffectiveVmFirewallRules uuid =
getVmFirewallRules uuid >>= \rules -> return $ nub (rules ++ map Firewall.inverse rules)
doVmFirewallRules :: Rpc () -> Rpc ([Firewall.ActiveVm], [Firewall.ActiveVm]) -> Rpc ()
doVmFirewallRules message which =
whenM appGetV4VFirewall $ do
message
(vms, vms') <- which
seamlessVms <- getSeamlessVms
info $ "firewall rule delta vms:"
info $ "BEFORE: " ++ show vms
    info $ "NOW: " ++ show vms'
let reduce vms ((Firewall.ActiveVm _ _ vm _ _), rule) =
Firewall.reduce (Firewall.ReduceContext vm seamlessVms vms) [rule]
vm_rules = mapM (getEffectiveVmFirewallRules . Firewall.vmUuid)
let makeRules vms = (concat . map (reduce vms) .
concat . zipWith (\vm rs -> map (vm,) rs) vms) <$>
vm_rules vms
changeset <- Firewall.changeset <$> makeRules vms <*> makeRules vms'
liftIO $ Firewall.applyChangeset changeset
applyVmFirewallRules :: Uuid -> Rpc ()
applyVmFirewallRules uuid = doVmFirewallRules (info $ "applying v4v firewall rules due to " ++ show uuid) $
do active <- activeVms
return (filter (\vm -> Firewall.vmUuid vm /= uuid) active,
active)
unapplyVmFirewallRules :: Uuid -> Rpc ()
unapplyVmFirewallRules uuid = doVmFirewallRules (info $ "unapplying v4v firewall rules due to " ++ show uuid) $
do active <- activeVms
myself <- getActiveVm uuid
return (nub $ active ++ maybeToList myself
, filter (\vm -> Firewall.vmUuid vm /= uuid) active)
getActiveVm :: Uuid -> Rpc (Maybe Firewall.ActiveVm)
getActiveVm uuid = do
maybe_domid <- liftIO . xsRead $ "/xenmgr/vms/" ++ show uuid ++ "/domid"
stubdom <- getStubDomainID uuid
typ <- typStr <$> getVmType uuid
name <- getVmName uuid
return . fmap (\domid -> Firewall.ActiveVm domid stubdom uuid typ name) . fmap read $ maybe_domid
where typStr (ServiceVm tag) = tag
typStr Svm = "svm"
mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b]
mapMaybeM op = liftM catMaybes . mapM op
-- this resolves domid using xenstore as the domain might well be destroyed on xen level when this info is needed
activeVms :: Rpc [Firewall.ActiveVm]
activeVms =
mapMaybeM getActiveVm =<< filterM isRunning =<< getVms
addNicToVm :: Uuid -> NicDef -> Vm NicID
addNicToVm uuid nic = withVmDbLock . liftRpc $
do info $ "adding virtual nic to VM " ++ show uuid
nics <- getVmNicDefs uuid
id <- allocateNicID uuid
let nics' = M.insert id (nic { nicdefId = id }) nics
saveConfigProperty uuid vmNics nics'
return id
addDefaultNicToVm :: Uuid -> Vm NicID
addDefaultNicToVm uuid = addNicToVm uuid nic
where
nic = NicDef { nicdefId = XbDeviceID (-1)
, nicdefNetwork = fallbackNetwork
, nicdefWirelessDriver = False
, nicdefBackendUuid = Nothing
, nicdefBackendName = Nothing
, nicdefBackendDomid = Nothing
, nicdefEnable = True
, nicdefMac = Nothing }
addDiskToVm :: Uuid -> Disk -> Vm DiskID
addDiskToVm uuid disk = withVmDbLock . liftRpc $
do info $ "adding virtual disk to VM " ++ show uuid
disks <- getDisks uuid
id <- allocateDiskID uuid
let disks' = M.insert id disk disks
saveConfigProperty uuid vmDisks disks'
return id
--
removeNicFromVm :: Uuid -> NicID -> Vm ()
removeNicFromVm uuid id = withVmDbLock . liftRpc $
do info $ "removing virtual nic " ++ show id ++ " from VM " ++ show uuid
nics <- getVmNicDefs uuid
saveConfigProperty uuid vmNics (M.delete id nics)
addDefaultDiskToVm :: Uuid -> Vm DiskID
addDefaultDiskToVm uuid =
do virt_path <- pickDiskVirtPath uuid
addDiskToVm uuid $
Disk { diskPath = ""
, diskType = VirtualHardDisk
, diskMode = Vm.Types.ReadWrite
, diskDeviceType = DiskDeviceTypeDisk
, diskDevice = virt_path
, diskSnapshotMode = Nothing
, diskSha1Sum = Nothing
, diskManagedType = UnmanagedDisk
, diskShared = False
, diskEnabled = True
}
--
addVhdDiskToVm :: Uuid -> FilePath -> Vm DiskID
addVhdDiskToVm uuid path = do
virt_path <- pickDiskVirtPath uuid
let disk = Disk {
diskPath = path
, diskType = VirtualHardDisk
, diskMode = Vm.Types.ReadWrite
, diskDeviceType = DiskDeviceTypeDisk
, diskDevice = virt_path
, diskSnapshotMode = Nothing
, diskSha1Sum = Nothing
, diskManagedType = UnmanagedDisk
, diskShared = False
, diskEnabled = True
}
addDiskToVm uuid disk
--
addPhyDiskToVm :: Uuid -> FilePath -> Vm DiskID
addPhyDiskToVm uuid path = do
virt_path <- pickDiskVirtPath uuid
let disk = Disk {
diskPath = path
, diskType = PhysicalDevice
, diskMode = Vm.Types.ReadWrite
, diskDeviceType = DiskDeviceTypeDisk
, diskDevice = virt_path
, diskSnapshotMode = Nothing
, diskSha1Sum = Nothing
, diskManagedType = UnmanagedDisk
, diskShared = False
, diskEnabled = True
}
addDiskToVm uuid disk
--
removeDiskFromVm :: Uuid -> DiskID -> Vm ()
removeDiskFromVm uuid id = withVmDbLock . liftRpc $
do info $ "removing a virtual disk " ++ show id ++ " from VM " ++ show uuid
disks <- getDisks uuid
case M.lookup id disks of
Nothing -> return ()
Just d -> do
let disks' = M.delete id disks
removeDiskFiles uuid d
saveConfigProperty uuid vmDisks disks'
removeDiskFiles :: Uuid -> Disk -> Rpc ()
removeDiskFiles uuid d = removeVhd d where
removeVhd d
| not (diskShared d), diskType d == VirtualHardDisk
= do refs <- nub . map fst <$> getVhdReferences (diskPath d)
-- only remove when only this vm references the vhd
when (refs == [uuid]) $ do
let p = diskPath d
liftIO . whenM (doesFileExist p) $ do
info $ "Removing VHD file " ++ p
removeLink p
removeVhd _ = return ()
--
createAndAddDiskToVm :: Uuid -> Int -> Vm DiskID
createAndAddDiskToVm uuid sizeGB =
do let sizeMB = sizeGB * 1024
vhdPath <- liftIO $ createVhd sizeMB
addVhdDiskToVm uuid vhdPath
modifyVmNic :: Uuid -> NicID -> (NicDef -> NicDef) -> Vm ()
modifyVmNic uuid nicID modifyF = withVmDbLock . liftRpc $
do n <- getNic uuid nicID
case n of
Nothing -> failNoSuchNic
Just nic -> let nic' = modifyF nic in
saveConfigProperty uuid (vmNic nicID) nic'
modifyVmDisk :: Uuid -> DiskID -> (Disk -> Disk) -> Vm ()
modifyVmDisk uuid diskID modifyF = withVmDbLock . liftRpc $
do p <- readConfigProperty uuid (vmDisk diskID)
case p of
Nothing -> failNoSuchDisk
Just disk ->
let disk' = modifyF disk in
saveConfigProperty uuid (vmDisk diskID) disk'
modifyVmPciPtRules :: Uuid -> (PciPtRuleMap -> PciPtRuleMap) -> Rpc ()
modifyVmPciPtRules uuid modifyF =
do rules <- getPciPtRules uuid
let rules' = modifyF rules
saveConfigProperty uuid vmPcis rules'
finally' = flip E.finally
mountVmDisk :: Uuid -> DiskID -> Bool -> FilePath -> Rpc ()
mountVmDisk uuid diskID readonly path =
do disk <- getDisk uuid diskID
case disk of
Nothing -> failNoSuchDisk
Just disk -> case diskType disk of
VirtualHardDisk -> mountVhd $ diskPath disk
_ -> failIncorrectDiskType
where
mountVhd :: FilePath -> Rpc ()
mountVhd vhdpath =
do keydirs <- concat . intersperse "," <$> getCryptoKeyLookupPaths uuid
liftIO $ do
dev <- tapCreate "vhd" [("TAPDISK2_CRYPTO_KEYDIR", keydirs)] readonly vhdpath
E.handle (\(e :: E.SomeException) -> removedev dev >> E.throw e) $ do
xsWrite (xspath ++ "/dev") dev
let mountopts =
if readonly then ["-o", "ro"] else []
readProcessOrDie "mount" (mountopts ++ [dev, path]) ""
xsWrite (xspath ++ "/path") path
return ()
where
removedev dev = do
tapDestroy dev
xsRm xspath
xspath = "/xenmgr/mount/" ++ show uuid ++ "/" ++ show diskID
unmountVmDisk :: Uuid -> DiskID -> Rpc ()
unmountVmDisk uuid diskID =
do disk <- getDisk uuid diskID
case disk of
Nothing -> failNoSuchDisk
Just disk -> case diskType disk of
VirtualHardDisk -> unmountVhd $ diskPath disk
_ -> failIncorrectDiskType
where
unmountVhd :: FilePath -> Rpc ()
unmountVhd vhdpath = liftIO $
do dev <- fromMaybe "" <$> xsRead (xspath ++ "/dev")
mountpath <- xsRead (xspath ++ "/path")
case mountpath of
Nothing -> error $ "device not mounted " ++ show dev
Just mountpath -> do
readProcessOrDie "umount" [mountpath] ""
tapDestroy dev
xsRm xspath
return ()
where
xspath = "/xenmgr/mount/" ++ show uuid ++ "/" ++ show diskID
generateCryptoKeyIn :: Uuid -> DiskID -> Int -> FilePath -> Rpc ()
generateCryptoKeyIn vm diskID keybits dir
= do when (not $ keybits `elem` [256, 512]) $ error "only supported key sizes: 256, 512"
disk <- haveDisk =<< getDisk vm diskID
checkFileEx disk
checkKeySet disk
liftIO (setKeyHash disk =<< mkRandomKeyFile disk)
where
checkFileEx d = whenM (liftIO $ doesFileExist $ keyfile d) $ failCryptoKeyAlreadyExists
checkKeySet d = whenM ((/= "none") <$> liftIO (readKeyHash d)) $ failVhdKeyAlreadySet
readKeyHash d = strip <$> readProcessOrDie "vhd-util" ["key", "-n", diskPath d, "-p"] []
setKeyHash d f = void $ readProcessOrDie "vhd-util" ["key", "-n", diskPath d, "-k", f, "-s"] []
mkRandomKeyFile d =
let dst = keyfile d
src = "/dev/random" in
copy src dst (fromIntegral keysize_bytes) >> return dst
copy s d n = BL.readFile s >>= return . BL.take n >>= BL.writeFile d
keyfile d = dir </> diskUuid d ++ ",aes-xts-plain," ++ show keybits ++ ".key"
-- 512 keysize by default (this means AES-256 as aes-xts uses keys split in half)
keysize_bytes = keybits `div` 8
haveDisk Nothing = failNoSuchDisk
haveDisk (Just d) = return d
diskUuid = takeBaseName . diskPath
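-- Key file naming example (paths hypothetical): for a disk stored at /storage/disks/ab12cd.vhd
-- and keybits = 512, the key is written to <dir>/ab12cd,aes-xts-plain,512.key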
generateCryptoKey :: Uuid -> DiskID -> Int -> Rpc ()
generateCryptoKey vm diskID keybits
= into =<< ( split ',' <$> appGetPlatformCryptoKeyDirs )
where
into [] = error "no platform crypto directories configured"
into (p:_) =
do liftIO $ whenM (not <$> doesDirectoryExist p) $ createDirectory p
generateCryptoKeyIn vm diskID keybits p
-- only computes hash for disk of id 0. TODO: make this more generic maybe
addVmDiskHashes :: Vm ()
addVmDiskHashes = vmUuid >>= \uuid -> addTo uuid 0
where
addTo uuid id =
do Just primary <- M.lookup id <$> (liftRpc $ getDisks uuid)
case diskSha1Sum primary of
Just _ -> return () -- already have
Nothing ->
liftRpc (computeDiskSha1Sum uuid primary) >>= \sum ->
modifyVmDisk uuid id $ \disk -> disk { diskSha1Sum = Just sum }
checkVmDiskHashes :: Vm Bool
checkVmDiskHashes =
do bypass <- liftRpc appBypassSha1SumChecks
if not bypass
then do
uuid <- vmUuid
disks <- M.elems <$> liftRpc (getDisks uuid)
all (==True) <$> mapM (validate uuid) disks
else return True
where
validate uuid d
| Just sh <- sha1 = liftRpc ( computeDiskSha1Sum uuid d ) >>= \sh' -> equalT sh sh'
| otherwise = return True
where
sha1 = diskSha1Sum d
path = diskPath d
equalT sh sh' | sh == sh' = return True
| otherwise = vmSubmit (VmMeasurementFailure path sh sh') >> return False
getMeasureFailAction :: Rpc PMAction
getMeasureFailAction = dbReadWithDefault ActionForcedShutdown "/xenmgr/measure-fail-action"
setMeasureFailAction :: PMAction -> Rpc ()
setMeasureFailAction a = dbWrite "/xenmgr/measure-fail-action" a
tapEnvForVm :: Uuid -> Rpc [(String,String)]
tapEnvForVm uuid = do
keydirs <- concat . intersperse "," <$> getCryptoKeyLookupPaths uuid
return [("TAPDISK2_CRYPTO_KEYDIR", keydirs)]
tapCreateForVm :: Uuid -> Bool -> FilePath -> Rpc FilePath
tapCreateForVm uuid ro path = do
env <- tapEnvForVm uuid
liftIO $ tapCreateVhd env ro path
-- Compute sha1 sum for a disk. Has to go through a tap device because the vhd file changes
-- even for a completely readonly fs
computeDiskSha1Sum :: Uuid -> Disk -> Rpc Integer
computeDiskSha1Sum vm_uuid d
| diskType d == VirtualHardDisk =
do tapdev <- tapCreateForVm vm_uuid True (diskPath d)
liftIO $
E.finally (fileSha1Sum tapdev) (spawnShell' $ "tap-ctl destroy -d " ++ tapdev)
| diskType d == Aio = liftIO $
do tapdev <- fromMaybe (error $ "FAILED to create tap device for " ++ diskPath d ++ ", possibly in use?")
. fmap chomp
<$> (spawnShell' $ "tap-ctl create -a aio:" ++ (diskPath d))
E.finally (fileSha1Sum tapdev) (spawnShell' $ "tap-ctl destroy -d " ++ tapdev)
| diskType d `elem` [PhysicalDevice, DiskImage] = liftIO $
fileSha1Sum (diskPath d)
| otherwise = error "unsupported disk type, should be vhd or phy or file"
parseKernelExtract :: String -> (Maybe DiskID, Maybe PartitionNum, FilePath)
parseKernelExtract p
= case split ':' p of
[file] -> (Nothing, Nothing, file)
[opts, file] ->
      case split ',' opts of
[diskS,partS] ->
let diskNum = fromMaybe (error "bad disk number in kernel-extract") $ maybeRead diskS in
let partNum = fromMaybe (error "bad partition number in kernel-extract") $ maybeRead partS in
(Just diskNum, Just partNum, file)
[diskS] ->
let diskNum = fromMaybe (error "bad disk number in kernel-extract") $ maybeRead diskS in
(Just diskNum, Nothing, file)
_ -> error "incorrect disk & partition specification in kernel-extract"
_ -> error "incorrect kernel-extract syntax"
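-- Illustrative examples of the kernel-extract syntax handled above (paths hypothetical):
--   "/boot/vmlinuz"     -> (Nothing, Nothing, "/boot/vmlinuz")
--   "1:/boot/vmlinuz"   -> (Just 1,  Nothing, "/boot/vmlinuz")
--   "0,2:/boot/vmlinuz" -> (Just 0,  Just 2,  "/boot/vmlinuz")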
-- Assumes that kernel is stored in disk of id 0 for that vm
extractKernelFromPvDomain :: Uuid -> Rpc ()
extractKernelFromPvDomain uuid = join $ extractFileFromPvDomain <$> getVmKernelPath uuid <*> getVmKernelExtract uuid <*> pure uuid
extractInitrdFromPvDomain :: Uuid -> Rpc ()
extractInitrdFromPvDomain uuid = do
initrd <- getVmInitrd uuid
let initrd' = case initrd of
"" -> Nothing
_ -> Just initrd
join $ extractFileFromPvDomain <$> pure initrd' <*> getVmInitrdExtract uuid <*> pure uuid
extractFileFromPvDomain :: Maybe FilePath -> String -> Uuid -> Rpc ()
extractFileFromPvDomain Nothing _ _ = return ()
extractFileFromPvDomain (Just dst_path) ext_loc uuid = withKernelPath dst_path where
withKernelPath dst_kernel_path = do
let (diskid,partid,src_path) = parseKernelExtract ext_loc
disk <- getDisk uuid (fromMaybe 0 diskid)
keydirs <- concat . intersperse "," <$> getCryptoKeyLookupPaths uuid
case (disk, dst_kernel_path) of
(_, "") -> return () -- doesn't have a kernel, not a pv domain, ignore
(Nothing, _) -> error "extract-kernel: domain does not have a disk with ID 0"
(_, path) | null src_path -> return () -- no extraction
(Just disk, path) -> do liftIO $ copyKernelFromDisk [("TAPDISK2_CRYPTO_KEYDIR", keydirs)] disk path (partid,src_path)
info $ "extracted pv kernel/initrd from " ++ src_path ++ " into " ++ path
copyKernelFromDisk :: [ (String, String) ] -> Disk -> FilePath -> (Maybe PartitionNum,FilePath) -> IO ()
copyKernelFromDisk extraEnv disk dst_path src
= copyFileFromDisk extraEnv (diskType disk) (diskMode disk == ReadOnly) (diskPath disk) src dst_path
changeVmNicNetwork :: Uuid -> NicID -> Network -> XM ()
changeVmNicNetwork uuid nicid network = do
-- Save in database
xmRunVm uuid $ modifyVmNic uuid nicid $ \nic -> nic { nicdefNetwork = network }
-- Hotswap network if VM is running
whenM (isRunning uuid) $ do
xmWithNetsyncLock . liftRpc $
do -- disconnect vif
         -- notify xenvm (TODO: this may become unnecessary)
-- notify network daemon
-- resynchronise vif state
Xenvm.connectVif uuid nicid False
Xenvm.changeNicNetwork uuid nicid network
whenDomainID_ uuid $ \domid -> joinNetwork network domid nicid
-- Property accessors
---------------------
setVmWiredNetwork :: Uuid -> Network -> XM ()
setVmWiredNetwork uuid network
= getVmWiredNics uuid >>= pure . take 1 >>= mapM_ (\n -> changeVmNicNetwork uuid (nicdefId n) network)
setVmWirelessNetwork :: Uuid -> Network -> XM ()
setVmWirelessNetwork uuid network
= getVmWirelessNics uuid >>= pure . take 1 >>= mapM_ (\n -> changeVmNicNetwork uuid (nicdefId n) network)
-- TODO: this sucks
-- update 6.05.2011: sucks a little bit less now but still
setVmGpu :: Uuid -> String -> Rpc ()
setVmGpu uuid s = do
running <- isRunning uuid
when running $ failCannotTurnHdxWhenVmRunning
case s of
"hdx" -> test_hdx
_ -> return ()
saveConfigProperty uuid vmGpu s
where
set_hdx uuid cfgs = map set cfgs
where set (uuid',c) | uuid == uuid' = (uuid', c { vmcfgGraphics = HDX })
| otherwise = (uuid', c)
test_hdx = do
unlessM (getVmPvAddons uuid) $ failCannotTurnHdxWithoutPvAddons
verifyAutostartAndHDX (set_hdx uuid)
verifyAutostartAndHDX :: ([(Uuid,VmConfig)] -> [(Uuid,VmConfig)]) -> Rpc ()
verifyAutostartAndHDX change = do
max_vgpus <- getMaxVgpus
vms <- getGuestVms
cfgs <- zip vms <$> mapM (\uuid -> getVmConfig uuid False) vms
let cfgs' = change cfgs
offending (uuid,cfg) = vmcfgGraphics cfg == HDX && vmcfgAutostart cfg
when (length (filter offending cfgs') > max_vgpus) $ failSimultaneousAutostartAndHdx
setVmCd :: Uuid -> String -> Rpc ()
setVmCd uuid str =
-- change all cdrom paths
readConfigPropertyDef uuid vmDisks [] >>=
mapM maybeChange >>=
saveConfigProperty uuid vmDisks
where
maybeChange disk | not (isCdrom disk) = return disk
| otherwise = do isos <- appIsoPath
let path = isos ++ "/" ++ name
-- hot swap cd
whenVmRunning uuid (Xenvm.changeCd uuid path)
return $ disk { diskPath = path }
name | str == "" = "null.iso"
| otherwise = str
setVmType :: Uuid -> VmType -> Rpc ()
setVmType uuid typ = saveConfigProperty uuid vmType typ
setVmAmtPt :: Uuid -> Bool -> Rpc ()
setVmAmtPt uuid amtpt = saveConfigProperty uuid vmAmtPt amtpt
setVmSeamlessTraffic :: Uuid -> Bool -> Rpc ()
setVmSeamlessTraffic uuid view = saveConfigProperty uuid vmSeamlessTraffic view
setVmStartOnBoot :: Uuid -> Bool -> Rpc ()
setVmStartOnBoot uuid start = do
when start $ verifyAutostartAndHDX (set_start uuid)
saveConfigProperty uuid vmStartOnBoot start
where
set_start uuid cfgs = map set cfgs
where set (uuid',c) | uuid == uuid' = (uuid', c { vmcfgAutostart = True })
| otherwise = (uuid', c)
setVmHiddenInSwitcher :: Uuid -> Bool -> Rpc ()
setVmHiddenInSwitcher uuid hidden = do
saveConfigProperty uuid vmHidden hidden
exportVmSwitcherInfo uuid
setVmHiddenInUi :: Uuid -> Bool -> Rpc ()
setVmHiddenInUi uuid hidden = saveConfigProperty uuid vmHiddenInUi hidden
-- in mebibytes
setVmMemory :: Uuid -> Int -> Rpc ()
setVmMemory uuid mb = do
saveConfigProperty uuid vmMemory mb
-- running <- isRunning uuid
-- when running $ Xenvm.setMemTarget uuid mb
-- <= 0 == turned off
setVmMemoryStaticMax :: Uuid -> Int -> Rpc ()
setVmMemoryStaticMax uuid mib = do
saveOrRmConfigProperty uuid vmMemoryStaticMax (if mib <= 0 then Nothing else Just mib)
-- <= 0 == turned off
setVmMemoryMin :: Uuid -> Int -> Rpc ()
setVmMemoryMin uuid mib = do
saveOrRmConfigProperty uuid vmMemoryMin (if mib <= 0 then Nothing else Just mib)
setVmName :: Uuid -> String -> Rpc ()
setVmName uuid name = do
saveConfigProperty uuid vmName (strip name)
exportVmSwitcherInfo uuid
notifyVmNameChanged uuid
setVmImagePath :: Uuid -> FilePath -> Rpc ()
setVmImagePath uuid path = do
saveConfigProperty uuid vmImagePath (strip path)
exportVmSwitcherInfo uuid
-- swaps slots as necessary, requires locking
setVmSlot :: Uuid -> Int -> XM ()
setVmSlot uuid slot
= xmWithVmSlotLock . liftRpc $ swap
where
swap = do
there <- if slot < 0 then return Nothing else slotted slot
case there of
Nothing -> really_save uuid slot
Just vm -> getVmSlot uuid >>= \prev ->
when (prev>0) (really_save vm prev) >> really_save uuid slot
slotted at = fmap listToMaybe $ filterM (\uuid -> (at ==) <$> getVmSlot uuid) =<< getVms
really_save uuid slot = getDomainID uuid >>= set where
set (Just domid) = inputSetSlot domid slot >> saveConfigProperty uuid vmSlot slot
set _ = saveConfigProperty uuid vmSlot slot
setVmPvAddons uuid adds = saveConfigProperty uuid vmPvAddons ( adds :: Bool )
setVmPvAddonsVersion uuid v = saveConfigProperty uuid vmPvAddonsVersion ( v :: String )
setVmTimeOffset uuid o = saveConfigProperty uuid vmTimeOffset ( o :: Int )
setVmCryptoUser uuid u = saveOrRmConfigProperty uuid vmCryptoUser (if u == "" then Nothing else Just u)
setVmCryptoKeyDirs uuid d = saveConfigProperty uuid vmCryptoKeyDirs ( d :: String )
setVmAutoS3Wake uuid a = saveConfigProperty uuid vmAutoS3Wake ( a :: Bool )
setVmNotify uuid v = saveConfigProperty uuid vmNotify (v::String)
setVmHvm uuid v = saveConfigProperty uuid vmHvm (v::Bool)
setVmPae uuid v = saveConfigProperty uuid vmPae (v::Bool)
setVmApic uuid v = saveConfigProperty uuid vmApic (v::Bool)
setVmViridian uuid v = saveConfigProperty uuid vmViridian (v::Bool)
setVmNx uuid v = saveConfigProperty uuid vmNx (v::Bool)
setVmSound uuid v = saveConfigProperty uuid vmSound (v::String)
setVmDisplay uuid v = saveConfigProperty uuid vmDisplay (v::String)
setVmBoot uuid v = saveConfigProperty uuid vmBoot (v::String)
setVmCmdLine uuid v = saveConfigProperty uuid vmCmdLine (v::String)
setVmKernel uuid v = saveConfigProperty uuid vmKernel (v::String)
setVmKernelExtract uuid v = saveConfigProperty uuid vmKernelExtract (v::String)
setVmInitrd uuid v = saveConfigProperty uuid vmInitrd (v::String)
setVmInitrdExtract uuid v = saveConfigProperty uuid vmInitrdExtract (v::String)
setVmAcpiPt uuid v = saveConfigProperty uuid vmAcpiPt (v::Bool)
setVmVcpus uuid v = saveConfigProperty uuid vmVcpus (v::Int)
setVmCoresPerSocket uuid v = saveConfigProperty uuid vmCoresPerSocket (v::Int)
setVmVideoram uuid v = saveConfigProperty uuid vmVideoram (v::Int)
setVmPassthroughMmio uuid v = saveConfigProperty uuid vmPassthroughMmio (v::String)
setVmPassthroughIo uuid v = saveConfigProperty uuid vmPassthroughIo (v::String)
setVmFlaskLabel uuid v = saveConfigProperty uuid vmFlaskLabel (v::String)
setVmHap uuid v = saveConfigProperty uuid vmHap (v::Bool)
setVmSmbiosPt uuid v = saveConfigProperty uuid vmSmbiosPt (v::Bool)
setVmDescription uuid v = saveConfigProperty uuid vmDescription (v::String)
setVmStartOnBootPriority uuid v = saveConfigProperty uuid vmStartOnBootPriority (v::Int)
setVmKeepAlive uuid v = saveConfigProperty uuid vmKeepAlive (v::Bool)
setVmProvidesNetworkBackend uuid v = saveConfigProperty uuid vmProvidesNetworkBackend (v::Bool)
setVmProvidesDefaultNetworkBackend uuid v = saveConfigProperty uuid vmProvidesDefaultNetworkBackend (v::Bool)
setVmProvidesGraphicsFallback uuid v = saveConfigProperty uuid vmProvidesGraphicsFallback (v::Bool)
setVmShutdownPriority uuid v = saveConfigProperty uuid vmShutdownPriority (v::Int)
setVmSeamlessId uuid v = saveConfigProperty uuid vmSeamlessId (v::String)
setVmQemuDmPath uuid v = saveConfigProperty uuid vmQemuDmPath (v::String)
setVmQemuDmTimeout uuid v = saveConfigProperty uuid vmQemuDmTimeout (v::Int)
setVmControlPlatformPowerState uuid v = saveConfigProperty uuid vmControlPlatformPowerState (v::Bool)
setVmExtraXenvm uuid str = saveConfigProperty uuid vmExtraXenvm (filter (not.null) . map strip . split ';' $ str)
setVmExtraHvm uuid str = saveConfigProperty uuid vmExtraHvms (filter (not.null) . map strip . split ';' $ str)
setVmStartFromSuspendImage uuid v = saveConfigProperty uuid vmStartFromSuspendImage (v::String)
setVmTrackDependencies uuid v = saveConfigProperty uuid vmTrackDependencies (v::Bool)
setVmSeamlessMouseLeft uuid v =
do saveConfigProperty uuid vmSeamlessMouseLeft (v::String)
inputUpdateSeamlessMouseSettings uuid
setVmSeamlessMouseRight uuid v =
do saveConfigProperty uuid vmSeamlessMouseRight (v::String)
inputUpdateSeamlessMouseSettings uuid
setVmOs uuid os = saveConfigProperty uuid vmOs (osToStr os)
setVmOemAcpiFeatures uuid v = saveConfigProperty uuid vmOemAcpiFeatures (v::Bool)
setVmUsbEnabled uuid v = saveConfigProperty uuid vmUsbEnabled (v::Bool)
setVmUsbControl uuid v =
do saveConfigProperty uuid vmUsbControl (v::Bool)
       -- TODO: surely this can be improved; for now, send SIGHUP so that the proxy
       -- daemon reevaluates its rules
liftIO $ spawnShell' "killall -SIGHUP rpc-proxy"
return ()
setVmStubdom uuid v = saveConfigProperty uuid vmStubdom (v::Bool)
setVmCpuid uuid v = saveConfigProperty uuid vmCpuid (v::String)
setVmXciCpuidSignature uuid v = saveConfigProperty uuid vmXciCpuidSignature (v::Bool)
setVmGreedyPcibackBind uuid v = saveConfigProperty uuid vmGreedyPcibackBind (v::Bool)
setVmRunPostCreate uuid v = saveOrRmConfigProperty uuid vmRunPostCreate (v::Maybe String)
setVmRunPreDelete uuid v = saveOrRmConfigProperty uuid vmRunPreDelete (v::Maybe String)
setVmRunPreBoot uuid v = saveOrRmConfigProperty uuid vmRunPreBoot (v::Maybe String)
setVmRunInsteadofStart uuid v = saveOrRmConfigProperty uuid vmRunInsteadofStart (v::Maybe String)
setVmRunOnStateChange uuid v = saveOrRmConfigProperty uuid vmRunOnStateChange (v::Maybe String)
setVmRunOnAcpiStateChange uuid v = saveOrRmConfigProperty uuid vmRunOnAcpiStateChange (v::Maybe String)
setVmNativeExperience uuid v
= do mapM_ clearSetting =<< getGuestVms
saveConfigProperty uuid vmNativeExperience (v :: Bool)
where clearSetting uuid = saveConfigProperty uuid vmNativeExperience False
setVmShowSwitcher uuid v = saveConfigProperty uuid vmShowSwitcher (v :: Bool)
setVmWirelessControl uuid v = saveConfigProperty uuid vmWirelessControl (v :: Bool)
setVmUsbGrabDevices uuid v = saveConfigProperty uuid vmUsbGrabDevices (v::Bool)
setVmS3Mode uuid v = saveConfigProperty uuid vmS3Mode (v::S3Mode)
setVmS4Mode uuid v = saveConfigProperty uuid vmS4Mode (v::S4Mode)
setVmVsnd uuid v = saveConfigProperty uuid vmVsnd (v::Bool)
setVmRealm uuid v = saveConfigProperty uuid vmRealm (v::String)
setVmSyncUuid uuid v = saveConfigProperty uuid vmSyncUuid (v::String)
setVmIcbinnPath uuid v = saveConfigProperty uuid vmIcbinnPath (v::String)
setVmOvfTransportIso uuid = saveConfigProperty uuid vmOvfTransportIso
setVmDownloadProgress uuid v = do
dbWrite ("/vm/"++show uuid++"/download-progress") (v::Int)
notifyVmTransferChanged uuid
setVmReady uuid v = saveConfigProperty uuid vmReady (v::Bool)
setVmVkbd uuid v = saveConfigProperty uuid vmVkbd (v::Bool)
setVmVfb uuid v = saveConfigProperty uuid vmVfb (v::Bool)
setVmV4V uuid v = saveConfigProperty uuid vmV4v (v::Bool)
setVmRestrictDisplayDepth uuid v = saveConfigProperty uuid vmRestrictDisplayDepth (v::Bool)
setVmRestrictDisplayRes uuid v = saveConfigProperty uuid vmRestrictDisplayRes (v::Bool)
setVmPreserveOnReboot uuid v = saveConfigProperty uuid vmPreserveOnReboot (v::Bool)
setVmBootSentinel uuid v = saveOrRmConfigProperty uuid vmBootSentinel (v::Maybe String)
setVmHpet uuid v = saveConfigProperty uuid vmHpet (v::Bool)
setVmTimerMode uuid v = saveConfigProperty uuid vmTimerMode (v::Int)
setVmNestedHvm uuid v = saveConfigProperty uuid vmNestedHvm (v::Bool)
setVmSerial uuid v = saveConfigProperty uuid vmSerial (v::String)
-- Set the autolock flag in the VM's xenstore tree, per CD device.
-- CD devices which have the sticky bit set are never subject to autolock.
setVmAutolockCdDrives uuid v =
whenDomainID_ uuid $ \domid ->
mapM_ (set domid) =<< devs
where
devs = mapM (\d -> (,) <$> pure d <*> getCdDeviceStickyVm d)
=<< liftIO getHostBSGDevices
set domid (d,sticky_vm) =
let sticky = sticky_vm /= Nothing in -- sticky somewhere
liftIO $ xsWrite (autolockpath domid d) (f $ v && (not sticky))
where
f True = "1"
f _ = "0"
autolockpath domid (BSGDevice a b c d) =
printf "/local/domain/%d/bsgdev-req/%s/autolock" domid (printf "%d_%d_%d_%d" a b c d :: String)
-- Create XenStore information used by switcher
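-- When the VM is running, this writes /xenmgr/vms/<uuid>/{domid,name,slot,image,type,hidden,hide-switcher,seamlessid}
-- (plus a per-type flag for service VMs); when it is not running, the node is removed.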
exportVmSwitcherInfo :: Uuid -> Rpc ()
exportVmSwitcherInfo uuid =
do running <- isRunning uuid
typ <- getVmType uuid
typStr <- readConfigPropertyDef uuid vmType "svm"
if (not running)
then liftIO $ xsRm path
else do name <- getVmName uuid
slot <- getVmSlot uuid
image <- getVmImagePath uuid
hidden <- getVmHiddenInSwitcher uuid
hide_switcher <- not <$> getVmShowSwitcher uuid
domid <- fromMaybe (-1) <$> getDomainID uuid
seamlessid <- getVmSeamlessId uuid
liftIO $ do
xsWrite "/xenmgr/vms" ""
xsChmod "/xenmgr/vms" "r0"
xsWrite path ""
xsChmod path "r0"
xsWrite (path ++ "/domid") (show domid)
xsWriteUTF8 (path ++ "/name" ) name
xsWrite (path ++ "/slot" ) (show slot)
xsWrite (path ++ "/image") image
xsWrite (path ++ "/type" ) typStr
xsWrite (path ++ "/hidden") (if hidden then "1" else "0")
xsWrite (path ++ "/hide-switcher" ) (if hide_switcher then "1" else "0")
xsWrite (path ++ "/seamlessid") seamlessid
case typ of
ServiceVm tag -> xsWrite (path ++ "/" ++ tag) "1"
_ -> return ()
where
path = "/xenmgr/vms/" ++ show uuid
| cjp256/manager | xenmgr/Vm/Actions.hs | gpl-2.0 | 74,303 | 4 | 27 | 21,289 | 19,940 | 9,794 | 10,146 | 1,438 | 6 |
module EmuProcessor where
import Numeric (showHex, readHex)
import qualified Graphics.UI.SDL as SDL
import Data.Bits (Bits, shiftL, shiftR, xor, (.&.), (.|.), testBit)
import Data.Word (Word8, Word16)
import Data.Maybe (fromMaybe)
import System.Random (randomIO)
import Data.List(genericTake, genericDrop)
import qualified Data.ByteString as B
import qualified Data.Map as Map
import EmuData
data State = State {
screen :: SDL.Surface,
pixels :: [[Bool]],
pc :: Word16, -- Program counter
sp :: Word8, -- Stack pointer
stack :: [Word16],
    vx :: [Word8], -- Processor registers
    i :: Word16, -- Special register I
    dt :: Word8, -- Delay timer: its value decreases with
                 -- each instruction until it reaches 0
    st :: Word8, -- Sound timer: its value decreases with
                 -- each instruction, and the sound plays
                 -- while it is not 0
mem :: [Word8],
keyboard :: [Bool]
}
instance Show State where
show s = "I:"++show (i s)++
"\n 0,1,2,3,4,5,6,7,8,9,A,B,C,D,E,F"++
"\nReg: "++show(vx s)++"\nPC: "++show(pc s)++"\nSP: "++show(sp s)++"\nStack: "++show(stack s)++"\nDT: "++show(dt s)++"\nST: "++show(st s)
++ "\nKeyboard:\n "++ show (take 4 (keyboard s)) ++ "\n "++ show (take 4 (drop 4 (keyboard s))) ++ "\n "++ show (take 4 (drop 8 (keyboard s)))
waitForKey :: State -> IO (State, SDL.SDLKey)
waitForKey s = do
event <- getEvent
case event of
SDL.Quit -> return (s, SDL.SDLK_ESCAPE)
SDL.KeyDown k -> return (keyDown s keyMap (SDL.symKey k), SDL.symKey k)
SDL.KeyUp k -> waitForKey (keyUp s keyMap (SDL.symKey k))
_ -> do
let st = tickDT s
waitForKey st
getEvent :: IO SDL.Event
getEvent = do
e <- SDL.pollEvent
case e of
SDL.Quit -> return e
(SDL.KeyDown _)-> return e
(SDL.KeyUp _)-> return e
SDL.NoEvent -> return e
_ -> getEvent
-- Instruction handling
execute :: Word16 -> State -> IO State
execute ins s=
case showHex ins "" of
--0NNN Calls RCA 1802 program at address NNN. Not necessary for most ROMs.
    nnn@[_, _, _] -> return (advancePC s) -- This instruction is ignored; specifying it is unnecessary, but it is kept for completeness
--00E0 Clears the screen.
"e0" -> return (advancePC (s{pixels =
replicate pixelsH (replicate pixelsW False)}))
--00EE Returns from a subroutine.
"ee" -> return (advancePC (removeFromStack s))
--1NNN Jumps to address NNN.
('1':_) -> return (s{pc = n })
--2NNN Calls subroutine at NNN.
('2':_) -> return (addToStack n s)
--3XKK Skips the next instruction if VX equals KK.
('3':_) -> return (skipOn (==) (getReg s x) k s)
--4XKK Skips the next instruction if VX doesn't equal KK.
('4':_) -> return (skipOn (/=) (getReg s x) k s)
--5XY0 Skips the next instruction if VX equals VY.
('5':_) -> return (skipOn (==) (getReg s x) (getReg s y) s)
--6XNN Sets VX to KK.
('6':_) -> return (advancePC (setReg s x k))
--7XKK Adds KK to VX.
('7':_) -> return (advancePC ( addToReg s x k))
--8XY0 Sets VX to the value of VY.
['8',_,_,'0'] -> return (advancePC (setReg s x (getReg s y)))
--8XY1 Sets VX to VX or VY.
['8',_,_,'1'] -> return (advancePC (
setReg s x (getReg s y .|. getReg s x)))
--8XY2 Sets VX to VX and VY.
('8':_:_:"2") -> return (advancePC (
setReg s x (getReg s y.&. getReg s x)))
--8XY3 Sets VX to VX xor VY.
('8':_:_:"3") -> return (advancePC (
setReg s x (xor (getReg s y) (getReg s x))))
--8XY4 Adds VY to VX. VF is set to 1 when there's a carry, and to 0 when there isn't.
('8':_:_:"4") -> return (advancePC (sumReg s x y))
--8XY5 VY is subtracted from VX. VF is set to 0 when there's a borrow, and 1 when there isn't.
('8':_:_:"5") -> return (advancePC (subReg s x y x))
--8XY6 Shifts VX right by one. VF is set to the value of the least --significant bit of VX before the shift.[2]
('8':_:_:"6") -> return (advancePC (shiftRReg s x))
--8XY7 Sets VX to VY minus VX. VF is set to 0 when there's a borrow, and 1 --when there isn't.
('8':_:_:"7") -> return (advancePC (subReg s y x x))
--8XYE Shifts VX left by one. VF is set to the value of the most --significant bit of VX before the shift.[2]
('8':_:_:"e") -> return (advancePC (shiftLReg s x))
--9XY0 Skips the next instruction if VX doesn't equal VY.
('9':_:_:"0") -> return (skipOn (/=) (getReg s x) (getReg s y) s)
--ANNN Sets I to the address NNN.
('a':_) -> return (advancePC (setI s n))
--BNNN Jumps to the address NNN plus V0.
('b':_) -> return (setPC s (n+ asWord16(getReg s 0)))
--CXNN Sets VX to the result of a bitwise and operation on a random number --and NN.
('c':_) -> do
ran <- randomIO :: IO Word8
return (advancePC (setReg s x (ran.&.k)))
--DXYN Draws a sprite at coordinate (VX, VY) that has a width of 8 pixels
--and a height of N pixels. Each row of 8 pixels is read as bit-coded
--starting from memory location I; I value doesn’t change after the execution
--of this instruction. As described above, VF is set to 1 if any screen
--pixels are flipped from set to unset when the sprite is drawn, and to 0 if
--that doesn’t happen
('d':_) -> do
let sprite = map toBitsArray (loadFromMem (asInt (i s)) (asInt h) s)
(st, coll) <- drawSprite s (getReg s x) (getReg s y) sprite
if coll
then return (advancePC (setReg st 0xf 1))
else return (advancePC (setReg st 0xf 0))
--EX9E Skips the next instruction if the key stored in VX is pressed.
('e':_:"9e") -> return (skipIf (getAt (getReg s x) (keyboard s)) s)
--EXA1 Skips the next instruction if the key stored in VX isn't pressed.
('e':_:"a1") -> return (skipIf (not (getAt (getReg s x) (keyboard s))) s)
--FX07 Sets VX to the value of the delay timer.
('f':_:"07") -> return (advancePC (setReg s x (dt s)))
--FX0A A key press is awaited, and then stored in VX.
('f':_:"0a") -> do
(st, key) <- waitForKey s
if key == SDL.SDLK_ESCAPE
then do
SDL.quit
return s
else do
let keyPos = asWord8 (fromMaybe 0 (Map.lookup key keyMap))
return (advancePC (setReg s x keyPos))
--FX15 Sets the delay timer to VX.
('f':_:"15") -> return (advancePC (setDT s (getReg s x)))
    --FX18 Sets the sound timer to VX. --TODO: implement sound
('f':_:"18") -> return (advancePC s)
--FX1E Adds VX to I.[3]
('f':_:"1e") -> return (advancePC (setI s (i s +asWord16 (getReg s x))))
--FX29 Sets I to the location of the sprite for the character in VX. --Characters 0-F (in hexadecimal) are represented by a 4x5 font.
('f':_:"29") -> return (advancePC (setI s (asWord16 (getReg s x * 5))))
--FX33 Stores the binary-coded decimal representation of VX, with the most
--significant of three digits at the address in I, the middle digit at I plus
--1, and the least significant digit at I plus 2. (In other words, genericTake the
--decimal representation of VX, place the hundreds digit in memory at
--location in I, the tens digit at location I+1, and the ones digit at location I+2.)
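    --For example, if VX is 137 the bytes [1,3,7] are stored at I, I+1, and I+2.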
('f':_:"33") -> do
let v = getReg s x
let bcd = [div v 100, div (mod v 100) 10, mod v 10]
return (advancePC (loadToMem bcd (asInt (i s)) s))
--FX55 Stores V0 to VX (including VX) in memory starting at address I.
('f':_:"55") -> return (advancePC (
loadToMem (genericTake (x+1) (vx s)) (asInt (i s)) s))
--FX65 Fills V0 to VX (including VX) with values from memory starting at address I.
('f':_:"65") -> return (advancePC (
s{vx=loadFromMem (asInt (i s)) (asInt x+1) s++
drop (asInt x+1) (vx s)}))
_ -> return (advancePC s)
where x = getX ins
y = getY ins
n = getN ins
k = getK ins
h = getH ins
--
describe :: Word16 -> String
describe ins =
case showHex ins "" of
nnn@[_, _, _] -> "Cargar RCA 1802 en "++nnn
"e0" -> "Limpiar pantalla"
"ee" -> "Regresar subrutina"
('1':n) -> "["++showHex ins ""++"] Saltar a la direccion "++ nn
('2':n) -> "["++showHex ins ""++"] Llamar subrutina en "++ nn
('3':x:n) -> "["++showHex ins ""++"] Saltear si V"++[x]++" == "++ kk
('4':x:n) -> "["++showHex ins ""++"] Saltear si V"++[x]++" /= "++ kk
('5':x:y:_) -> "["++showHex ins ""++"] Saltear si V"++[x]++" == V"++ [y]
('6':x:n) -> "["++showHex ins ""++"] Setea V"++[x]++" a "++ kk
('7':x:n) -> "["++showHex ins ""++"] Suma "++kk++" a V"++[x]
['8',x,y,'0'] -> "["++showHex ins ""++"] Copia V"++[y]++" a V"++[x]
['8',x,y,'1'] -> "["++showHex ins ""++"] V"++[x]++" = V"++[x]++"|V"++[y]
['8',x,y,'2'] -> "["++showHex ins ""++"] V"++[x]++" = V"++[x]++"&V"++[y]
['8',x,y,'3'] -> "["++showHex ins ""++"] V"++[x]++" = V"++[x]++"xor V"++[y]
['8',x,y,'4'] -> "["++showHex ins ""++"] V"++[x]++"+=V"++[y]
['8',x,y,'5'] -> "["++showHex ins ""++"] V"++[x]++"-=V"++[y]
['8',x,y,'6'] -> "["++showHex ins ""++"] Shift V"++[x]++" a la derecha"
['8',x,y,'7'] -> "["++showHex ins ""++"] V"++[x]++"=V"++[y]++"-V"++[x]
['8',x,y,'e'] -> "["++showHex ins ""++"] Shift V"++[x]++" a la izquierda"
['9',x,y,'0'] -> "["++showHex ins ""++"] Salta instruccion si V"++[x]++"/=V"++[y]
('a':n) -> "["++showHex ins ""++"] Settea I a "++nn
('b':n) -> "["++showHex ins ""++"] Salta a "++nn++" + V0"
('c':x:n) -> "["++showHex ins ""++"] V"++[x]++"="++kk++" & (random)"
('d':x:y:n) -> "["++showHex ins ""++"] Dibuja un sprite en V"++[x]++", V"++[y]++" de altura "++hh
('e':x:"9e") -> "["++showHex ins ""++"] Saltea si la tecla en V"++[x]++" esta apretada"
('e':x:"a1") -> "["++showHex ins ""++"] Saltea si la tecla en V"++[x]++" no esta apretada"
('f':x:"07") -> "["++showHex ins ""++"] V"++[x]++"= delay timer"
('f':x:"0a") -> "["++showHex ins ""++"] Espera una tecla y la guarda en V"++[x]
('f':x:"15") -> "["++showHex ins ""++"] delay timer = V"++[x]
('f':x:"18") -> "["++showHex ins ""++"] sound timer = V"++[x]
('f':x:"1e") -> "["++showHex ins ""++"] Agrega V"++[x]++" a I"
('f':x:"29") -> "["++showHex ins ""++"] I = posicion del caracter V"++[x]++" (font)"
('f':x:"33") -> "["++showHex ins ""++"] Guarda centenas de V"++[x]++" (decimal) en I, decenas en I+1, unidades en I+2"
('f':x:"55") -> "["++showHex ins ""++"] Guarda valores de V0 a V"++[x]++" inclusive en memoria, empezando en I"
('f':x:"65") -> "["++showHex ins ""++"] Settea valores de V0 a V"++[x]++" con los valores en memoria empezando en I"
_ -> "["++showHex ins ""++"] Instruccion invalida (dec: "++ show ins++", hex:)"
where xx =show $ getX ins
yy =show $ getY ins
nn =show $ getN ins
kk =show $ getK ins
hh =show $ getH ins
-- Register handling
getReg :: State -> Int -> Word8
getReg State{ vx = vx } x = getAt x vx
setReg :: State -> Int -> Word8 -> State
setReg s@State{ vx = vx } x v = s { vx = replaceAt x vx v }
addToReg :: State -> Int -> Word8 -> State
addToReg s x v = setReg s x (getReg s x + v)
subFromReg :: State -> Int -> Word8 -> State
subFromReg s x v = addToReg s x (-v)
shiftRReg :: State -> Int -> State
shiftRReg s x = setReg (setReg s x (shiftR vx 1)) 0xf (vx.&.1)
where vx = getReg s x
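-- e.g. with VX == 5 (binary 101), shiftRReg sets VX to 2 and VF to 1 (the old least significant bit).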
shiftLReg :: State -> Int -> State
shiftLReg s x = setReg (setReg s x (shiftL vx 1)) 0xf (shiftR (vx.&.128) 7)
where vx = getReg s x
setI :: State -> Word16 -> State
setI s v = s{i=v}
sumReg :: State -> Int -> Int -> State
sumReg s x y
|v > 255 = setReg (setReg s x (asWord8 v)) 0xf 1
|otherwise = setReg (setReg s x (asWord8 v)) 0xf 0
where v = asInt (getReg s x) + asInt (getReg s y)
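-- e.g. with VX == 200 and VY == 100, v is 300 (> 255), so VX becomes 44 (300 mod 256) and VF (carry) is set to 1.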
subReg :: State -> Int -> Int -> Int -> State
subReg s x y d
|vx > vy = setReg (setReg s d v) 0xf 1
|otherwise = setReg (setReg s d (v-255)) 0xf 0
where vx = getReg s x
vy = getReg s y
v = vx-vy
-- Program counter handling
advancePC :: State -> State
advancePC s@State{pc = pc} = s{pc = pc+2}
setPC s v = s{pc=v}
-- Screen handling
clearScreen :: SDL.Surface -> IO ()
clearScreen s = do
screenRect <- SDL.getClipRect s
SDL.fillRect s (Just screenRect) (SDL.Pixel bgColor)
return()
drawScreen :: SDL.Surface -> [[Bool]] -> IO()
drawScreen s d = do
clearScreen s
go 0 0 d
where
drawPixel s x y = SDL.fillRect s (Just (SDL.Rect (x*pixelSize) (y*pixelSize) pixelSize pixelSize)) (SDL.Pixel fgColor)
go _ _ [] = SDL.flip s
go x y ([]:r) = go 0 (y+1) r
go x y ((True:ps):r) = do
drawPixel s x y
go (x+1) y (ps:r)
go x y ((False:ps):r) = go (x+1) y (ps:r)
togglePixel p x y = replaceAt y p (replaceAt x (getAt y p) (not (getPixel p x y)))
getPixel :: [[Bool]] -> Word8 -> Word8 -> Bool
getPixel p x y = getAt x (getAt y p)
drawSprite :: State -> Word8 -> Word8 -> [[Bool]] -> IO (State, Bool)
drawSprite state x y sprite = go (pixels state) 0 0 sprite False
where
go p _ _ [] c = return (state {pixels = p},c)
go p _ dy ([]:r) c = go p 0 (dy+1) r c
go p dx dy ((False:ps):r) c = go p (dx+1) dy (ps:r) c
go p dx dy ((True:ps):r) c = do
let px = mod (x+dx) (asWord8 pixelsW)
let py = mod (y+dy) (asWord8 pixelsH)
go (togglePixel p px py ) (dx+1) dy (ps:r) (c || getPixel p px py)
-- Memory handling
loadToMem :: [Word8] -> Int -> State -> State
loadToMem [] _ s = s
loadToMem (x:xs) p s@State{mem = m} = loadToMem xs (p+1) (s{ mem = replaceAt p m x })
loadFromMem :: Int -> Int -> State -> [Word8]
loadFromMem i r s = genericTake r $ drop i m
where m = mem s
fileOpen :: FilePath -> IO [Word8]
fileOpen fp = do
bs <- B.readFile fp
return (B.unpack bs)
getX i = fromIntegral $ shiftR (i.&.0x0F00) 8
getY i = fromIntegral $ shiftR (i.&.0x00F0) 4
getN :: Word16 -> Word16
getN i = i.&.0x0FFF
getK :: Word16 -> Word8
getK i = fromIntegral (i.&.0xFF)
getH :: Word16 -> Word8
getH i = fromIntegral (i.&.0xF)
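-- e.g. for the instruction 0xD123: getX == 1, getY == 2, getN == 0x123, getK == 0x23, getH == 3.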
getInst :: Word16 -> [Word8] -> Word16
getInst addr mem = (shiftL (fromIntegral l) 8 + fromIntegral r)::Word16
where l = mem !! fromIntegral addr
r = mem !! fromIntegral (addr+1)
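-- e.g. if the bytes at addr and addr+1 are 0xA2 and 0xF0, getInst yields the opcode 0xA2F0.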
addToStack :: Word16 -> State -> State
addToStack addr state@State{pc=pc, sp=sp, stack=stack} = state {pc=addr,sp=sp+1, stack = replaceAt sp stack pc}
removeFromStack state@State{sp=sp, stack=stack} = state {pc = getAt (sp-1) stack, sp=sp-1}
skipOn :: (a->a->Bool)->a->a->State->State
skipOn f a b s@State{pc=pc}
| f a b = s{pc=pc+4}
| otherwise = s{pc=pc+2}
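-- e.g. skipOn (==) 3 3 s skips the next instruction (pc+4), while skipOn (==) 3 4 s just advances to it (pc+2).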
skipIf :: Bool->State->State
skipIf True s@State{pc=pc} = s{pc=pc+4}
skipIf _ s@State{pc=pc} = s{pc=pc+2}
-- Timer handling
tickDT :: State -> State
tickDT s@State{dt=0} = s
tickDT s@State{dt=dt} = s{dt=dt-1}
setDT s v = s{dt=v}
-- Keyboard handling
keyDown :: State -> Map.Map SDL.SDLKey Int -> SDL.SDLKey -> State
keyDown s km k
| pos == -1 = s
| otherwise = s{keyboard = replaceAt pos (keyboard s) True}
where pos = fromMaybe 0 (Map.lookup k km)
keyUp :: State -> Map.Map SDL.SDLKey Int -> SDL.SDLKey -> State
keyUp s km k
| pos == -1 = s
| otherwise = s{keyboard = replaceAt pos (keyboard s) False}
where pos = fromMaybe 0 (Map.lookup k km)
-- Utility helpers
getAt x l = l !! fromIntegral x
replaceAt x l v = y ++ [v] ++ z
where ix = fromIntegral x
(y,z) = (genericTake ix l, drop (ix+1) l)
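-- e.g. replaceAt 2 [10,20,30,40] 99 == [10,20,99,40]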
addAt x l v = replaceAt x l (v + getAt x l)
toBitsArray :: Word8 -> [Bool]
toBitsArray x = map (testBit x) [7,6..0]
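-- e.g. toBitsArray 0xA0 == [True,False,True,False,False,False,False,False] (most significant bit first)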
asInt x = fromIntegral x::Int
asWord8 x = fromIntegral x::Word8
asWord16 x = fromIntegral x::Word16
| mgbarotto/Chip8 | EmuProcessor.hs | gpl-3.0 | 16,183 | 0 | 28 | 4,311 | 6,851 | 3,556 | 3,295 | 275 | 38 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
import Import
import Text.Blaze.Svg11 ((!), mkPath, l, m)
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getHomeR :: Handler Html
getHomeR = do
defaultLayout $ do
toWidget $ svgDoc
svgDoc :: S.Svg
svgDoc = -- S.docTypeSvg
S.svg ! A.version "1.1"
! A.width "150" ! A.height "100"
! A.stroke "black" ! A.strokeWidth "5"
$ do
S.circle ! A.cx "50" ! A.cy "50" ! A.r "20"
S.path ! A.d makeSimplePath
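-- Note: l and m from Text.Blaze.Svg11 are presumably the absolute lineto/moveto path commands,
-- so makeSimplePath should render to roughly "L 0,0 M 20,20 L 30,30 M 10,90".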
makeSimplePath :: S.AttributeValue
makeSimplePath = mkPath $ do
l 0 0
m 20 20
l 30 30
m 10 90
postHomeR :: Handler Html
postHomeR = do
((result, formWidget), formEnctype) <- runFormPost sampleForm
let handlerName = "postHomeR" :: Text
submission = case result of
FormSuccess res -> Just res
_ -> Nothing
defaultLayout $ do
aDomId <- newIdent
setTitle "Welcome To Yesod!"
$(widgetFile "homepage")
sampleForm :: Form (FileInfo, Text)
sampleForm = renderDivs $ (,)
<$> fileAFormReq "Choose a file"
<*> areq textField "What's on the file?" Nothing
| jwaldmann/turtle | web/Handler/Home.hs | gpl-3.0 | 1,558 | 0 | 13 | 380 | 391 | 202 | 189 | 39 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudMonitoring.TimeseriesDescriptors.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List the descriptors of the time series that match the metric and labels
-- values and that have data points in the interval. Large responses are
-- paginated; use the nextPageToken returned in the response to request
-- subsequent pages of results by setting the pageToken query parameter to
-- the value of the nextPageToken.
--
-- /See:/ <https://cloud.google.com/monitoring/v2beta2/ Cloud Monitoring API Reference> for @cloudmonitoring.timeseriesDescriptors.list@.
module Network.Google.Resource.CloudMonitoring.TimeseriesDescriptors.List
(
-- * REST Resource
TimeseriesDescriptorsListResource
-- * Creating a Request
, timeseriesDescriptorsList
, TimeseriesDescriptorsList
-- * Request Lenses
, tdlWindow
, tdlProject
, tdlCount
, tdlPayload
, tdlAggregator
, tdlTimespan
, tdlMetric
, tdlOldest
, tdlLabels
, tdlPageToken
, tdlYoungest
) where
import Network.Google.CloudMonitoring.Types
import Network.Google.Prelude
-- | A resource alias for @cloudmonitoring.timeseriesDescriptors.list@ method which the
-- 'TimeseriesDescriptorsList' request conforms to.
type TimeseriesDescriptorsListResource =
"cloudmonitoring" :>
"v2beta2" :>
"projects" :>
Capture "project" Text :>
"timeseriesDescriptors" :>
Capture "metric" Text :>
QueryParam "youngest" Text :>
QueryParam "window" Text :>
QueryParam "count" (Textual Int32) :>
QueryParam "aggregator"
TimeseriesDescriptorsListAggregator
:>
QueryParam "timespan" Text :>
QueryParam "oldest" Text :>
QueryParams "labels" Text :>
QueryParam "pageToken" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
ListTimeseriesDescriptorsRequest
:>
Get '[JSON]
ListTimeseriesDescriptorsResponse
-- | List the descriptors of the time series that match the metric and labels
-- values and that have data points in the interval. Large responses are
-- paginated; use the nextPageToken returned in the response to request
-- subsequent pages of results by setting the pageToken query parameter to
-- the value of the nextPageToken.
--
-- /See:/ 'timeseriesDescriptorsList' smart constructor.
data TimeseriesDescriptorsList = TimeseriesDescriptorsList'
{ _tdlWindow :: !(Maybe Text)
, _tdlProject :: !Text
, _tdlCount :: !(Textual Int32)
, _tdlPayload :: !ListTimeseriesDescriptorsRequest
, _tdlAggregator :: !(Maybe TimeseriesDescriptorsListAggregator)
, _tdlTimespan :: !(Maybe Text)
, _tdlMetric :: !Text
, _tdlOldest :: !(Maybe Text)
, _tdlLabels :: !(Maybe [Text])
, _tdlPageToken :: !(Maybe Text)
, _tdlYoungest :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TimeseriesDescriptorsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tdlWindow'
--
-- * 'tdlProject'
--
-- * 'tdlCount'
--
-- * 'tdlPayload'
--
-- * 'tdlAggregator'
--
-- * 'tdlTimespan'
--
-- * 'tdlMetric'
--
-- * 'tdlOldest'
--
-- * 'tdlLabels'
--
-- * 'tdlPageToken'
--
-- * 'tdlYoungest'
timeseriesDescriptorsList
:: Text -- ^ 'tdlProject'
-> ListTimeseriesDescriptorsRequest -- ^ 'tdlPayload'
-> Text -- ^ 'tdlMetric'
-> Text -- ^ 'tdlYoungest'
-> TimeseriesDescriptorsList
timeseriesDescriptorsList pTdlProject_ pTdlPayload_ pTdlMetric_ pTdlYoungest_ =
TimeseriesDescriptorsList'
{ _tdlWindow = Nothing
, _tdlProject = pTdlProject_
, _tdlCount = 100
, _tdlPayload = pTdlPayload_
, _tdlAggregator = Nothing
, _tdlTimespan = Nothing
, _tdlMetric = pTdlMetric_
, _tdlOldest = Nothing
, _tdlLabels = Nothing
, _tdlPageToken = Nothing
, _tdlYoungest = pTdlYoungest_
}
-- | The sampling window. At most one data point will be returned for each
-- window in the requested time interval. This parameter is only valid for
-- non-cumulative metric types. Units: - m: minute - h: hour - d: day - w:
-- week Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is
-- not allowed; you should use 17d instead.
tdlWindow :: Lens' TimeseriesDescriptorsList (Maybe Text)
tdlWindow
= lens _tdlWindow (\ s a -> s{_tdlWindow = a})
-- | The project ID to which this time series belongs. The value can be the
-- numeric project ID or string-based project name.
tdlProject :: Lens' TimeseriesDescriptorsList Text
tdlProject
= lens _tdlProject (\ s a -> s{_tdlProject = a})
-- | Maximum number of time series descriptors per page. Used for pagination.
-- If not specified, count = 100.
tdlCount :: Lens' TimeseriesDescriptorsList Int32
tdlCount
= lens _tdlCount (\ s a -> s{_tdlCount = a}) .
_Coerce
-- | Multipart request metadata.
tdlPayload :: Lens' TimeseriesDescriptorsList ListTimeseriesDescriptorsRequest
tdlPayload
= lens _tdlPayload (\ s a -> s{_tdlPayload = a})
-- | The aggregation function that will reduce the data points in each window
-- to a single point. This parameter is only valid for non-cumulative
-- metrics with a value type of INT64 or DOUBLE.
tdlAggregator :: Lens' TimeseriesDescriptorsList (Maybe TimeseriesDescriptorsListAggregator)
tdlAggregator
= lens _tdlAggregator
(\ s a -> s{_tdlAggregator = a})
-- | Length of the time interval to query, which is an alternative way to
-- declare the interval: (youngest - timespan, youngest]. The timespan and
-- oldest parameters should not be used together. Units: - s: second - m:
-- minute - h: hour - d: day - w: week Examples: 2s, 3m, 4w. Only one unit
-- is allowed, for example: 2w3d is not allowed; you should use 17d
-- instead. If neither oldest nor timespan is specified, the default time
-- interval will be (youngest - 4 hours, youngest].
tdlTimespan :: Lens' TimeseriesDescriptorsList (Maybe Text)
tdlTimespan
= lens _tdlTimespan (\ s a -> s{_tdlTimespan = a})
-- | Metric names are protocol-free URLs as listed in the Supported Metrics
-- page. For example,
-- compute.googleapis.com\/instance\/disk\/read_ops_count.
tdlMetric :: Lens' TimeseriesDescriptorsList Text
tdlMetric
= lens _tdlMetric (\ s a -> s{_tdlMetric = a})
-- | Start of the time interval (exclusive), which is expressed as an RFC
-- 3339 timestamp. If neither oldest nor timespan is specified, the default
-- time interval will be (youngest - 4 hours, youngest]
tdlOldest :: Lens' TimeseriesDescriptorsList (Maybe Text)
tdlOldest
= lens _tdlOldest (\ s a -> s{_tdlOldest = a})
-- | A collection of labels for the matching time series, which are
-- represented as: - key==value: key equals the value - key=~value: key
-- regex matches the value - key!=value: key does not equal the value -
-- key!~value: key regex does not match the value For example, to list all
-- of the time series descriptors for the region us-central1, you could
-- specify: label=cloud.googleapis.com%2Flocation=~us-central1.*
tdlLabels :: Lens' TimeseriesDescriptorsList [Text]
tdlLabels
= lens _tdlLabels (\ s a -> s{_tdlLabels = a}) .
_Default
. _Coerce
-- | The pagination token, which is used to page through large result sets.
-- Set this value to the value of the nextPageToken to retrieve the next
-- page of results.
tdlPageToken :: Lens' TimeseriesDescriptorsList (Maybe Text)
tdlPageToken
= lens _tdlPageToken (\ s a -> s{_tdlPageToken = a})
-- | End of the time interval (inclusive), which is expressed as an RFC 3339
-- timestamp.
tdlYoungest :: Lens' TimeseriesDescriptorsList Text
tdlYoungest
= lens _tdlYoungest (\ s a -> s{_tdlYoungest = a})
instance GoogleRequest TimeseriesDescriptorsList
where
type Rs TimeseriesDescriptorsList =
ListTimeseriesDescriptorsResponse
type Scopes TimeseriesDescriptorsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/monitoring"]
requestClient TimeseriesDescriptorsList'{..}
= go _tdlProject _tdlMetric (Just _tdlYoungest)
_tdlWindow
(Just _tdlCount)
_tdlAggregator
_tdlTimespan
_tdlOldest
(_tdlLabels ^. _Default)
_tdlPageToken
(Just AltJSON)
_tdlPayload
cloudMonitoringService
where go
= buildClient
(Proxy :: Proxy TimeseriesDescriptorsListResource)
mempty
| rueshyna/gogol | gogol-cloudmonitoring/gen/Network/Google/Resource/CloudMonitoring/TimeseriesDescriptors/List.hs | mpl-2.0 | 9,713 | 0 | 23 | 2,382 | 1,164 | 686 | 478 | 166 | 1 |
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright (C) 2014 2015 2016, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
-- FIXME: why are these required?
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables #-}
-- An executor, which parses OpenSCAD code and executes it.
module Graphics.Implicit.ExtOpenScad (runOpenscad) where
import Prelude(String, Either(Left, Right), IO, ($), fmap, return)
import Graphics.Implicit.Definitions (SymbolicObj2, SymbolicObj3)
import Graphics.Implicit.ExtOpenScad.Definitions (VarLookup, ScadOpts, Message(Message), MessageType(SyntaxError), CompState(CompState))
import Graphics.Implicit.ExtOpenScad.Parser.Statement (parseProgram)
import Graphics.Implicit.ExtOpenScad.Parser.Util (sourcePosition)
import Graphics.Implicit.ExtOpenScad.Eval.Statement (runStatementI)
import Graphics.Implicit.ExtOpenScad.Default (defaultObjects)
import Graphics.Implicit.ExtOpenScad.Util.OVal (divideObjs)
import Text.Parsec.Error (errorPos, errorMessages, showErrorMessages)
import Control.Monad (mapM_)
import Control.Monad.State.Lazy (runStateT)
import System.Directory (getCurrentDirectory)
-- | Small wrapper of our parser to handle parse errors, etc.
runOpenscad :: ScadOpts -> String -> IO (VarLookup, [SymbolicObj2], [SymbolicObj3], [Message])
runOpenscad scadOpts source =
let
initial = defaultObjects
rearrange :: (t, CompState) -> (VarLookup, [SymbolicObj2], [SymbolicObj3], [Message])
rearrange (_, CompState (varlookup, ovals, _, messages, _)) = (varlookup, obj2s, obj3s, messages) where
(obj2s, obj3s, _) = divideObjs ovals
show' err = showErrorMessages "or" "unknown parse error" "expecting" "unexpected" "end of input" (errorMessages err)
mesg e = Message SyntaxError (sourcePosition $ errorPos e) $ show' e
in case parseProgram "" source of
Left e -> return (initial, [], [], [mesg e])
Right sts -> fmap rearrange
$ (\sts' -> do
path <- getCurrentDirectory
runStateT sts' $ CompState (initial, [], path, [], scadOpts)
)
$ mapM_ runStatementI sts
| krakrjak/ImplicitCAD | Graphics/Implicit/ExtOpenScad.hs | agpl-3.0 | 2,230 | 0 | 19 | 409 | 543 | 324 | 219 | 30 | 2 |
module Lycopene.ApplicationSpec (spec) where
import Test.Hspec
import Test.QuickCheck
import Lycopene.SpecTool
import qualified Lycopene.Core as Core
spec :: Spec
spec = do
before localEngine $ do
describe "Event" $ do
it "create a project" $ \engine -> do
created <- runEngine engine (Core.NewProject "new" Nothing)
fetched <- runEngine engine (Core.FetchProject "new")
fetched `shouldBe` created
it "remove a project" $ \engine -> do
_ <- runEngine engine (Core.NewProject "new" Nothing)
runEngine engine (Core.RemoveProject "new")
fetched <- runEngine engine Core.AllProject
(mapR length fetched) `shouldBe` (Right 0)
it "fetch backlog sprints created on project creation" $ \engine -> do
_ <- runEngine engine (Core.NewProject "new" Nothing)
fetched <- runEngine engine (Core.FetchProjectSprint "new")
(mapR length fetched) `shouldBe` (Right 1)
it "fetch a backlog sprint" $ \engine -> do
_ <- runEngine engine (Core.NewProject "new" Nothing)
fetched <- runEngine engine (Core.FetchSprint "new" "backlog")
(mapR Core.sprintName fetched) `shouldBe` (Right "backlog")
it "create a issue" $ \engine -> do
_ <- runEngine engine (Core.NewProject "new" Nothing)
created <- runEngine engine (Core.NewIssue "new" "backlog" "issue")
fetched <- runEngine engine (Core.FetchIssues "new" "backlog" Core.IssueOpen)
(mapR (Core.issueTitle . head) fetched) `shouldBe` (Right "issue")
| utky/lycopene | tests/Lycopene/ApplicationSpec.hs | apache-2.0 | 1,633 | 0 | 22 | 442 | 503 | 244 | 259 | 31 | 1 |
-- |Constraints that specify a control flow analysis for JavaScript.
module Ovid.Constraints
( initialize
, stmt
, expr
, AnnotatedStatement
, AnnotatedExpression
, ParentNodeT
, JsCFAState(..)
, CFAOpts (..)
) where
-- #define CONSERVATIVE_MODE
-- #define DEBUG_XHR
-- #define DEBUG_BUILTINS
-- #define CONSERVATIVE_REFERENCES
-- #define CONSERVATIVE_LVALS
-- #define TRACE_APPLICATION
-- #define TRACE_ASSIGNMENT
-- #define PENTIUM_MMX_LOOP_OPTIMIZATIONS
import Prelude hiding (catch)
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.Foldable as F
import Data.Generics
import Data.Typeable
import Framework
import WebBits.JavaScript.JavaScript hiding (Expression,Statement)
import qualified WebBits.JavaScript.JavaScript as Js
-- 'buildJsEnv' is required for `on-demand JavaScript'
import Ovid.Environment (Ann,AdditionalAnnotation(..),AnnotatedExpression,
AnnotatedStatement)
import CFA.Labels
import Ovid.Abstraction
import CFA
import Ovid.Prelude
import Data.InductiveGraph.Class
import Ovid.ConstraintUtils
import Ovid.Interactions
-- required for `on demand JavaScript'
import WebBits.Html.Html (parseHtmlFromString,parseHtmlFromFile)
import WebBits.JavaScript.Crawl (getPageJavaScript)
import Ovid.DOM (topLevelIds)
import qualified System.IO as IO
import Data.Store
-- -----------------------------------------------------------------------------
-- Miscellaneous
primNumeric OpAdd = Just (+)
primNumeric OpMul = Just (*)
primNumeric OpDiv = Just (/)
primNumeric OpSub = Just (-)
primNumeric _ = Nothing
primLogical OpLT = Just (<)
primLogical OpLEq = Just (<=)
primLogical OpGT = Just (>)
primLogical OpGEq = Just (>=)
primLogical _ = Nothing
logicalAssignOps
= [OpAssignBAnd, OpAssignBXor, OpAssignBOr]
numericAssignOps
= [OpAssignAdd, OpAssignSub, OpAssignMul, OpAssignDiv, OpAssignMod,
OpAssignLShift, OpAssignSpRShift, OpAssignZfRShift]
logicalInfixOps = [OpLT .. OpLOr]
arithmeticInfixOps = [OpMul .. OpBOr]
lookupErr :: (Monad m, Ord k) => String -> k -> M.Map k v -> m v
lookupErr errStr k t =
case M.lookup k t of
Just k -> return k
Nothing -> fail ("Ovid.Constraints: " ++ errStr)
idv :: Monad m => Id Ann -> m Label
idv (Id (_,lbl,_) _) = return lbl
type Expression = Js.Expression Ann
type Statement = Js.Statement Ann
type ParentNodeT n m = StateT n m
-- |We need to be able to have multiple invocations of document.write on the stack.
isRecursionAllowed _ lbl | isBuiltinLabel lbl = True
isRecursionAllowed srcs lbl = case labelSource lbl of
Nothing -> False
Just src -> src `elem` srcs
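-- i.e. built-in labels may always recurse; a user-defined function may recurse only when its source
-- appears in the list of sources passed in (here, cfaOptUnlimitedRecursion).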
isPreciseArithmetic lbl = do
JsCFAState { jscfaOpts = opts } <- lift get
case labelSource lbl of
Nothing -> return False
Just src -> return (src `elem` cfaOptsPreciseArithmetic opts)
isPreciseConditionals lbl = do
JsCFAState { jscfaOpts = opts } <- lift get
case labelSource lbl of
Nothing -> return False
Just src -> return (src `elem` cfaOptsPreciseConditionals opts)
application :: (MonadIO m)
=> (Label,Contour) -- ^set of functions
-> [(Label,Contour)] -- ^sets of arguments
-> Contour -- ^dynamic call stack
-> (Label,Contour) -- ^result set (out-flow)
-> CfaT Value (StateT (JsCFAState Contour) m) ()
application svF actuals ct svCxt = do
-- Mark 'svF' as a function set. After the analysis is complete, we can check if any function values flowed into this
-- set. If not, it's a good sign that we missed modelling some part of the DOM.
markFunctionSet svF
-- Options governing the analysis.
JsCFAState { jscfaOpts = opts } <- lift get
let expSrcs = cfaOptUnlimitedRecursion opts
-- Invoke callback for a piggybacked analysis.
-- applicationHook <- callHook svF actuals ct svCxt
let app fn@(ABuiltin{aBuiltinName=name,aBuiltinLabel=lbl, aBuiltinThis=this}) = do
{- #ifdef TRACE_APPLICATION
warn $ "builtin application; lbl " ++ show lbl
#endif -}
-- warn $ "Applying " ++ show lbl
builtinDispatch name ct ((this,ct):actuals) svCxt
-- dummy 'return []' or we violate let-polymorphism!
-- applicationHook fn (dispatch >> return [])
return ()
-- Check to see if this function is on the stack. If it is, we don't
-- apply it, unless its label indicates that it should be permitted to
-- recurse.
app fn@(AFunction{aFunctionLabel=lbl})
| lbl `elem` callStack ct && (not $ isRecursionAllowed expSrcs lbl) = do
warn $ "Ignoring an application of " ++ show lbl ++ " from " ++ show (fst svCxt)
return ()
app fn@(AFunction{aFunctionLabel=lbl,aFunctionArgs=formals,aFunctionLocals=locals,aFunctionBody=(EqualOrd body),
aFunctionEnv=ceClosure,aFunctionThis=thisLbl}) = do
{- #ifdef TRACE_APPLICATION
warn $ "Call stack of length " ++ (show $ length $ callStack ct)
#endif -}
size <- currentSetSize svF
-- warn $ "Applying " ++ show lbl
when (size > 5)
(warn $ "WARNING: " ++ show size ++ " functions in a function set")
let svFormals = map (\lbl -> (lbl,ct)) formals
let svLocals = map (\lbl -> (lbl,ct)) locals
let this = (thisLbl,ct)
let ce = M.union (M.fromList (svFormals ++ svLocals)) ceClosure
-- create the array of arguments
argumentsLbl <- uniqueLabel
let arguments = (argumentsLbl,ct)
newArray arguments actuals
-- flow actuals into the formals in this contour
mapM_ (uncurry subsetOf) (zip (this:arguments:actuals) svFormals)
-- flow undefined into the arguments that are not supplied
let undefinedArgs = drop (length $ this:arguments:actuals) svFormals
mapM_ (newValue UndefinedVal) undefinedArgs
-- flow the results into the context of the call
svResults <- {- applicationHook fn -} (stmt ce (newCall lbl ct) body)
mapM_ (\set -> subsetOf set svCxt) svResults
app UndefinedVal = return ()
app v = do
warn $ "Ovid.Constraints.application: non-function in " ++
"function set : " ++ show v
warn $ "function at " ++ show svF
propagateTo svF svCxt app
showProp (PropId _ id) = show id
showProp (PropString _ s) = s
showProp (PropNum _ n) = show n
-- |Generates constraints for the expression.
expr :: (MonadIO m)
=> M.Map Label Contour
-> Contour
-> AnnotatedExpression
-> CfaT Value (StateT (JsCFAState Contour) m) (Label,Contour)
expr ce ct e = {- exprHook ce ct e >>= \r -> -} case e of
StringLit (_,lbl,_) s -> do
    -- This represents a standard problem. The object we create must have the label lbl' since it's the only concrete
    -- label available in this expression. Suppose the outer expression is an application. In an application, we must
    -- prime the value sets of the argument array. We cannot prime the formal-labels, as they are shared between
-- applications. So, we have to prime the arguments. However, since this is an argument, it is already primed.
newString (lbl,ct) (SConst s)
return (lbl,ct)
RegexpLit (_,lbl,_) s b c -> do
newValue (ARegexp s b c) (lbl,ct)
return (lbl,ct)
NumLit (_,lbl,_) n -> do
newValue (ANum n) (lbl,ct)
return ( lbl,ct)
BoolLit (_,lbl,_) b -> do
newValue (ABool b) (lbl,ct)
return ( lbl,ct)
NullLit (_,lbl,_) -> do
newValue (NullVal) (lbl,ct)
return ( lbl,ct)
ArrayLit (_,lbl,_) es -> do
elems <- mapM (expr ce ct) es
newArray (lbl,ct) elems
return (lbl,ct)
ObjectLit (_,lbl,_) props -> do
let prop (p,e) = do
eSet <- expr ce ct e
return (showProp p,eSet)
propSets <- mapM prop props
newObject (lbl,ct) propSets
return (lbl,ct)
ThisRef (_,lbl,_) -> do
thisCt <- lookupErr ("`this' is unbound") lbl ce
return (lbl,thisCt)
VarRef (_,lbl,_) id -> do -- a bound identifier; not an assignment
idCt <- lookupErr ("unbound identifier: " ++ show id) lbl ce
subsetOf (lbl,idCt) (lbl,ct)
return (lbl,ct) -- we *always* return the context
DotRef (_,lbl,_) obj (Id _ propId) -> do -- obj.prop
objSet <- expr ce ct obj
propagateProperty objSet (lbl,ct) propId $ \propSet -> subsetOf propSet (lbl,ct)
return (lbl,ct)
BracketRef (_,lbl,_) obj prop -> do -- obj[prop]
let stx = (lbl,ct)
objStx <- expr ce ct obj
propIdStx <- expr ce ct prop
flow1 objStx stx (const Nothing)
propagateTo propIdStx stx $ \propVal -> case propVal of
AObject{aObjectX=(PString sp)} -> case unStringPat sp of
Just propId -> do
unsafePropagateProperty objStx stx propId $ \vals -> subsetOf vals stx
Nothing -> do
warnAt "indeterminate string used for property reference" (show ct)
propagatePropertySetTo objStx stx $ \set -> propagate set $ \val -> case val of
AProperty _ (ValueSet valSet) -> subsetOf valSet stx
otherwise -> fail $ "non-property value in property set at " ++ show stx ++ "; value is " ++ show val
ANum propIx ->
unsafePropagateProperty objStx stx (show $ truncate propIx) $ \vals -> subsetOf vals stx
AnyNum -> do
-- This is far too common with arrays.
-- warnAt "indeterminate number used for property reference" (show ct)
propagatePropertySetTo objStx stx $ \set -> propagate set $ \val -> case val of
AProperty _ (ValueSet valSet) -> subsetOf valSet stx
otherwise -> fail $ "non-property value in property set at " ++ show stx ++ "; value is " ++ show val
UndefinedVal -> return ()
otherwise ->
warn $ "non-indexable value (" ++ show propVal ++ ") at " ++ show stx
return stx
NewExpr (_,lbl,_) constr args -> do
-- We are essentially setting up a function call; the only difference is the construction of `this.'
constrSet <- expr ce ct constr -- the function / constructor
argSets <- mapM (expr ce ct) args
-- the contour of the function call
ct' <- extendContour lbl ct
-- discard the return value of the function; note that the result flows
-- into (lbl,ct') so that we can tell when we are applying at the edge of
-- the contour. However, the result set is (lbl,ct).
application constrSet argSets ct' (lbl,ct')
propagateTo constrSet (lbl,ct) $ \val -> case (closureThis val,objectProperties val) of
(Just this,Nothing) -> do -- builtin with no prototype
subsetOf (this,ct') (lbl,ct)
(Just this,Just propSet) -> do -- function/builtin with a prototype
-- create an empty object
obj@(AObject{aObjectProps=props}) <- newObject (this,ct') []
-- copy the members of the prototype over
propagatePropertyOf val (lbl,ct) "prototype" $ \proto -> propagatePropertySetTo proto props $ \propSet -> do
flow1 propSet props justProperty
-- obj.prototype = constr
newValue (AProperty "prototype" (ValueSet constrSet)) props
-- flow the set of objects (thises) into the result
subsetOf (this,ct') (lbl,ct)
otherwise -> do
warn $ "non-function value " ++ show val ++ " in " ++ show e
return (lbl,ct)
PostfixExpr (_,lbl,_) op e -> do -- e++ or e--
eSet <- expr ce ct e
flow1 eSet eSet $ \val -> case val of
ANum _ -> Just AnyNum
otherwise -> Nothing
subsetOf eSet (lbl,ct)
return (lbl,ct)
PrefixExpr (_,lbl,_) op e -> do
eSet <- expr ce ct e
stringPrototype <- builtinPrototype "String"
flow1 eSet eSet $ \val -> case val of
ANum _ -> Just AnyNum
ABool _ -> Just AnyBool
otherwise -> Nothing
flow1 eSet (lbl,ct) $ \val -> case op of
-- ++e and --e are the only operators that side-effect e
PrefixInc -> Just AnyNum
PrefixDec -> Just AnyNum
PrefixLNot -> case val of
_ | isTrueValue val -> Just (ABool False)
_ | isFalseValue val -> Just (ABool True)
otherwise -> Just AnyBool
PrefixBNot -> Just AnyNum
PrefixPlus -> Nothing -- e == +e
PrefixMinus -> Just AnyNum
PrefixTypeof -> case val of
AFunction{} -> Just $ AObject (primeLabel lbl 1) stringPrototype (PString $ SConst "function")
otherwise -> Just AnyBool
-- TODO : distinction between void and undefined? check!
PrefixVoid -> Just UndefinedVal
PrefixDelete -> Just UndefinedVal
return (lbl,ct)
InfixExpr (_,lbl,_) op l r -> do
let cxt = (lbl,ct)
lVar <- expr ce ct l
rVar <- expr ce ct r
isPrecise <- isPreciseArithmetic lbl -- do you expect us to actually add?
isPreciseIf <- isPreciseConditionals lbl -- do you expect us to actually branch?
let prototype = primeSet cxt 1 -- we may use this
case op of
OpLOr -> flow2 lVar rVar cxt $ \lv rv ->
if isTrueValue lv then
Just lv
else if isFalseValue lv then
Just rv
else
Just AnyBool
_ | op `elem` [OpLT,OpLEq,OpGT,OpGEq,OpIn,OpEq,OpNEq,OpStrictEq,OpStrictNEq,OpLAnd,OpIn] ->
flow2 lVar rVar cxt $ \l r -> case (l,r,primLogical op) of
(ANum m,ANum n,Just op) -> Just $ if m `op` n then (ABool True) else (ABool False)
(AObject{aObjectX=(PString sp1)},AObject{aObjectX=(PString sp2)},Nothing) | op == OpLEq ->
Just $ ABool (sp1 == sp2)
otherwise -> Just AnyBool
_ | op `elem` [OpMul,OpDiv,OpMod,OpSub] -> flow2 lVar rVar cxt $ \l r -> case (l,r,primNumeric op) of
(ANum m,ANum n,Just op) -> Just $ ANum (m `op` n)
otherwise -> Just AnyNum
_ | op `elem` [OpLShift,OpSpRShift,OpZfRShift,OpBAnd,OpBXor,OpBOr] ->
flow2 lVar rVar cxt $ \_ _ -> Just AnyNum
OpInstanceof -> flow2 lVar rVar cxt $ \val prototype -> case (val,prototype) of
(AObject{aObjectX=PString{}}, ABuiltin{aBuiltinName="String"}) -> Just (ABool True)
(_ , ABuiltin{aBuiltinName="String"}) -> Just (ABool False)
(AObject{aObjectX=PArray},ABuiltin{aBuiltinName="Array"}) -> Just (ABool True)
(AObject{aObjectX=PArray},_) -> Just (ABool False)
otherwise -> Just AnyBool
OpAdd -> flow2 lVar rVar cxt $ \l r -> case (l,r) of
(ANum m,ANum n) | isPrecise -> Just $ ANum (m+n)
| otherwise -> Just AnyNum
(ANum _,AnyNum) -> Just AnyNum
(AnyNum,_) -> Just AnyNum
(AObject{aObjectX=(PString s)},AObject{aObjectX=(PString t)}) ->
Just $ (AObject lbl prototype (PString $ stringPatCat s t))
(AObject{aObjectX=(PString s)}, ANum n) ->
Just $ (AObject lbl prototype (PString $ stringPatCat s (SConst $ show n)))
(AObject{aObjectX=(PString s)},_) ->
Just $ (AObject lbl prototype (PString $ stringPatCat s SAny))
otherwise -> Nothing
otherwise -> fail $ "Ovid.Constraints.expr : unaccounted infix operator -- " ++ show op
-- flow primitive prototypes as needed
stringPrototype <- builtinPrototype "String"
propagate cxt $ \val -> case val of
AObject{aObjectX=(PString _),aObjectProps=props} -> subsetOf stringPrototype props
otherwise -> return ()
return cxt
CondExpr (_,lbl,_) test true false -> do
expr ce ct test
trueVar <- {- branchHook r -} (expr ce ct true)
falseVar <- {- branchHook r -} (expr ce ct false)
subsetOf trueVar (lbl,ct)
subsetOf falseVar (lbl,ct)
-- joinHook [trueR,falseR] r
return (lbl,ct)
AssignExpr (_,lbl,_) op l r
| op == OpAssign -> do
lVar <- lval ce ct l
rVar <- expr ce ct r
subsetOf rVar lVar
subsetOf lVar (lbl,ct)
return (lbl,ct)
| op `elem` logicalAssignOps -> do
lVar <- lval ce ct l
rVar <- expr ce ct r
flow1 rVar lVar $ \_ -> Just AnyBool
subsetOf lVar (lbl,ct)
return (lbl,ct)
| op `elem` numericAssignOps -> do
lVar <- lval ce ct l
rVar <- expr ce ct r
flow1 rVar lVar $ \_ -> Just AnyNum
subsetOf lVar (lbl,ct)
return (lbl,ct)
| otherwise -> do
fail $ "Program bug: the operator " ++ show op ++ " was unclassified"
ParenExpr _ e ->
expr ce ct e
ListExpr _ [] ->
fail "Ovid.Constraints.expr : empty list expression (program bug)"
ListExpr (_,lbl,_) es -> do
eVars <- mapM (expr ce ct ) es
subsetOf (last eVars) (lbl,ct)
return (lbl,ct)
CallExpr (_,lbl,_) f@(DotRef (_,methodLbl,_) obj (Id _ propId)) args -> do
let cxt = (lbl,ct)
    objSet <- expr ce ct obj -- the object whose method we are calling
argSets <- mapM (expr ce ct) args -- the arguments to the method
ct' <- extendContour lbl ct -- contour of the call
application (methodLbl,ct) argSets ct' (lbl,ct) -- flow the method into (methodLbl,ct), for consistency
propagateProperty objSet cxt propId $ \fnSet -> do
subsetOf fnSet (methodLbl,ct)
propagateTo fnSet cxt $ \fnVal -> case closureThis fnVal of -- setup `this'
Just thisLbl -> subsetOf objSet (thisLbl,ct')
Nothing -> return () -- `application' will handle the warning
return (lbl,ct)
CallExpr (_,lbl,_) f@(BracketRef (_,methodLbl,_) obj prop) args -> do
let cxt = (lbl,ct)
objSet <- expr ce ct obj
propSet <- expr ce ct prop
argSets <- mapM (expr ce ct) args
ct' <- extendContour lbl ct -- contour of the call
application (methodLbl,ct) argSets ct' (lbl,ct) -- the application *must* be here for the call graph
propagateTo propSet cxt $ \propVal -> case asPropId propVal of -- select the value of the property
Nothing -> warn $ "indeterminate index at " ++ show propSet ++ "; value is " ++ show propVal
Just propId -> propagateProperty objSet cxt propId $ \fnSet -> do
subsetOf fnSet (methodLbl,ct)
propagateTo fnSet cxt $ \fnVal -> case closureThis fnVal of -- setup `this'
Just thisLbl -> subsetOf (objSet) (thisLbl,ct')
Nothing -> return () -- application will display a warning
return (lbl,ct)
CallExpr (_,lbl,ann) f args -> do
{- #ifdef TRACE_APPLICATION
warn $ "call: " ++ (show f)
#endif -}
(f:args) <- mapM (expr ce ct) (f:args)
ct' <- extendContour lbl ct
application f args ct' (lbl,ct)
return (lbl,ct)
FuncExpr (_,lbl,FnA{fnannEnclosing=enclosing,fnannLocals=locals,fnannThisLbl=thisLbl})
args body -> do
formals <- mapM idv args
-- the function value creates an implicit object
let objSet = primeSet (lbl,ct) 1
-- members of Function.prototype are copied to the function object
-- TODO: object.prototype = Function
JsCFAState {jscfasBuiltins=builtins} <- lift get
prototypeSet <- builtinPrototype "Function"
subsetOf prototypeSet objSet
newValue (AFunction lbl formals locals (EqualOrd body) ce objSet thisLbl) (lbl,ct) -- acceptable newValue
return (lbl,ct)
FuncExpr (_,_,ann) _ _ ->
fail $ "(bug) Ovid.Constraints.expr: unexpected annotation on a function"
++ show ann ++ ", function was " ++ show e
-- The label on e must be returned. (er. why?)
lval :: (MonadIO m)
=> M.Map Label Contour
-> Contour
-> AnnotatedExpression
-> CfaT Value (StateT (JsCFAState Contour) m) (Label,Contour)
lval ce ct expr@(VarRef (_,lbl,_) id) = do
ct' <- lookupErr ("lval : " ++ show lbl ++ " is not in ce :" ++ show id ++ "\n" ++ show expr) lbl ce
return (lbl,ct')
lval ce ct (DotRef (_,lbl,_) obj (Id _ propId)) = do -- obj.prop
objSet <- expr ce ct obj
propagateProperty objSet (lbl,ct) propId $ \valSet -> do
{-flow1 (lbl,ct) valSet (const Nothing)
propagate (lbl,ct) $ \val -> do
clearValues valSet
newValue val valSet -}
removeValue UndefinedVal valSet
subsetOf (lbl,ct) valSet
return (lbl,ct)
lval ce ct e@(BracketRef (_,lbl,_) obj prop) = do -- obj[prop]
let stx = (lbl,ct)
objSet <- expr ce ct obj
propSet <- expr ce ct prop
flow1 objSet (lbl,ct) (const Nothing)
propagateTo propSet stx $ \propVal -> case asPropId propVal of
Just propId -> unsafePropagateProperty objSet (lbl,ct) propId $ \valSet -> do
{-flow1 (lbl,ct) valSet (const Nothing)
propagate (lbl,ct) $ \val -> do
clearValues valSet
newValue val valSet-}
removeValue UndefinedVal valSet
subsetOf (lbl,ct) valSet
Nothing | propVal == UndefinedVal -> return ()
| otherwise -> do
warn $ "Indeterminate index at " ++ show stx ++ "--assigning to all values. The index was " ++ show propVal
propagatePropertySetTo objSet stx $ \propSet -> propagateTo propSet (lbl,ct) $ \propVal -> case propVal of --TODO : overstructured
AProperty _ (ValueSet valSet) -> subsetOf (lbl,ct) valSet
otherwise -> warn $ "non-property at " ++ show (lbl,ct) ++ "; value is " ++ show propVal
return (lbl,ct)
lval _ _ e = fail $ "Invalid l-value: " ++ show e
caseClause ce ct (CaseClause l e ss) =
expr ce ct e >> mapM (stmt ce ct ) ss >>= return . concat
caseClause ce ct (CaseDefault l ss) =
mapM (stmt ce ct ) ss >>= return . concat
catchClause ce ct (CatchClause l id s) = stmt ce ct s
varDecl ce ct (VarDecl _ _ Nothing) = return []
varDecl ce ct e'@(VarDecl (_,idLabel,_) _ (Just e)) = do
case M.lookup idLabel ce of
Just idCt -> do
eSet <- expr ce ct e
subsetOf eSet (idLabel,idCt)
return []
Nothing -> fail $ "varDecl: could not find contour for " ++ (show e') ++
"\n\n" ++ "the environment is " ++ show ce ++ "\n\n" ++
"the label is " ++ show idLabel
forInit ce ct NoInit = return []
forInit ce ct (VarInit decls) = mapM (varDecl ce ct) decls >>= return . concat
forInit ce ct (ExprInit e) = expr ce ct e >> return []
forInInit (ForInVar id) = id
forInInit (ForInNoVar id) = id
yl :: (Monad m) => (a -> m [b]) -> Maybe a -> m [b]
yl f Nothing = return []
yl f (Just x) = f x
stmt :: (MonadIO m)
=> M.Map Label Contour -- ^environment
-> Contour -- ^contour
-> Statement
-> CfaT Value (StateT (JsCFAState Contour) m) [(Label,Contour)]
stmt ce ct s = {- stmtHook ce ct s >>= \r -> -} case s of
BlockStmt l ss -> do
vss <- mapM (stmt ce ct) ss
return (concat vss)
EmptyStmt _ ->
return []
ExprStmt l e ->
expr ce ct e >> return []
IfStmt (_,lbl,_) e s1 s2 -> do
let stx = (lbl,ct)
testStx <- expr ce ct e
markBranch testStx
propagateTo testStx stx $ \testVal -> -- constraint identity will evaluate true/false branches at most once
if isTrueValue testVal then do
results <- stmt ce ct s1
mapM_ (\result -> subsetOf result stx) results
else if isFalseValue testVal then do
results <- stmt ce ct s2
mapM_ (\result -> subsetOf result stx) results
else {- is indeterminate value -} do
trueResults <- stmt ce ct s1
falseResults <- stmt ce ct s2
mapM_ (\result -> subsetOf result stx) (trueResults ++ falseResults)
return [stx]
{-
isPrecise <- isPreciseConditionals lbl
{- (s1r,s1vs) <- branchHook r (stmt ce ct s1)
(s2r,s2vs) <- branchHook r (stmt ce ct s2)
joinHook [s1r,s2r] r
return (s1vs ++ s2vs) -}
if isPrecise
-- when using precise conditionals, we don't generate the branch in the
-- control flow graph
then do
let result = primeSet (lbl,ct) 1
propagateTo test (lbl,ct) $ \testVal -> case testVal of
ABool True -> do
vs <- stmt ce ct s1
mapM_ (\set -> subsetOf set result) vs
ABool False -> do
vs <- stmt ce ct s2
mapM_ (\set -> subsetOf set result) vs
otherwise ->
warnAt "imprecise boolean value with precise conditionals" (show ct)
return [result]
else do
(s1r,s1vs) <- branchHook r (stmt ce ct s1)
(s2r,s2vs) <- branchHook r (stmt ce ct s2)
joinHook [s1r,s2r] r
return (s1vs ++ s2vs) -}
IfSingleStmt (_,lbl,_) testExpr bodyStmt -> do
let stx = (lbl,ct)
testStx <- expr ce ct testExpr
markBranch testStx
propagateTo testStx stx $ \testVal ->
if not (isFalseValue testVal) then do
results <- stmt ce ct bodyStmt
mapM_ (\result -> subsetOf result stx) results
else return ()
return [stx]
SwitchStmt l e cs ->
expr ce ct e >> mapM (caseClause ce ct ) cs >>= return . concat
WhileStmt l e s ->
expr ce ct e >> stmt ce ct s
DoWhileStmt l s e ->
expr ce ct e >> stmt ce ct s
BreakStmt l yid ->
return []
ContinueStmt l yid ->
return []
LabelledStmt l id s ->
stmt ce ct s
ForInStmt (_,lbl,_) init e body -> do -- we unroll for-in loops
let stx = (lbl,ct)
let (Id (_,varLbl,_) varId) = forInInit init
stringPrototype <- builtinPrototype "String"
eStx <- expr ce ct e
propagateTo eStx stx $ \obj -> case objectProperties obj of
Nothing | obj == NullVal -> return ()
| otherwise -> return () -- warn $ "for-in : non-object value (" ++ show obj ++ ") at " ++ show eStx
Just propSet -> propagateTo propSet stx $ \property -> case property of -- propSet is unique for each object
AProperty propId _ | (not $ isAbstractArray obj) || (isJust $ tryInt propId) -> do
ct' <- extendContour (propLabel varLbl propId) ct
newString (varLbl,ct') (SConst propId)
results <- stmt (M.insert varLbl ct' ce) ct' body
mapM_ (\result -> subsetOf result stx) results
-- TODO: not quite right, for-in iterates over non-int indices that are not in the prototype
AProperty _ _ | isAbstractArray obj -> return ()
otherwise -> fail $ "non-property value in property set at " ++ show eStx
return [stx]
-- special case: for (var id = initExpr; testExpr; id++) bodyStmt (we do postfix decrement here too)
{-
ForStmt (_,lbl,_) (VarInit [VarDecl (_,idLbl,_) _ (Just initExpr)]) (Just testExpr) (
(Just $ PostfixExpr _ postfixOp (VarRef (_,incrIdLbl,_) _)) bodyStmt | idLbl == incrIdLbl -> do
let stx = (lbl,ct)
initStx <- expr ce ct initExpr
let iterate idStx ix =
ct' <- extendContour (primeLabel idLbl ix) ct
-- must extend the contour of the call, or the inner constraints won't get created uniquely for each run
testStx <- expr (M.insert idLbl ct' ce) ct' testExpr
propagate testStx $ \testVal -> case testVal of
_ | isFalseValue testVal -> do
return ()
otherwise -> do
propagateTo initStx stx $ \init -> do
testStx <- expr (M.insert idLbl ct' ce) ct' testExpr
-}
ForStmt (_,lbl,_) init ye1 ye2 s -> do
forInit ce ct init
ym (expr ce ct ) ye1
ym (expr ce ct ) ye2
stmt ce ct s
TryStmt l s cs ys -> do
sv <- stmt ce ct s
cvs <- mapM (catchClause ce ct ) cs
fvs <- yl (stmt ce ct ) ys
return (sv ++ (concat cvs) ++ fvs)
ThrowStmt l e ->
expr ce ct e >> return []
ReturnStmt l Nothing ->
return [] -- TODO : perhaps undefined?
ReturnStmt _ (Just e) ->
expr ce ct e >>= \v -> return [v]
WithStmt loc e s ->
expr ce ct e >> stmt ce ct s
VarDeclStmt loc ds ->
mapM_ (varDecl ce ct ) ds >> return []
FunctionStmt (_,_,FnA{fnannEnclosing=enclosing, fnannLocals=locals, fnannThisLbl=thisLbl}) id args body -> do
lbl <- idv id -- the only difference from FuncExpr is the label
formals <- mapM idv args
-- the function value creates an implicit object
let objSet = primeSet (lbl,ct) 1
-- members of Function.prototype are copied to the function object
-- TODO: object.prototype = Function
JsCFAState {jscfasBuiltins=builtins} <- lift get
prototypeSet <- builtinPrototype "Function"
subsetOf prototypeSet objSet
newValue (AFunction lbl formals locals (EqualOrd body) ce objSet thisLbl) (lbl,ct) -- acceptable newValue
return []
FunctionStmt (_,_,ann) _ _ _ ->
fail $ "(bug) Ovid.Constraints.stmt: unexpected annotation on a function"
++ show ann ++ ", function was " ++ show s
-- |The <unsafe> is necessary to handle leading holes.
onDemandJavaScript :: (MonadIO m)
=> Contour -> StringPat -> String
-> CfaT Value (StateT (JsCFAState Contour) m) ()
onDemandJavaScript ct sp sourceName = do
let concreteHtml = "<unsafe>" ++ unStringPatForOnDemandJavaScript sp ++ "</unsafe>" -- deals with holes!!
warn $ "HTML: " ++ show concreteHtml
case parseHtmlFromString sourceName concreteHtml of
Left err -> do warn $ "onDemandJavaScript: " ++ show err
liftIO $ putStrLn $ "onDemandJavaScript: " ++ show err
Right (htmlStx,_) -> do
parsedStmts <- liftIO $ getPageJavaScript htmlStx
dynamicJavaScriptLoader ct parsedStmts
dynamicJavaScriptLoader ct parsedStmts = do
globals <- getGlobals
fail "dynamicJavaScriptLoader is temporarily disabled"
{- (globals,stmts) <- makeDynamicEnv globals parsedStmts
setGlobals globals
-- (envTree,stmts) <- buildJsEnv topLevelIds parsedStmts
warn $ (show $ length stmts) ++ " statements are on-demand"
let ce = M.fromList $ map (\lbl -> (lbl,topContour)) globals
-- This is correct. Execution is _not_ in the top contour. If it were, our beautiful recursion check
-- goes out the door. Moreover, the analysis may not terminate in general, if, for example, factorial
-- made its recursive call `on-demand' and we let it happen in the top contour.
mapM (stmt ce ct) stmts
return () -}
dynamicIFrameLoader :: (MonadIO m)
=> Contour -> StringPat
-> CfaT Value (StateT (JsCFAState Contour) m) ()
dynamicIFrameLoader ct sp = do
let src = unStringPatForOnDemandJavaScript sp
result <- liftIO $ try (parseHtmlFromFile src)
case result of
Left (err::IOException) -> tell $ "Error loading script at " ++ show src ++ ":\n" ++ show err
Right (Left parseErr) -> tell $ "Parse error reading " ++ show src ++ "; " ++ show parseErr
Right (Right (parsedHtml,_)) -> do
parsedStmts <- liftIO $ getPageJavaScript parsedHtml
dynamicJavaScriptLoader ct parsedStmts
onDemandJavaScriptFromFile ct sp = do
let src = unStringPatForOnDemandJavaScript sp
-- Now, for some real input/output (and not simple mutation). Bye bye functional programming.
result <- liftIO $ try (parseJavaScriptFromFile src)
case result of
Left (err::IOException) -> tell $ "Error loading script at " ++ show src ++ ":\n" ++ show err
Right parsedStmts -> dynamicJavaScriptLoader ct parsedStmts
-- |It is safe to 'newValue ... cxt'.
builtinDispatch :: (MonadIO m)
=> [Char]
-> Contour
-> [(Label,Contour)]
-> (Label,Contour)
-> CfaT Value (StateT (JsCFAState Contour) m) ()
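-- Each clause below models one builtin, dispatched by name: ct is the
-- contour of the call, the argument list typically begins with the
-- receiver (this), and cxt is the set representing the call's result.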
builtinDispatch "Object" ct _ cxt = do
newObject cxt []
return ()
builtinDispatch "eval" ct _ cxt = do
warn "trivial eval definition"
newString cxt SAny
return ()
builtinDispatch "Array" ct (this:_) cxt = do
newArray cxt []
return ()
builtinDispatch "Array.concat" ct args@(this:rest) cxt = do
arr@(AObject{aObjectProps=arrSet}) <- newArray cxt []
-- this implementation breaks ordering
let copy src = propagateTo src cxt $ \val -> case val of
AObject {aObjectX=PArray,aObjectProps=props} -> propagateTo props arrSet $ \propVal -> case propVal of
AProperty id (ValueSet vals) -> propagateProperty cxt arrSet id $ \vals' -> subsetOf vals vals'
otherwise -> warn $ "not a property: " ++ show propVal
otherwise -> warn $ "arbitrary flattening at " ++ show cxt ++ "; possible conflation"
let flatten src = propagateProperty cxt arrSet "0" $ \propSet -> flow1 src propSet $ \v ->
if isAbstractArray v then Nothing else Just v
mapM_ copy args
mapM_ flatten args
-- builtinDispatch "Array.length" ct (this ..
builtinDispatch "Array.push" ct (this:arg:_) cxt = do
propagateTo this cxt $ \val -> case val of
AObject{aObjectProps=props,aObjectX=PArray} -> propagateTo arg props $ \argVal -> do
-- we increment ix for each incoming value!
flow1 arg props (\_ -> Nothing) -- artificial dependency creation, since we have to increment stuff
ix' <- currentSetSize props
let ix = ix' - 4 -- 4 things in the array prototype
let set = primeSet cxt ix
newValue argVal set
newValue (AProperty (show ix) (ValueSet set)) props -- SAFE?
otherwise ->
warnAt ("Array.push applied to " ++ show val) (show ct)
builtinDispatch "Array.slice" ct [this,begin] cxt = do
propagateTo this cxt $ \obj -> case obj of
AObject{aObjectProps=ixs,aObjectX=PArray} -> do
AObject{aObjectProps=destIxs} <- newArray cxt [] -- each source array maps to a unique destination
propagateTo begin cxt $ \begin_ix -> case begin_ix of
ANum x -> do let ix_min = truncate x
flow1 ixs destIxs $ \prop -> case prop of
AProperty id (ValueSet vals) -> case tryInt id of
Just ix | ix >= ix_min -> Just $ AProperty (show $ ix - ix_min) (ValueSet vals)
| otherwise -> Nothing
Nothing -> Nothing
otherwise -> Nothing
otherwise -> flow1 ixs destIxs $ \prop -> case prop of
AProperty id (ValueSet vals) -> case tryInt id of
Just ix -> Just prop
Nothing -> Nothing
otherwise -> Nothing
otherwise -> warn $ "Array.slice at " ++ show cxt ++ " applied to " ++ show obj
builtinDispatch "Array.slice" ct [this,begin,end] cxt = do
propagateTo this cxt $ \obj -> case obj of
AObject{aObjectProps=ixs,aObjectX=PArray} -> do
AObject{aObjectProps=destIxs} <- newArray cxt [] -- each source array maps to a unique destination
propagateTo begin cxt $ \begin_ix -> propagateTo end cxt $ \end_ix -> case (begin_ix,end_ix) of
(ANum x,ANum y) -> do
let ix_min = truncate x
let ix_max = truncate y
flow1 ixs destIxs $ \prop -> case prop of
AProperty id (ValueSet vals) -> case tryInt id of
Just ix | ix >= ix_min && ix < ix_max -> Just $ AProperty (show $ ix - ix_min) (ValueSet vals)
| otherwise -> Nothing
Nothing -> Nothing
otherwise -> Nothing
otherwise -> flow1 ixs destIxs $ \prop -> case prop of
AProperty id (ValueSet vals) -> case tryInt id of
Just ix -> Just prop
Nothing -> Nothing
otherwise -> Nothing
otherwise -> warn $ "Array.slice at " ++ show cxt ++ " applied to " ++ show obj
-- .apply with no arguments.
builtinDispatch "Function.apply" ct [thisFn,thisObj] cxt = do
application thisFn [thisObj] ct cxt
propagateTo thisFn cxt $ \val -> case closureThis val of
Just thisLbl -> subsetOf thisObj (thisLbl,ct)
Nothing -> return ()
builtinDispatch "Function.apply" ct (thisFn:thisObj:arg:_) cxt = do
JsCFAState { jscfaOpts = opts } <- lift get
let expSrcs = cfaOptUnlimitedRecursion opts
-- applicationHook <- callHook thisFn [] ct cxt
let app formals results ce = do
let formalsCount = (length formals) - 2
-- this flows into the first formal
subsetOf thisObj (formals !! 0)
-- construct the arguments array (second formal)
arguments <- newArray cxt []
newValue arguments (formals !! 1) -- TODO: safe?
-- the arguments are in an array (an object really)
propagateTo arg cxt $ \argVal -> case argVal of
AObject{aObjectProps=props,aObjectX=PArray} -> do
propagateTo props cxt $ \elemVal -> case elemVal of
AProperty id (ValueSet vals) -> do
case tryInt id of
Just ix ->
if ix < formalsCount
then do newValue elemVal (formals !! 1) -- TODO: safe?
subsetOf vals (formals !! (ix + 2))
else return () -- warn $ "argument out of bound: " ++ id
-- arrays have various builtins that are not copied over. This
-- drops user-defined non-indexable properties too, but that
-- should be okay.
Nothing -> return ()
otherwise ->
warn $ "non-object argument (1): " ++ show elemVal
otherwise ->
warn $ "illegal argument to Function.apply: " ++ show argVal
mapM_ (\set -> subsetOf set cxt) results
propagateTo thisFn cxt $ \fnVal -> case fnVal of
AFunction{aFunctionLabel=fnLbl} | fnLbl `elem` callStack ct && (not $ isRecursionAllowed expSrcs fnLbl) -> do
warn $ "ignoring .apply: " ++ show fnLbl ++ " at " ++ show (fst cxt)
return ()
AFunction{aFunctionLabel=fnLbl,aFunctionArgs=fnArgs,
aFunctionLocals=fnLocals,aFunctionEnv=ceClosure,
aFunctionBody=(EqualOrd body)} -> do
{- #ifdef TRACE_APPLICATION
warn $ "applying " ++ show fnLbl ++ " from " ++ show (callStack ct)
++ " in " ++ show ct
#endif -}
let formals = map (\lbl -> (lbl,ct)) fnArgs
let locals = map (\lbl -> (lbl,ct)) fnLocals
let ce = M.union (M.fromList (formals ++ locals)) ceClosure
results <- {-applicationHook fnVal-} (stmt ce (newCall fnLbl ct) body)
app formals results ce
otherwise ->
warn $ "non-function value: " ++ show fnVal
return ()
builtinDispatch "addEventListener" ct (this:evtType:listener:_) cxt = do
newString cxt SAny -- cxt.1
let result = primeSet cxt 2
-- TODO: It's okay to use the contour ct since contours are abstract and don't represent any particular relationship
-- between calls--for now. However, it is a total hack, and we really need different kinds of top-level contours.
-- asyncHook ct this cxt (application listener [primeSet cxt 1] ct result) False
-- state@(JsCFAState{jscfasFinalFlows=fs}) <- lift get
-- lift $ put state{jscfasFinalFlows = (AnyString,arg):fs}
return ()
builtinDispatch "document.write" ct [this,html] cxt@(lbl,_) = do
warn $ "document.write ..."
let sourceName = case labelSource lbl of { Just s -> s ; Nothing -> "<dynamic>" }
propagateTo html cxt $ \html -> case html of
AObject{aObjectX=(PString sp)} -> onDemandJavaScript ct sp sourceName
otherwise -> warn $ "document.write applied to " ++ show html ++ " at " ++ show cxt
builtinDispatch "Element" ct [this,tag] cxt@(lbl,_) = do
let sourceName = case labelSource lbl of { Just s -> s ; Nothing -> "<dynamic>" }
-- Programmatically, the only way to access an element is using one of the document.getElement* methods, or with
-- an element object.
-- If this is a <script> element, we have to load the script. Currently, we only handle scripts that reference
-- external files (i.e. the src attribute).
propagateTo tag cxt $ \tag -> case tag of
AObject{aObjectX=(PString sp)} | lowercase (unStringPatForOnDemandJavaScript sp) == "script" ->
propagateProperty this cxt "src" $ \srcSet -> propagateTo srcSet cxt $ \src -> case src of
AObject{aObjectX=(PString sp)} -> onDemandJavaScriptFromFile ct sp
otherwise -> warn $ "builtinDispatch [" ++ show ct ++ "] : src property of a <script> tag assigned to "
++ show src
AObject{aObjectX=(PString sp)} | lowercase (unStringPatForOnDemandJavaScript sp) == "iframe" ->
propagateProperty this cxt "src" $ \srcSet -> propagateTo srcSet cxt $ \src -> case src of
AObject{aObjectX=(PString sp)} -> dynamicIFrameLoader ct sp
otherwise -> warn $ "builtinDispatch [" ++ show ct ++ "] : src property of an <iframe> tag assigned to "
++ show src
otherwise -> return ()
-- If the innerHTML attribute is assigned to, we parse it and load any JavaScript it contains.
JsCFAState {jscfasBuiltins=builtins} <- lift get
unsafePropagateProperty this cxt "innerHTML" $ \htmlSet -> propagateTo htmlSet cxt $ \html -> case html of
AObject{aObjectX=(PString sp)} -> onDemandJavaScript ct sp sourceName
UndefinedVal -> return ()
otherwise -> warn $ "Element.innerHTML assigned to " ++ show html ++ " at " ++ show htmlSet
builtinDispatch "XMLHttpRequest" ct (this:_) cxt = do
-- nothing to initialize
return ()
builtinDispatch "XMLHttpRequest.open" ct [this,method,url] cxt = do
{- #ifdef DEBUG_XHR
warn $ "(XMLHttpRequest) this.open"
propagateTo url cxt $ \val -> warn $ "(XMLHttpRequest) this.url = " ++ show val
#endif -}
propagateProperty this cxt "url" $ \urlSet -> subsetOf url urlSet
return ()
builtinDispatch "XMLHttpRequest.open" ct [this,method,url,async] cxt = do
warnAt "dropping async argument to XHR.open" (show ct)
builtinDispatch "XMLHttpRequest.open" ct [this,method,url] cxt
--
-- XMLHttpRequest.prototype.send
--
builtinDispatch "XMLHttpRequest.send" ct (this:contentArg:_) cxt = do
{- #ifdef DEBUG_XHR
warn $ "(XMLHttpRequest) this.send in " ++ show ct
#endif -}
let result = primeSet cxt 1
let handlerSet = primeSet cxt 2
-- set XMLHttpRequest.content
propagateProperty this contentArg "content" $ \contentProp -> subsetOf contentArg contentProp
-- flow a server value into responseText
unsafePropagateProperty this cxt "responseText" $ \responseText -> do
let svrSet = primeSet responseText 1
newValue (AServerVal svrSet) responseText -- acceptable use of newValue
-- the asynchronous application
-- asyncHook ct this cxt (application handlerSet [] ct result) True
-- populate the set of handler functions
propagateProperty this handlerSet "onreadystatechange" $ \rst -> subsetOf rst handlerSet
builtinDispatch "String.any" ct [this] cxt = do
newString cxt SAny
return ()
builtinDispatch "print" ct [this,val] cxt = do
propagateTo val cxt $ \v -> do
-- tell $ "----------------------------"
tell $ show v
tell $ "At: " ++ show ct
{- srcs <- sources val
tell $ "Sources:"
mapM_ (tell.show) srcs
tell $ "----------------------------" -}
newValue UndefinedVal cxt -- acceptable use of newValue
builtinDispatch "$A" ct [this,valStx] stx = do
propagateTo valStx stx $ \val -> case objectProperties val of
Nothing -> do
newArray stx []
return ()
Just propSet -> propagatePropertyOf val stx "toArray" $ \propValSet -> propagate propValSet $ \propVal ->
case propVal of
-- toArray is not defined
UndefinedVal -> do
AObject{aObjectProps=ixs} <- newArray stx []
flow1 propSet ixs $ \property -> case property of
AProperty propId _ | isJust (tryInt propId) -> Just property
| otherwise -> Nothing
otherwise -> error "$A: non-property value in a property set"
AFunction{aFunctionThis=thisLbl} -> do
subsetOf this (thisLbl,ct)
application propValSet [] ct stx
otherwise -> warn $ "$A: expected function or undefined in .toArray: " ++ show propVal
builtinDispatch "$w" ct [this,valStx] stx = do
propagateTo valStx stx $ \val -> case val of
AObject{aObjectX=(PString sp)} -> case unStringPat sp of
Just s -> do
stringPrototype <- builtinPrototype "String"
let mk (s,ix) = do
let set = primeSet stx ix
newValue (AObject (fst set) set (PString $ SConst s)) set
return set
strs <- mapM mk (zip (L.words s) [2..]) -- newArray uses 1
newArray stx strs
return ()
Nothing -> warn $ "$w applied to strange string at " ++ show stx ++ "; argument was " ++ show val
otherwise ->
warn $ "$w applied to non-string at " ++ show stx ++ "; argument was " ++ show val
builtinDispatch name ct args _ = do
warn $ "ERROR: non-existant builtin or pattern-match failure`" ++ name
++ "'; " ++ show (length args) ++ " arguments, call from " ++ show ct
initialize :: (MonadIO m)
=> CfaT Value (StateT (JsCFAState Contour) m) ()
initialize = do
JsCFAState {jscfasBuiltins=builtins} <- lift get
ctArray <- emptyContour
-- Whenever window.location is assigned to, create a navigate node off the
-- page.
case M.lookup "window" builtins of
Just lbl -> do
let windowSet = (lbl,topContour)
unsafePropagateProperty windowSet windowSet "location" $ \locSet ->
return () -- propagate locSet navigateHook
Nothing -> fail "initialize: could not find window"
| brownplt/ovid | src/Ovid/Constraints.hs | bsd-2-clause | 44,770 | 2 | 36 | 11,756 | 13,293 | 6,615 | 6,678 | -1 | -1 |
-- vim: sw=2: ts=2: set expandtab:
{-# LANGUAGE CPP, TemplateHaskell,
MultiParamTypeClasses,
FlexibleInstances,
FlexibleContexts,
OverlappingInstances,
IncoherentInstances,
OverloadedStrings,
GADTs,
NoMonomorphismRestriction,
ScopedTypeVariables
#-}
-----------------------------------------------------------------------------
--
-- Module : Unify
-- Copyright : BSD
-- License : AllRightsReserved
--
-- Maintainer : Ki Yung Ahn
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Unify where
import Syntax
import Data.List
import Data.Maybe
import Control.Monad.Trans
import Control.Monad.Error
import Control.Monad.State
import Language.LBNF.Runtime hiding (printTree)
import Parser (printTree)
import Generics.RepLib.Unify hiding (solveUnification)
import Unbound.LocallyNameless hiding (subst, Con, union)
-- uapply may cause a problem because of subst
-- I had to inline this in the mininax project
-- Let's see how it goes
#define uapply (foldl' (flip (.)) id . map (uncurry subst))
instance HasVar (Name Ki) Ki where
is_var (KVar nm) = Just nm
is_var _ = Nothing
var = KVar
instance HasVar (Name Ty) Ty where
is_var (TVar nm) = Just nm
is_var _ = Nothing
var = TVar
instance HasVar (Name Ty) Tm where
instance (Eq n, Show n, Show a, HasVar n a) => Unify n a String where
unifyStep _ = unifyStepEq
instance (Eq n, Show n, Show a, HasVar n a) => Unify n a (Name s) where
unifyStep _ = unifyStepEq
instance (Alpha n, Eq n, Show n, Alpha a, HasVar n a, Rep1 (UnifySubD n a) a) => Unify n a (Bind n a) where
unifyStep _ b1 b2
| b1 `aeq` b2 = return ()
| otherwise =
do (e1,e2) <- runFreshMT $
do { (_,e1) <- unbind b1
; (_,e2) <- unbind b2
; return (e1,e2) }
-- trace ("trace in instance Unify n a (Bind n a): " ++ show (e1,e2)) $
unifyStep undefined e1 e2
--------------------------------------------
----- maybe we don't need this
--------------------------------------------
-- instance (Eq n, Show n, HasVar n Ty) => Unify n Ty Ty where
-- unifyStep (dum :: Proxy(n,Ty)) a1 a2 =
-- -- trace ("trace 2 in instance Unify n PSUT PSUT): " ++ show (a1,a2)) $
-- case ((is_var a1) :: Maybe n, (is_var a2) :: Maybe n) of
-- (Just n1, Just n2) -> if n1 == n2
-- then return ()
-- else addSub n1 (var n2);
-- (Just n1, _) -> addSub n1 a2
-- (_, Just n2) -> addSub n2 a1
-- (_, _) -> unifyStepR1 rep1 dum a1 a2
-- where
-- addSub n t = extendSubstitution (n, t)
-- modified the Generics.RepLib.Unify version to throwError rather than error
-- TODO Can be even better if we pass the current state rather than (UState cs [])?
-- somehow this idea doesn't work ... [] replaced with current subst loops
-- solveUnification :: (HasVar n a, Eq n, Show n, Show a, Rep1 (UnifySubD n a) a) => [(a, a)] -> Either UnifyError [(n, a)]
solveUnification (eqs :: [(a, a)]) =
case r of Left e -> throwError e
Right _ -> return $ uSubst final
where
(r, final) = runState (runErrorT rwConstraints) (UState cs [])
cs = [(UC dict a1 a2) | (a1, a2) <- eqs]
rwConstraints :: UM n a ()
rwConstraints =
do c <- dequeueConstraint
case c of Just (UC d a1 a2) ->
do unifyStepD d (undefined :: Proxy (n, a)) a1 a2
rwConstraints
Nothing -> return ()
mgu t1 t2 = do
case solveUnification [(t1, t2)] of
Left e -> throwError (strMsg $ e ++ "\n\t"++ errstr)
Right u -> return u
where errstr = "cannot unify "++printTree t1++" and "++printTree t2
mguMany ps = do
case solveUnification ps of
Left e -> throwError (strMsg $ e ++ "\n\t" ++ errstr)
Right u -> return u
where errstr = "cannot unify \n" ++
( concat [ "\t"++printTree t1++" and "++printTree t2++"\n"
| (t1,t2)<-ps ] )
lift2 = lift . lift
getSubst = do { UState _ s <- lift get; return s }
extendSubst :: ( HasVar (Name a) a, Show a, Print a
, Rep1 (UnifySubD (Name a) a) a) =>
(Name a, a)
-> ErrorT UnifyError (State (UnificationState (Name a) a)) ()
extendSubst (x,t)
| isJust my && x < y = extendSubst (y,var x)
| isJust my && x== y = return ()
where my = is_var t
y = fromJust my
extendSubst (x,t) =
do u <- getSubst
case lookup x u of
Nothing -> extendSubstitution (x,t)
Just t' -> mapM_ extendSubst =<< mgu t t'
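-- 'unify' applies the substitution accumulated so far to both sides,
-- computes a most general unifier with 'mgu', and folds each resulting
-- binding back into the state via 'extendSubst'.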
unify t1 t2 = -- trace ("unify ("++show t1++") ("++show t2++")") $
do u <- getSubst
mapM_ extendSubst =<< mgu (uapply u t1) (uapply u t2)
unifyMany ps = do u <- getSubst
mapM_ extendSubst =<< mguMany (map (uapply u) ps)
| kyagrd/micronax | src/Unify.hs | bsd-2-clause | 5,110 | 0 | 17 | 1,548 | 1,323 | 688 | 635 | 88 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-| Implementation of the Ganeti confd server functionality.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Monitoring.Server
( main
, checkMain
, prepMain
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteString.Char8 hiding (map, filter, find)
import Data.List
import qualified Data.Map as Map
import Snap.Core
import Snap.Http.Server
import qualified Text.JSON as J
import Control.Concurrent
import qualified Ganeti.BasicTypes as BT
import Ganeti.Daemon
import qualified Ganeti.DataCollectors.CPUload as CPUload
import qualified Ganeti.DataCollectors.Diskstats as Diskstats
import qualified Ganeti.DataCollectors.Drbd as Drbd
import qualified Ganeti.DataCollectors.InstStatus as InstStatus
import qualified Ganeti.DataCollectors.Lv as Lv
import Ganeti.DataCollectors.Types
import qualified Ganeti.Constants as C
import Ganeti.Runtime
-- * Types and constants definitions
-- | Type alias for checkMain results.
type CheckResult = ()
-- | Type alias for prepMain results.
type PrepResult = Config Snap ()
-- | Version of the latest supported http API.
latestAPIVersion :: Int
latestAPIVersion = C.mondLatestApiVersion
-- | A report of a data collector might be stateful or stateless.
data Report = StatelessR (IO DCReport)
| StatefulR (Maybe CollectorData -> IO DCReport)
-- | Type describing a data collector basic information
data DataCollector = DataCollector
{ dName :: String -- ^ Name of the data collector
, dCategory :: Maybe DCCategory -- ^ Category (storage, instance, etc.)
-- of the collector
, dKind :: DCKind -- ^ Kind (performance or status reporting) of
-- the data collector
, dReport :: Report -- ^ Report produced by the collector
, dUpdate :: Maybe (Maybe CollectorData -> IO CollectorData)
-- ^ Update operation for stateful collectors.
}
-- | The list of available builtin data collectors.
collectors :: [DataCollector]
collectors =
[ DataCollector Diskstats.dcName Diskstats.dcCategory Diskstats.dcKind
(StatelessR Diskstats.dcReport) Nothing
, DataCollector Drbd.dcName Drbd.dcCategory Drbd.dcKind
(StatelessR Drbd.dcReport) Nothing
, DataCollector InstStatus.dcName InstStatus.dcCategory InstStatus.dcKind
(StatelessR InstStatus.dcReport) Nothing
, DataCollector Lv.dcName Lv.dcCategory Lv.dcKind
(StatelessR Lv.dcReport) Nothing
, DataCollector CPUload.dcName CPUload.dcCategory CPUload.dcKind
(StatefulR CPUload.dcReport) (Just CPUload.dcUpdate)
]
-- * Configuration handling
-- | The default configuration for the HTTP server.
defaultHttpConf :: FilePath -> FilePath -> Config Snap ()
defaultHttpConf accessLog errorLog =
setAccessLog (ConfigFileLog accessLog) .
setCompression False .
setErrorLog (ConfigFileLog errorLog) $
setVerbose False
emptyConfig
-- * Helper functions
-- | Check function for the monitoring agent.
checkMain :: CheckFn CheckResult
checkMain _ = return $ Right ()
-- | Prepare function for monitoring agent.
prepMain :: PrepFn CheckResult PrepResult
prepMain opts _ = do
accessLog <- daemonsExtraLogFile GanetiMond AccessLog
errorLog <- daemonsExtraLogFile GanetiMond ErrorLog
return $
setPort
(maybe C.defaultMondPort fromIntegral (optPort opts))
(defaultHttpConf accessLog errorLog)
-- * Query answers
-- | Reply to the supported API version numbers query.
versionQ :: Snap ()
versionQ = writeBS . pack $ J.encode [latestAPIVersion]
-- | Version 1 of the monitoring HTTP API.
version1Api :: MVar CollectorMap -> Snap ()
version1Api mvar =
let returnNull = writeBS . pack $ J.encode J.JSNull :: Snap ()
in ifTop returnNull <|>
route
[ ("list", listHandler)
, ("report", reportHandler mvar)
]
-- | Get the JSON representation of a data collector to be used in the collector
-- list.
dcListItem :: DataCollector -> J.JSValue
dcListItem dc =
J.JSArray
[ J.showJSON $ dName dc
, maybe J.JSNull J.showJSON $ dCategory dc
, J.showJSON $ dKind dc
]
-- | Handler for returning lists.
listHandler :: Snap ()
listHandler =
dir "collectors" . writeBS . pack . J.encode $ map dcListItem collectors
-- | Handler for returning data collector reports.
reportHandler :: MVar CollectorMap -> Snap ()
reportHandler mvar =
route
[ ("all", allReports mvar)
, (":category/:collector", oneReport mvar)
] <|>
errorReport
-- | Return the report of all the available collectors.
allReports :: MVar CollectorMap -> Snap ()
allReports mvar = do
reports <- mapM (liftIO . getReport mvar) collectors
writeBS . pack . J.encode $ reports
-- | Takes the CollectorMap and a DataCollector and returns the report for this
-- collector.
getReport :: MVar CollectorMap -> DataCollector -> IO DCReport
getReport mvar collector =
case dReport collector of
StatelessR r -> r
StatefulR r -> do
colData <- getColData (dName collector) mvar
r colData
-- | Returns the data for the corresponding collector.
getColData :: String -> MVar CollectorMap -> IO (Maybe CollectorData)
getColData name mvar = do
m <- readMVar mvar
return $ Map.lookup name m
-- | Returns a category given its name.
-- If "default" is given as the name, the collector has no category, and
-- Nothing will be returned.
catFromName :: String -> BT.Result (Maybe DCCategory)
catFromName "instance" = BT.Ok $ Just DCInstance
catFromName "storage" = BT.Ok $ Just DCStorage
catFromName "daemon" = BT.Ok $ Just DCDaemon
catFromName "hypervisor" = BT.Ok $ Just DCHypervisor
catFromName "default" = BT.Ok Nothing
catFromName _ = BT.Bad "No such category"
errorReport :: Snap ()
errorReport = do
modifyResponse $ setResponseStatus 404 "Not found"
writeBS "Unable to produce a report for the requested resource"
error404 :: Snap ()
error404 = do
modifyResponse $ setResponseStatus 404 "Not found"
writeBS "Resource not found"
-- | Return the report of one collector.
oneReport :: MVar CollectorMap -> Snap ()
oneReport mvar = do
categoryName <- maybe mzero unpack <$> getParam "category"
collectorName <- maybe mzero unpack <$> getParam "collector"
category <-
case catFromName categoryName of
BT.Ok cat -> return cat
BT.Bad msg -> fail msg
collector <-
case
find (\col -> collectorName == dName col) $
filter (\c -> category == dCategory c) collectors of
Just col -> return col
Nothing -> fail "Unable to find the requested collector"
dcr <- liftIO $ getReport mvar collector
writeBS . pack . J.encode $ dcr
-- | The function implementing the HTTP API of the monitoring agent.
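--
-- Informally, the endpoints exposed (as wired up by the routes above) are:
--
-- GET /                                 supported API versions
-- GET /1/list/collectors                list of available data collectors
-- GET /1/report/all                     reports from all collectors
-- GET /1/report/:category/:collector    report from a single collector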
monitoringApi :: MVar CollectorMap -> Snap ()
monitoringApi mvar =
ifTop versionQ <|>
dir "1" (version1Api mvar) <|>
error404
-- | The function collecting data for each data collector providing a dcUpdate
-- function.
collect :: CollectorMap -> DataCollector -> IO CollectorMap
collect m collector =
case dUpdate collector of
Nothing -> return m
Just update -> do
let name = dName collector
existing = Map.lookup name m
new_data <- update existing
return $ Map.insert name new_data m
-- | Invokes collect for each data collector.
collection :: CollectorMap -> IO CollectorMap
collection m = foldM collect m collectors
-- | The thread responsible for the periodic collection of data for each
-- data collector.
collectord :: MVar CollectorMap -> IO ()
collectord mvar = do
m <- takeMVar mvar
m' <- collection m
putMVar mvar m'
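-- threadDelay takes microseconds, so this pauses for mondTimeInterval seconds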
threadDelay $ 10^(6 :: Int) * C.mondTimeInterval
collectord mvar
-- | Main function.
main :: MainFn CheckResult PrepResult
main _ _ httpConf = do
mvar <- newMVar Map.empty
_ <- forkIO $ collectord mvar
httpServe httpConf . method GET $ monitoringApi mvar
| apyrgio/snf-ganeti | src/Ganeti/Monitoring/Server.hs | bsd-2-clause | 9,228 | 0 | 14 | 1,834 | 1,874 | 957 | 917 | 165 | 3 |
{-
This is a module for reading and parsing *.csv files.
-}
module DataBuilder
( getData
, filterDataContainer
, getMatrixFromDataContainer
, getMatrixFromNumContainer
, NumContainer(..)
, Header
, DataContainer
)where
import Data.Char
import Data.List.Split
import System.IO
import Control.Exception
import Exceptions
import Matrix hiding (decreaseAll)
-- * Types
-- | Simply a String containing the path to a file
type Filepath = String
type Header = String
-- | String that separates data; in csv it is: ","
type Separator = String
-- | DataContainer is a container for headers of data and data itself
newtype DataContainer = DataContainer ([Header], Matrix String)
-- | NumContainer stores the same as DataContainer, except that it holds only numeric values
newtype NumContainer = NumContainer ([Header], Matrix Double)
-- * Instances
instance Show DataContainer where
show (DataContainer (xs, m)) = show xs ++ "\n" ++ show m
instance Show NumContainer where
show (NumContainer (xs, m)) = show xs ++ "\n" ++ show m
-- * FUNCTION INDEX
-- | Takes a list of indexes of columns to delete and a DataContainer, and returns a NumContainer containing only numeric values.
-- If it cannot get a number out of a field, it removes that row (training example).
filterDataContainer :: [Int] -> DataContainer -> NumContainer
-- | Takes a path to the file and the separator that separates the data, and returns a DataContainer
getData :: Filepath -> Separator -> IO (DataContainer)
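-- A usage sketch (illustrative only; assumes a comma-separated file
-- "data.csv" exists and that column 1 should be dropped):
--
-- > example :: IO NumContainer
-- > example = do
-- >   dc <- getData "data.csv" ","
-- >   return (filterDataContainer [1] dc)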
-- BODIES
filterDataContainer columnsToDelete (DataContainer (s, m)) = NumContainer ( deleteElements columnsToDelete s,
fmap (\(Just e) -> e) .
filterLinesHor (\s -> if s == Nothing then False else True) .
fmap getNumber .
deleteColumns columnsToDelete $ m)
getData path sep = (do
handle <- openFile path ReadMode
contents <- hGetContents handle
if isFileOK contents
then return (getDataContainer sep . lines $ contents)
else throwE "\n The file is not a valid csv file.")
`catch` handler
handler :: IOError -> IO (DataContainer)
handler e = do
throwE (show e ++ "Error while getting data.")
getMatrixFromDataContainer :: DataContainer -> Matrix String
getMatrixFromDataContainer (DataContainer (_, m)) = m
getMatrixFromNumContainer :: NumContainer -> Matrix Double
getMatrixFromNumContainer (NumContainer (_, m)) = m
--PRIVATE FUNCTIONS
isFileOK :: String -> Bool -- here we can check for errors that may appear
isFileOK s
| length s == 0 = False -- when empty file
| otherwise = True
getDataContainer :: Separator -> [String] -> DataContainer
getDataContainer sep contentInLines = DataContainer (map filterHeader . splitOn sep . head $ contentInLines, transposeM $ (packM inside))
where
inside = map (\s -> splitOn sep $ s) . tail $ contentInLines
filterHeader :: Header -> Header
filterHeader s = filter (\ch -> (ch /= '\"') && (ch /= '\'')) s
deleteElements :: [Int] -> [a] -> [a]
deleteElements [] l = l
deleteElements _ [] = []
deleteElements (a:as) xs = deleteElements (decreaseAll (as)) (deleteElement a xs)
deleteElement :: Int -> [a] -> [a]
deleteElement a xs = loop a xs
where
loop _ [] = []
loop 1 (y:ys) = loop 0 ys
loop n (y:ys) = y:(loop (n-1) ys)
decreaseAll :: [Int] -> [Int]
decreaseAll [] = []
decreaseAll (x:xs) = (x-1):(decreaseAll xs)
getNumber :: String -> Maybe Double
getNumber s = case null s || head s == '\"' || head s == '\'' || (isLetter . head $ s) of
True -> Nothing
False -> Just $ (read s :: Double)
| kanes115/Regressions | src/DataBuilder.hs | bsd-3-clause | 4,048 | 0 | 15 | 1,215 | 1,065 | 569 | 496 | 69 | 3 |
{-
- Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Hacq.
- Hacq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Control.Monad.Quantum.ApproxSequence.Class (MonadApproxSequence(..), applyPrepareQubitApprox, applyGlobalPhase) where
import Control.Monad.Reader (ReaderT)
import Control.Monad.Trans (MonadTrans, lift)
import Data.Complex
import Control.Monad.Quantum.Class
class MonadQuantumBase w m => MonadApproxSequence w m | m -> w where
-- |@applyOneQubitUnitary a c d w@ applies unitary U to wire w, where U is given by the following matrix:
--
-- > a c*
-- > c d
applyOneQubitUnitary :: Complex Double -> Complex Double -> Complex Double -> w -> m ()
-- |@applyPrepareQubitApprox a b@ prepares a qubit in a state a|0>+b|1>.
applyPrepareQubitApprox :: MonadApproxSequence w m => Complex Double -> Complex Double -> m w
applyPrepareQubitApprox a b = do
w <- ancilla
applyOneQubitUnitary a b (-conjugate a) w
return w
{-# INLINABLE applyPrepareQubitApprox #-}
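-- Derivation note: applying the matrix above to |0> selects its first
-- column, a|0> + c|1>. Hence passing c = b and d = -conjugate a prepares
-- a|0> + b|1>, and U remains unitary provided |a|^2 + |b|^2 = 1 (an
-- assumption about the caller's input).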
applyGlobalPhase :: (MonadQuantum w m, MonadApproxSequence w m) => Double -> m ()
applyGlobalPhase fraction =
handleMaybeCtrl $ \ctrl ->
case ctrl of
Nothing -> return ()
Just ctrlwire ->
applyOneQubitUnitary 1 0 (cis (2 * pi * fraction)) ctrlwire
{-# INLINABLE applyGlobalPhase #-}
-- Instance for ReaderT
instance MonadApproxSequence w m => MonadApproxSequence w (ReaderT r m) where
applyOneQubitUnitary a c d w = lift $ applyOneQubitUnitary a c d w
{-# INLINABLE applyOneQubitUnitary #-}
| ti1024/hacq | src/Control/Monad/Quantum/ApproxSequence/Class.hs | bsd-3-clause | 1,776 | 1 | 15 | 327 | 362 | 193 | 169 | 25 | 2 |
{-# language CPP #-}
-- | = Name
--
-- VK_NV_framebuffer_mixed_samples - device extension
--
-- == VK_NV_framebuffer_mixed_samples
--
-- [__Name String__]
-- @VK_NV_framebuffer_mixed_samples@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 153
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_NV_framebuffer_mixed_samples] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_NV_framebuffer_mixed_samples extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2017-06-04
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension allows multisample rendering with a raster and
-- depth\/stencil sample count that is larger than the color sample count.
-- Rasterization and the results of the depth and stencil tests together
-- determine the portion of a pixel that is “covered”. It can be useful to
-- evaluate coverage at a higher frequency than color samples are stored.
-- This coverage is then “reduced” to a collection of covered color
-- samples, each having an opacity value corresponding to the fraction of
-- the color sample covered. The opacity can optionally be blended into
-- individual color samples.
--
-- Rendering with fewer color samples than depth\/stencil samples greatly
-- reduces the amount of memory and bandwidth consumed by the color buffer.
-- However, converting the coverage values into opacity introduces
-- artifacts where triangles share edges and /may/ not be suitable for
-- normal triangle mesh rendering.
--
-- One expected use case for this functionality is Stencil-then-Cover path
-- rendering (similar to the OpenGL GL_NV_path_rendering extension). The
-- stencil step determines the coverage (in the stencil buffer) for an
-- entire path at the higher sample frequency, and then the cover step
-- draws the path into the lower frequency color buffer using the coverage
-- information to antialias path edges. With this two-step process,
-- internal edges are fully covered when antialiasing is applied and there
-- is no corruption on these edges.
--
-- The key features of this extension are:
--
-- - It allows render pass and framebuffer objects to be created where
-- the number of samples in the depth\/stencil attachment in a subpass
-- is a multiple of the number of samples in the color attachments in
-- the subpass.
--
-- - A coverage reduction step is added to Fragment Operations which
-- converts a set of covered raster\/depth\/stencil samples to a set of
-- color samples that perform blending and color writes. The coverage
-- reduction step also includes an optional coverage modulation step,
-- multiplying color values by a fractional opacity corresponding to
-- the number of associated raster\/depth\/stencil samples covered.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core10.Pipeline.PipelineMultisampleStateCreateInfo':
--
-- - 'PipelineCoverageModulationStateCreateInfoNV'
--
-- == New Enums
--
-- - 'CoverageModulationModeNV'
--
-- == New Bitmasks
--
-- - 'PipelineCoverageModulationStateCreateFlagsNV'
--
-- == New Enum Constants
--
-- - 'NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME'
--
-- - 'NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV'
--
-- == Version History
--
-- - Revision 1, 2017-06-04 (Jeff Bolz)
--
-- - Internal revisions
--
-- == See Also
--
-- 'CoverageModulationModeNV',
-- 'PipelineCoverageModulationStateCreateFlagsNV',
-- 'PipelineCoverageModulationStateCreateInfoNV'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NV_framebuffer_mixed_samples ( PipelineCoverageModulationStateCreateInfoNV(..)
, PipelineCoverageModulationStateCreateFlagsNV(..)
, CoverageModulationModeNV( COVERAGE_MODULATION_MODE_NONE_NV
, COVERAGE_MODULATION_MODE_RGB_NV
, COVERAGE_MODULATION_MODE_ALPHA_NV
, COVERAGE_MODULATION_MODE_RGBA_NV
, ..
)
, NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION
, pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION
, NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME
, pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Control.Monad (unless)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.IO (throwIO)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showString)
import GHC.Show (showsPrec)
import Numeric (showHex)
import Data.Coerce (coerce)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import qualified Data.Vector (null)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.C.Types (CFloat)
import Foreign.C.Types (CFloat(..))
import Foreign.C.Types (CFloat(CFloat))
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.FundamentalTypes (Flags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV))
-- | VkPipelineCoverageModulationStateCreateInfoNV - Structure specifying
-- parameters controlling coverage modulation
--
-- = Description
--
-- If @coverageModulationTableEnable@ is
-- 'Vulkan.Core10.FundamentalTypes.FALSE', then for each color sample the
-- associated bits of the pixel coverage are counted and divided by the
-- number of associated bits to produce a modulation factor R in the range
-- (0,1] (a value of zero would have been killed due to a color coverage of
-- 0). Specifically:
--
-- - N = value of @rasterizationSamples@
--
-- - M = value of 'Vulkan.Core10.Pass.AttachmentDescription'::@samples@
-- for any color attachments
--
-- - R = popcount(associated coverage bits) \/ (N \/ M)
--
-- If @coverageModulationTableEnable@ is
-- 'Vulkan.Core10.FundamentalTypes.TRUE', the value R is computed using a
-- programmable lookup table. The lookup table has N \/ M elements, and the
-- element of the table is selected by:
--
-- - R = @pCoverageModulationTable@[popcount(associated coverage bits)-1]
--
-- Note that the table does not have an entry for popcount(associated
-- coverage bits) = 0, because such samples would have been killed.
--
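-- As a worked example (illustrative numbers, not part of the specification):
-- with @rasterizationSamples@ equal to 8 and color attachment @samples@
-- equal to 2, each color sample is associated with N \/ M = 4 coverage
-- bits. If 3 of those 4 bits are covered, then R = 3 \/ 4 when the table is
-- disabled, or R = @pCoverageModulationTable@[2] when it is enabled.
--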
-- The values of @pCoverageModulationTable@ /may/ be rounded to an
-- implementation-dependent precision, which is at least as fine as 1 \/ N,
-- and clamped to [0,1].
--
-- For each color attachment with a floating point or normalized color
-- format, each fragment output color value is replicated to M values which
-- /can/ each be modulated (multiplied) by that color sample’s associated
-- value of R. Which components are modulated is controlled by
-- @coverageModulationMode@.
--
-- If this structure is not included in the @pNext@ chain, it is as if
-- @coverageModulationMode@ is 'COVERAGE_MODULATION_MODE_NONE_NV'.
--
-- If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#fragops-coverage-reduction coverage reduction mode>
-- is
-- 'Vulkan.Extensions.VK_NV_coverage_reduction_mode.COVERAGE_REDUCTION_MODE_TRUNCATE_NV',
-- each color sample is associated with only a single coverage sample. In
-- this case, it is as if @coverageModulationMode@ is
-- 'COVERAGE_MODULATION_MODE_NONE_NV'.
--
-- == Valid Usage
--
-- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405#
-- If @coverageModulationTableEnable@ is
-- 'Vulkan.Core10.FundamentalTypes.TRUE',
-- @coverageModulationTableCount@ /must/ be equal to the number of
-- rasterization samples divided by the number of color samples in the
-- subpass
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType#
-- @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV'
--
-- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask#
-- @flags@ /must/ be @0@
--
-- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter#
-- @coverageModulationMode@ /must/ be a valid
-- 'CoverageModulationModeNV' value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32', 'CoverageModulationModeNV',
-- 'PipelineCoverageModulationStateCreateFlagsNV',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PipelineCoverageModulationStateCreateInfoNV = PipelineCoverageModulationStateCreateInfoNV
{ -- | @flags@ is reserved for future use.
flags :: PipelineCoverageModulationStateCreateFlagsNV
, -- | @coverageModulationMode@ is a 'CoverageModulationModeNV' value
-- controlling which color components are modulated.
coverageModulationMode :: CoverageModulationModeNV
, -- | @coverageModulationTableEnable@ controls whether the modulation factor
-- is looked up from a table in @pCoverageModulationTable@.
coverageModulationTableEnable :: Bool
, -- | @coverageModulationTableCount@ is the number of elements in
-- @pCoverageModulationTable@.
coverageModulationTableCount :: Word32
, -- | @pCoverageModulationTable@ is a table of modulation factors containing a
-- value for each number of covered samples.
coverageModulationTable :: Vector Float
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineCoverageModulationStateCreateInfoNV)
#endif
deriving instance Show PipelineCoverageModulationStateCreateInfoNV
instance ToCStruct PipelineCoverageModulationStateCreateInfoNV where
withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineCoverageModulationStateCreateInfoNV{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
lift $ poke ((p `plusPtr` 16 :: Ptr PipelineCoverageModulationStateCreateFlagsNV)) (flags)
lift $ poke ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV)) (coverageModulationMode)
lift $ poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (coverageModulationTableEnable))
let pCoverageModulationTableLength = Data.Vector.length $ (coverageModulationTable)
coverageModulationTableCount'' <- lift $ if (coverageModulationTableCount) == 0
then pure $ fromIntegral pCoverageModulationTableLength
else do
unless (fromIntegral pCoverageModulationTableLength == (coverageModulationTableCount) || pCoverageModulationTableLength == 0) $
throwIO $ IOError Nothing InvalidArgument "" "pCoverageModulationTable must be empty or have 'coverageModulationTableCount' elements" Nothing Nothing
pure (coverageModulationTableCount)
lift $ poke ((p `plusPtr` 28 :: Ptr Word32)) (coverageModulationTableCount'')
pCoverageModulationTable'' <- if Data.Vector.null (coverageModulationTable)
then pure nullPtr
else do
pPCoverageModulationTable <- ContT $ allocaBytes @CFloat (((Data.Vector.length (coverageModulationTable))) * 4)
lift $ Data.Vector.imapM_ (\i e -> poke (pPCoverageModulationTable `plusPtr` (4 * (i)) :: Ptr CFloat) (CFloat (e))) ((coverageModulationTable))
pure $ pPCoverageModulationTable
lift $ poke ((p `plusPtr` 32 :: Ptr (Ptr CFloat))) pCoverageModulationTable''
lift $ f
cStructSize = 40
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV)) (zero)
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PipelineCoverageModulationStateCreateInfoNV where
peekCStruct p = do
flags <- peek @PipelineCoverageModulationStateCreateFlagsNV ((p `plusPtr` 16 :: Ptr PipelineCoverageModulationStateCreateFlagsNV))
coverageModulationMode <- peek @CoverageModulationModeNV ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV))
coverageModulationTableEnable <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
coverageModulationTableCount <- peek @Word32 ((p `plusPtr` 28 :: Ptr Word32))
pCoverageModulationTable <- peek @(Ptr CFloat) ((p `plusPtr` 32 :: Ptr (Ptr CFloat)))
let pCoverageModulationTableLength = if pCoverageModulationTable == nullPtr then 0 else (fromIntegral coverageModulationTableCount)
pCoverageModulationTable' <- generateM pCoverageModulationTableLength (\i -> do
pCoverageModulationTableElem <- peek @CFloat ((pCoverageModulationTable `advancePtrBytes` (4 * (i)) :: Ptr CFloat))
pure $ coerce @CFloat @Float pCoverageModulationTableElem)
pure $ PipelineCoverageModulationStateCreateInfoNV
flags coverageModulationMode (bool32ToBool coverageModulationTableEnable) coverageModulationTableCount pCoverageModulationTable'
instance Zero PipelineCoverageModulationStateCreateInfoNV where
zero = PipelineCoverageModulationStateCreateInfoNV
zero
zero
zero
zero
mempty
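-- A construction sketch (not taken from the specification; the field values
-- below are purely illustrative):
--
-- > info = zero { coverageModulationMode        = COVERAGE_MODULATION_MODE_RGBA_NV
-- >             , coverageModulationTableEnable = False
-- >             }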
-- | VkPipelineCoverageModulationStateCreateFlagsNV - Reserved for future use
--
-- = Description
--
-- 'PipelineCoverageModulationStateCreateFlagsNV' is a bitmask type for
-- setting a mask, but is currently reserved for future use.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>,
-- 'PipelineCoverageModulationStateCreateInfoNV'
newtype PipelineCoverageModulationStateCreateFlagsNV = PipelineCoverageModulationStateCreateFlagsNV Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
conNamePipelineCoverageModulationStateCreateFlagsNV :: String
conNamePipelineCoverageModulationStateCreateFlagsNV = "PipelineCoverageModulationStateCreateFlagsNV"
enumPrefixPipelineCoverageModulationStateCreateFlagsNV :: String
enumPrefixPipelineCoverageModulationStateCreateFlagsNV = ""
showTablePipelineCoverageModulationStateCreateFlagsNV :: [(PipelineCoverageModulationStateCreateFlagsNV, String)]
showTablePipelineCoverageModulationStateCreateFlagsNV = []
instance Show PipelineCoverageModulationStateCreateFlagsNV where
showsPrec = enumShowsPrec enumPrefixPipelineCoverageModulationStateCreateFlagsNV
showTablePipelineCoverageModulationStateCreateFlagsNV
conNamePipelineCoverageModulationStateCreateFlagsNV
(\(PipelineCoverageModulationStateCreateFlagsNV x) -> x)
(\x -> showString "0x" . showHex x)
instance Read PipelineCoverageModulationStateCreateFlagsNV where
readPrec = enumReadPrec enumPrefixPipelineCoverageModulationStateCreateFlagsNV
showTablePipelineCoverageModulationStateCreateFlagsNV
conNamePipelineCoverageModulationStateCreateFlagsNV
PipelineCoverageModulationStateCreateFlagsNV
-- | VkCoverageModulationModeNV - Specify the coverage modulation mode
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>,
-- 'PipelineCoverageModulationStateCreateInfoNV'
newtype CoverageModulationModeNV = CoverageModulationModeNV Int32
deriving newtype (Eq, Ord, Storable, Zero)
-- | 'COVERAGE_MODULATION_MODE_NONE_NV' specifies that no components are
-- multiplied by the modulation factor.
pattern COVERAGE_MODULATION_MODE_NONE_NV = CoverageModulationModeNV 0
-- | 'COVERAGE_MODULATION_MODE_RGB_NV' specifies that the red, green, and
-- blue components are multiplied by the modulation factor.
pattern COVERAGE_MODULATION_MODE_RGB_NV = CoverageModulationModeNV 1
-- | 'COVERAGE_MODULATION_MODE_ALPHA_NV' specifies that the alpha component
-- is multiplied by the modulation factor.
pattern COVERAGE_MODULATION_MODE_ALPHA_NV = CoverageModulationModeNV 2
-- | 'COVERAGE_MODULATION_MODE_RGBA_NV' specifies that all components are
-- multiplied by the modulation factor.
pattern COVERAGE_MODULATION_MODE_RGBA_NV = CoverageModulationModeNV 3
{-# complete COVERAGE_MODULATION_MODE_NONE_NV,
COVERAGE_MODULATION_MODE_RGB_NV,
COVERAGE_MODULATION_MODE_ALPHA_NV,
COVERAGE_MODULATION_MODE_RGBA_NV :: CoverageModulationModeNV #-}
conNameCoverageModulationModeNV :: String
conNameCoverageModulationModeNV = "CoverageModulationModeNV"
enumPrefixCoverageModulationModeNV :: String
enumPrefixCoverageModulationModeNV = "COVERAGE_MODULATION_MODE_"
showTableCoverageModulationModeNV :: [(CoverageModulationModeNV, String)]
showTableCoverageModulationModeNV =
[ (COVERAGE_MODULATION_MODE_NONE_NV , "NONE_NV")
, (COVERAGE_MODULATION_MODE_RGB_NV , "RGB_NV")
, (COVERAGE_MODULATION_MODE_ALPHA_NV, "ALPHA_NV")
, (COVERAGE_MODULATION_MODE_RGBA_NV , "RGBA_NV")
]
instance Show CoverageModulationModeNV where
showsPrec = enumShowsPrec enumPrefixCoverageModulationModeNV
showTableCoverageModulationModeNV
conNameCoverageModulationModeNV
(\(CoverageModulationModeNV x) -> x)
(showsPrec 11)
instance Read CoverageModulationModeNV where
readPrec = enumReadPrec enumPrefixCoverageModulationModeNV
showTableCoverageModulationModeNV
conNameCoverageModulationModeNV
CoverageModulationModeNV
type NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION"
pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION :: forall a . Integral a => a
pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION = 1
type NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME = "VK_NV_framebuffer_mixed_samples"
-- No documentation found for TopLevel "VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME"
pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME = "VK_NV_framebuffer_mixed_samples"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_NV_framebuffer_mixed_samples.hs | bsd-3-clause | 20,822 | 1 | 22 | 3,770 | 2,461 | 1,492 | 969 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
module Network.Mail.Locutoria.Cli.Keymap where
import Control.Lens hiding (lens)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Default (Default, def)
import Graphics.Vty.Input (Key(..), Modifier(..))
import Network.Mail.Locutoria.Event
import Network.Mail.Locutoria.State
import Network.Mail.Locutoria.View
data KeyBindings = KeyBindings
{ _keymapGlobal :: Keymap
, _keymapChannelView :: Keymap
, _keymapConversationView :: Keymap
}
type Keymap = Map KeyCombo Event
type KeyCombo = (Key, [Modifier])
handleKey :: KeyBindings -> Key -> [Modifier] -> State -> Maybe Event
handleKey kb key mods st = case st^.stView of
Root -> tryKeymaps [_keymapGlobal]
ComposeReply _ _ -> tryKeymaps [_keymapGlobal]
ShowChannel _ _ -> tryKeymaps [_keymapChannelView, _keymapGlobal]
ShowConversation _ _ _ -> tryKeymaps [_keymapConversationView, _keymapGlobal]
ShowQueue -> tryKeymaps [_keymapGlobal]
Quit -> tryKeymaps [_keymapGlobal]
where
tryKeymaps [] = Nothing
tryKeymaps (m:ms) = case Map.lookup (key, mods) (m kb) of
Just e -> Just e
Nothing -> tryKeymaps ms
instance Default KeyBindings where
def = KeyBindings defKeymapGlobal defKeymapChannelView defKeymapConversationView
defKeymapGlobal :: Map KeyCombo Event
defKeymapGlobal = Map.fromList
[ ((KChar 'q', []), quit)
, ((KChar '@', []), refresh)
, ((KChar 'p', [MCtrl]), prevChannel)
, ((KChar 'n', [MCtrl]), nextChannel)
]
defKeymapChannelView :: Map KeyCombo Event
defKeymapChannelView = Map.fromList
[ ((KChar 'r', []), composeReply)
, ((KEnter, []), showConv)
, ((KChar 'j', []), nextConv)
, ((KChar 'k', []), prevConv)
, ((KChar 'g', []), setConv 0)
, ((KChar 'G', []), setConv (-1))
]
defKeymapConversationView :: Map KeyCombo Event
defKeymapConversationView = Map.fromList
[ ((KChar 'r', []), composeReply)
, ((KChar 'j', []), nextMsg)
, ((KChar 'k', []), prevMsg)
, ((KChar 'g', []), setMsg 0)
, ((KChar 'G', []), setMsg (-1))
]
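-- A hypothetical customisation sketch, not part of the original module: it
-- shows how the 'Default' bindings above can be extended, here adding an
-- extra global binding so that 'Q' also quits. The name 'customBindings' is
-- illustrative only.
customBindings :: KeyBindings
customBindings = def
  { _keymapGlobal = Map.insert (KChar 'Q', []) quit defKeymapGlobal
  }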
| hallettj/locutoria | Network/Mail/Locutoria/Cli/Keymap.hs | bsd-3-clause | 2,183 | 0 | 10 | 510 | 764 | 436 | 328 | 51 | 8 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
-- |
-- Module : Text.Syntax.Poly.Combinators
-- Copyright : 2010-11 University of Marburg, 2012 Kei Hibino
-- License : BSD3
--
-- Maintainer : ex8k.hibino@gmail.com
-- Stability : experimental
-- Portability : unknown
--
-- This module contains combinators for classes defined in "Text.Syntax.Poly.Classes".
module Text.Syntax.Poly.Combinators (
-- * Lexemes
this,
list,
-- * Repetition
none,
many,
some,
replicate,
sepBy, sepBy1,
chainl1,
count,
-- * Skipping
skipMany,
skipSome,
-- * Sequencing
(*>),
(<*),
between,
-- * Alternation
(<+>), choice,
optional, bool,
(<$?>), (<?$>),
-- * Printing
format
) where
#if __GLASGOW_HASKELL__ < 710
import Prelude hiding (foldl, succ, replicate, (.))
#else
import Prelude hiding (foldl, succ, replicate, (.), (<$>), (<*>), (<*), (*>))
#endif
import Control.Isomorphism.Partial.Ext
(nothing, just, nil, cons, left, right, foldl,
(.), Iso, (<$>), inverse, element, unit, commute, ignore,
mayAppend, mayPrepend, succ)
import Text.Syntax.Poly.Class
((<*>), (<|>), empty,
AbstractSyntax(syntax), Syntax(token))
-- | 'none' parses\/prints an empty token stream and consumes\/produces an empty list.
none :: AbstractSyntax delta => delta [alpha]
none = nil <$> syntax ()
-- | The 'many' combinator is used to repeat syntax.
-- @many p@ repeats the passed syntax @p@
-- zero or more than zero times.
many :: AbstractSyntax delta => delta alpha -> delta [alpha]
many p = some p <|> none
-- | The 'some' combinator is used to repeat syntax.
-- @some p@ repeats the passed syntax @p@
-- more than zero times.
some :: AbstractSyntax delta => delta alpha -> delta [alpha]
some p = cons <$> p <*> many p
-- | The 'replicate' combinator is used to repeat syntax.
-- @replicate n p@ repeats the passed syntax @p@
-- @n@ times.
replicate :: AbstractSyntax delta => Int -> delta alpha -> delta [alpha]
replicate n' p = rec n' where
rec n | n <= 0 = none
| otherwise = cons <$> p <*> rec (n - 1)
infixl 4 <+>
-- | The '<+>' combinator choose one of two syntax.
(<+>) :: AbstractSyntax delta => delta alpha -> delta beta -> delta (Either alpha beta)
p <+> q = (left <$> p) <|> (right <$> q)
-- | The 'this' combinator parses\/prints a fixed token
this :: (Syntax tok delta, Eq tok) => tok -> delta ()
this t = inverse (element t) <$> token
-- | The 'list' combinator parses\/prints a fixed token list and consumes\/produces a unit value.
list :: (Syntax tok delta, Eq tok) => [tok] -> delta ()
list [] = syntax ()
list (c:cs) = inverse (element ((), ()))
<$> this c
<*> list cs
-- list cs = foldr
-- (\ c -> (inverse (element ((), ())) <$>) . (this c <*>))
-- (syntax ())
-- cs
-- | This variant of '<*>' ignores its left result.
-- In contrast to its counterpart derived from the `Applicative` class, the ignored
-- parts have type `delta ()` rather than `delta beta` because otherwise information relevant
-- for pretty-printing would be lost.
(*>) :: AbstractSyntax delta => delta () -> delta alpha -> delta alpha
p *> q = inverse unit . commute <$> p <*> q
-- | This variant of '<*>' ignores its right result.
-- In contrast to its counterpart derived from the `Applicative` class, the ignored
-- parts have type `delta ()` rather than `delta beta` because otherwise information relevant
-- for pretty-printing would be lost.
(<*) :: AbstractSyntax delta => delta alpha -> delta () -> delta alpha
p <* q = inverse unit <$> p <*> q
infixl 7 *>, <*
-- | The 'between' function combines '*>' and '<*' in the obvious way.
between :: AbstractSyntax delta => delta () -> delta () -> delta alpha -> delta alpha
between p q r = p *> r <* q
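-- A hypothetical wrapper, not part of the original API, sketching how
-- 'between' composes with 'this' over a Char token stream: a syntax
-- surrounded by parentheses. The name 'parens' is illustrative only.
parens :: (AbstractSyntax delta, Syntax Char delta) => delta alpha -> delta alpha
parens p = between (this '(') (this ')') p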
-- | The 'chainl1' combinator is used to parse a
-- left-associative chain of infix operators.
chainl1 :: AbstractSyntax delta =>
delta alpha -> delta beta -> Iso (alpha, (beta, alpha)) alpha -> delta alpha
chainl1 arg op f
= foldl f <$> arg <*> many (op <*> arg)
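-- A hypothetical sketch, not from the original API: left-associative
-- addition over some expression syntax, assuming a user-supplied syntax
-- 'term :: delta Expr' for operands and a partial iso
-- 'plus :: Iso (Expr, ((), Expr)) Expr' that builds the sum node.
--
-- > expr = chainl1 term (this '+') plus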
-- | The 'count' combinator counts fixed syntax.
count :: (Eq beta, Enum beta, AbstractSyntax delta) => delta () -> delta beta
count p = succ <$> p *> count p <|> syntax (toEnum 0)
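-- A hypothetical usage sketch, not part of the original API: count leading
-- space tokens, e.g. to recover an indentation level when parsing and to
-- re-emit that many spaces when printing. The name 'indentLevel' is
-- illustrative only.
indentLevel :: (AbstractSyntax delta, Syntax Char delta) => delta Int
indentLevel = count (this ' ')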
-- | @skipMany p@ parses the passed syntax @p@
-- zero or more than zero times, and prints nothing.
skipMany :: AbstractSyntax delta => delta alpha -> delta ()
skipMany p = ignore [] <$> many p
-- | @skipSome p@ parses the passed syntax @p@
-- more than zero times, and prints @p@.
skipSome :: AbstractSyntax delta => delta alpha -> delta alpha
skipSome p = p <* skipMany p
-- | 'choice' chooses a syntax from a list of alternatives.
choice :: AbstractSyntax delta => [delta alpha] -> delta alpha
choice (s:ss) = s <|> choice ss
choice [] = empty
-- | The 'optional' combinator may parse \/ print the passed syntax.
optional :: AbstractSyntax delta => delta alpha -> delta (Maybe alpha)
optional x = just <$> x <|> nothing <$> syntax ()
-- | The 'bool' combinator parses \/ prints the passed syntax or not.
bool :: AbstractSyntax delta => delta () -> delta Bool
bool x = x *> syntax True <|> syntax False
-- | The 'sepBy' combinator separates syntax into a delimited list.
-- @sepBy p d@ is @p@ list syntax delimited by @d@ syntax.
sepBy :: AbstractSyntax delta => delta alpha -> delta () -> delta [alpha]
sepBy x sep
= x `sepBy1` sep
<|> none
-- | The 'sepBy1' combinator separates syntax into a delimited non-empty list.
-- @sepBy1 p d@ is @p@ list syntax delimited by @d@ syntax.
sepBy1 :: AbstractSyntax delta => delta alpha -> delta () -> delta [alpha]
sepBy1 x sep = cons <$> x <*> many (sep *> x)
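-- A hypothetical usage sketch, not part of the original API: a
-- comma-separated list of items over a Char token stream. The name
-- 'commaSep' is illustrative only.
commaSep :: (AbstractSyntax delta, Syntax Char delta) => delta alpha -> delta [alpha]
commaSep p = p `sepBy` this ','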
-- | May append not to repeat prefix syntax.
(<$?>) :: AbstractSyntax delta => Iso (a, b) a -> delta (a, Maybe b) -> delta a
cf <$?> pair = mayAppend cf <$> pair
-- | May prepend not to repeat suffix syntax.
(<?$>) :: AbstractSyntax delta => Iso (a, b) b -> delta (Maybe a, b) -> delta b
cf <?$> pair = mayPrepend cf <$> pair
infix 5 <$?>, <?$>
-- | The 'format' combinator just prints the passed tokens,
-- or may parse the passed tokens.
-- This is useful in cases when just formatting with indents.
format :: (Syntax tok delta, Eq tok) => [tok] -> delta ()
format tks = ignore (Just ()) <$> optional (list tks)
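-- A hypothetical usage sketch, not part of the original API: a fixed
-- two-space indent that is always printed and consumed if present when
-- parsing. The name 'indent2' is illustrative only.
indent2 :: (AbstractSyntax delta, Syntax Char delta) => delta ()
indent2 = format "  "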
| schernichkin/haskell-invertible-syntax-poly | src/Text/Syntax/Poly/Combinators.hs | bsd-3-clause | 6,253 | 0 | 11 | 1,320 | 1,596 | 857 | 739 | 86 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1994-1998
\section[TysPrim]{Wired-in knowledge about primitive types}
-}
{-# LANGUAGE CPP #-}
-- | This module defines TyCons that can't be expressed in Haskell.
-- They are all, therefore, wired-in TyCons. C.f module TysWiredIn
module TysPrim(
mkPrimTyConName, -- For implicit parameters in TysWiredIn only
mkTemplateKindVars, mkTemplateTyVars, mkTemplateTyVarsFrom,
mkTemplateKiTyVars,
mkTemplateTyConBinders, mkTemplateKindTyConBinders,
mkTemplateAnonTyConBinders,
alphaTyVars, alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar,
alphaTys, alphaTy, betaTy, gammaTy, deltaTy,
runtimeRep1TyVar, runtimeRep2TyVar, runtimeRep1Ty, runtimeRep2Ty,
openAlphaTy, openBetaTy, openAlphaTyVar, openBetaTyVar,
-- Kind constructors...
tYPETyConName, unliftedTypeKindTyConName,
-- Kinds
tYPE,
funTyCon, funTyConName,
primTyCons,
charPrimTyCon, charPrimTy,
intPrimTyCon, intPrimTy,
wordPrimTyCon, wordPrimTy,
addrPrimTyCon, addrPrimTy,
floatPrimTyCon, floatPrimTy,
doublePrimTyCon, doublePrimTy,
voidPrimTyCon, voidPrimTy,
statePrimTyCon, mkStatePrimTy,
realWorldTyCon, realWorldTy, realWorldStatePrimTy,
proxyPrimTyCon, mkProxyPrimTy,
arrayPrimTyCon, mkArrayPrimTy,
byteArrayPrimTyCon, byteArrayPrimTy,
arrayArrayPrimTyCon, mkArrayArrayPrimTy,
smallArrayPrimTyCon, mkSmallArrayPrimTy,
mutableArrayPrimTyCon, mkMutableArrayPrimTy,
mutableByteArrayPrimTyCon, mkMutableByteArrayPrimTy,
mutableArrayArrayPrimTyCon, mkMutableArrayArrayPrimTy,
smallMutableArrayPrimTyCon, mkSmallMutableArrayPrimTy,
mutVarPrimTyCon, mkMutVarPrimTy,
mVarPrimTyCon, mkMVarPrimTy,
tVarPrimTyCon, mkTVarPrimTy,
stablePtrPrimTyCon, mkStablePtrPrimTy,
stableNamePrimTyCon, mkStableNamePrimTy,
compactPrimTyCon, compactPrimTy,
bcoPrimTyCon, bcoPrimTy,
weakPrimTyCon, mkWeakPrimTy,
threadIdPrimTyCon, threadIdPrimTy,
int32PrimTyCon, int32PrimTy,
word32PrimTyCon, word32PrimTy,
int64PrimTyCon, int64PrimTy,
word64PrimTyCon, word64PrimTy,
eqPrimTyCon, -- ty1 ~# ty2
eqReprPrimTyCon, -- ty1 ~R# ty2 (at role Representational)
eqPhantPrimTyCon, -- ty1 ~P# ty2 (at role Phantom)
-- * SIMD
#include "primop-vector-tys-exports.hs-incl"
) where
#include "HsVersions.h"
import {-# SOURCE #-} TysWiredIn
( runtimeRepTy, liftedTypeKind
, vecRepDataConTyCon, ptrRepUnliftedDataConTyCon
, voidRepDataConTy, intRepDataConTy
, wordRepDataConTy, int64RepDataConTy, word64RepDataConTy, addrRepDataConTy
, floatRepDataConTy, doubleRepDataConTy
, vec2DataConTy, vec4DataConTy, vec8DataConTy, vec16DataConTy, vec32DataConTy
, vec64DataConTy
, int8ElemRepDataConTy, int16ElemRepDataConTy, int32ElemRepDataConTy
, int64ElemRepDataConTy, word8ElemRepDataConTy, word16ElemRepDataConTy
, word32ElemRepDataConTy, word64ElemRepDataConTy, floatElemRepDataConTy
, doubleElemRepDataConTy )
import Var ( TyVar, mkTyVar )
import Name
import TyCon
import SrcLoc
import Unique
import PrelNames
import FastString
import Outputable
import TyCoRep -- Doesn't need special access, but this is easier to avoid
-- import loops which show up if you import Type instead
import Data.Char
{-
************************************************************************
* *
\subsection{Primitive type constructors}
* *
************************************************************************
-}
primTyCons :: [TyCon]
primTyCons
= [ addrPrimTyCon
, arrayPrimTyCon
, byteArrayPrimTyCon
, arrayArrayPrimTyCon
, smallArrayPrimTyCon
, charPrimTyCon
, doublePrimTyCon
, floatPrimTyCon
, intPrimTyCon
, int32PrimTyCon
, int64PrimTyCon
, bcoPrimTyCon
, weakPrimTyCon
, mutableArrayPrimTyCon
, mutableByteArrayPrimTyCon
, mutableArrayArrayPrimTyCon
, smallMutableArrayPrimTyCon
, mVarPrimTyCon
, tVarPrimTyCon
, mutVarPrimTyCon
, realWorldTyCon
, stablePtrPrimTyCon
, stableNamePrimTyCon
, compactPrimTyCon
, statePrimTyCon
, voidPrimTyCon
, proxyPrimTyCon
, threadIdPrimTyCon
, wordPrimTyCon
, word32PrimTyCon
, word64PrimTyCon
, eqPrimTyCon
, eqReprPrimTyCon
, eqPhantPrimTyCon
, unliftedTypeKindTyCon
, tYPETyCon
#include "primop-vector-tycons.hs-incl"
]
mkPrimTc :: FastString -> Unique -> TyCon -> Name
mkPrimTc fs unique tycon
= mkWiredInName gHC_PRIM (mkTcOccFS fs)
unique
(ATyCon tycon) -- Relevant TyCon
UserSyntax
mkBuiltInPrimTc :: FastString -> Unique -> TyCon -> Name
mkBuiltInPrimTc fs unique tycon
= mkWiredInName gHC_PRIM (mkTcOccFS fs)
unique
(ATyCon tycon) -- Relevant TyCon
BuiltInSyntax
charPrimTyConName, intPrimTyConName, int32PrimTyConName, int64PrimTyConName,
  wordPrimTyConName, word32PrimTyConName, word64PrimTyConName, addrPrimTyConName,
  floatPrimTyConName, doublePrimTyConName, statePrimTyConName, proxyPrimTyConName,
  realWorldTyConName, arrayPrimTyConName, arrayArrayPrimTyConName,
  smallArrayPrimTyConName, byteArrayPrimTyConName, mutableArrayPrimTyConName,
  mutableByteArrayPrimTyConName, mutableArrayArrayPrimTyConName,
  smallMutableArrayPrimTyConName, mutVarPrimTyConName, mVarPrimTyConName,
  tVarPrimTyConName, stablePtrPrimTyConName, stableNamePrimTyConName,
  compactPrimTyConName, bcoPrimTyConName, weakPrimTyConName,
  threadIdPrimTyConName, eqPrimTyConName, eqReprPrimTyConName,
  eqPhantPrimTyConName, voidPrimTyConName :: Name
charPrimTyConName = mkPrimTc (fsLit "Char#") charPrimTyConKey charPrimTyCon
intPrimTyConName = mkPrimTc (fsLit "Int#") intPrimTyConKey intPrimTyCon
int32PrimTyConName = mkPrimTc (fsLit "Int32#") int32PrimTyConKey int32PrimTyCon
int64PrimTyConName = mkPrimTc (fsLit "Int64#") int64PrimTyConKey int64PrimTyCon
wordPrimTyConName = mkPrimTc (fsLit "Word#") wordPrimTyConKey wordPrimTyCon
word32PrimTyConName = mkPrimTc (fsLit "Word32#") word32PrimTyConKey word32PrimTyCon
word64PrimTyConName = mkPrimTc (fsLit "Word64#") word64PrimTyConKey word64PrimTyCon
addrPrimTyConName = mkPrimTc (fsLit "Addr#") addrPrimTyConKey addrPrimTyCon
floatPrimTyConName = mkPrimTc (fsLit "Float#") floatPrimTyConKey floatPrimTyCon
doublePrimTyConName = mkPrimTc (fsLit "Double#") doublePrimTyConKey doublePrimTyCon
statePrimTyConName = mkPrimTc (fsLit "State#") statePrimTyConKey statePrimTyCon
voidPrimTyConName = mkPrimTc (fsLit "Void#") voidPrimTyConKey voidPrimTyCon
proxyPrimTyConName = mkPrimTc (fsLit "Proxy#") proxyPrimTyConKey proxyPrimTyCon
eqPrimTyConName = mkPrimTc (fsLit "~#") eqPrimTyConKey eqPrimTyCon
eqReprPrimTyConName = mkBuiltInPrimTc (fsLit "~R#") eqReprPrimTyConKey eqReprPrimTyCon
eqPhantPrimTyConName = mkBuiltInPrimTc (fsLit "~P#") eqPhantPrimTyConKey eqPhantPrimTyCon
realWorldTyConName = mkPrimTc (fsLit "RealWorld") realWorldTyConKey realWorldTyCon
arrayPrimTyConName = mkPrimTc (fsLit "Array#") arrayPrimTyConKey arrayPrimTyCon
byteArrayPrimTyConName = mkPrimTc (fsLit "ByteArray#") byteArrayPrimTyConKey byteArrayPrimTyCon
arrayArrayPrimTyConName = mkPrimTc (fsLit "ArrayArray#") arrayArrayPrimTyConKey arrayArrayPrimTyCon
smallArrayPrimTyConName = mkPrimTc (fsLit "SmallArray#") smallArrayPrimTyConKey smallArrayPrimTyCon
mutableArrayPrimTyConName = mkPrimTc (fsLit "MutableArray#") mutableArrayPrimTyConKey mutableArrayPrimTyCon
mutableByteArrayPrimTyConName = mkPrimTc (fsLit "MutableByteArray#") mutableByteArrayPrimTyConKey mutableByteArrayPrimTyCon
mutableArrayArrayPrimTyConName= mkPrimTc (fsLit "MutableArrayArray#") mutableArrayArrayPrimTyConKey mutableArrayArrayPrimTyCon
smallMutableArrayPrimTyConName= mkPrimTc (fsLit "SmallMutableArray#") smallMutableArrayPrimTyConKey smallMutableArrayPrimTyCon
mutVarPrimTyConName = mkPrimTc (fsLit "MutVar#") mutVarPrimTyConKey mutVarPrimTyCon
mVarPrimTyConName = mkPrimTc (fsLit "MVar#") mVarPrimTyConKey mVarPrimTyCon
tVarPrimTyConName = mkPrimTc (fsLit "TVar#") tVarPrimTyConKey tVarPrimTyCon
stablePtrPrimTyConName = mkPrimTc (fsLit "StablePtr#") stablePtrPrimTyConKey stablePtrPrimTyCon
stableNamePrimTyConName = mkPrimTc (fsLit "StableName#") stableNamePrimTyConKey stableNamePrimTyCon
compactPrimTyConName = mkPrimTc (fsLit "Compact#") compactPrimTyConKey compactPrimTyCon
bcoPrimTyConName = mkPrimTc (fsLit "BCO#") bcoPrimTyConKey bcoPrimTyCon
weakPrimTyConName = mkPrimTc (fsLit "Weak#") weakPrimTyConKey weakPrimTyCon
threadIdPrimTyConName = mkPrimTc (fsLit "ThreadId#") threadIdPrimTyConKey threadIdPrimTyCon
{-
************************************************************************
* *
\subsection{Support code}
* *
************************************************************************
alphaTyVars is a list of type variables for use in templates:
["a", "b", ..., "z", "t1", "t2", ... ]
-}
mkTemplateKindVars :: [Kind] -> [TyVar]
-- k0 with unique (mkAlphaTyVarUnique 0)
-- k1 with unique (mkAlphaTyVarUnique 1)
-- ... etc
mkTemplateKindVars kinds
= [ mkTyVar name kind
| (kind, u) <- kinds `zip` [0..]
, let occ = mkTyVarOccFS (mkFastString ('k' : show u))
name = mkInternalName (mkAlphaTyVarUnique u) occ noSrcSpan
]
mkTemplateTyVarsFrom :: Int -> [Kind] -> [TyVar]
-- a with unique (mkAlphaTyVarUnique n)
-- b with unique (mkAlphaTyVarUnique n+1)
-- ... etc
-- Typically called as
--    mkTemplateTyVarsFrom (length kv_bndrs) kinds
-- where kv_bndrs are the kind-level binders of a TyCon
mkTemplateTyVarsFrom n kinds
= [ mkTyVar name kind
| (kind, index) <- zip kinds [0..],
let ch_ord = index + ord 'a'
name_str | ch_ord <= ord 'z' = [chr ch_ord]
| otherwise = 't':show index
uniq = mkAlphaTyVarUnique (index + n)
name = mkInternalName uniq occ noSrcSpan
occ = mkTyVarOccFS (mkFastString name_str)
]
mkTemplateTyVars :: [Kind] -> [TyVar]
mkTemplateTyVars = mkTemplateTyVarsFrom 1
mkTemplateTyConBinders
:: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars
-> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn]
-- same length as first arg
-- Result is anon arg kinds
-> [TyConBinder]
mkTemplateTyConBinders kind_var_kinds mk_anon_arg_kinds
= kv_bndrs ++ tv_bndrs
where
kv_bndrs = mkTemplateKindTyConBinders kind_var_kinds
anon_kinds = mk_anon_arg_kinds (mkTyVarTys (binderVars kv_bndrs))
tv_bndrs = mkTemplateAnonTyConBindersFrom (length kv_bndrs) anon_kinds
mkTemplateKiTyVars
:: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars
-> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn]
-- same length as first arg
-- Result is anon arg kinds [ak1, .., akm]
-> [TyVar] -- [kv1:k1, ..., kvn:kn, av1:ak1, ..., avm:akm]
-- Example: if you want the tyvars for
-- forall (r:RuntimeRep) (a:TYPE r) (b:*). blah
-- call mkTemplateKiTyVars [RuntimeRep] (\[r]. [TYPE r, *])
mkTemplateKiTyVars kind_var_kinds mk_arg_kinds
= kv_bndrs ++ tv_bndrs
where
kv_bndrs = mkTemplateKindVars kind_var_kinds
anon_kinds = mk_arg_kinds (mkTyVarTys kv_bndrs)
tv_bndrs = mkTemplateTyVarsFrom (length kv_bndrs) anon_kinds
mkTemplateKindTyConBinders :: [Kind] -> [TyConBinder]
-- Makes named, Specified binders
mkTemplateKindTyConBinders kinds = [mkNamedTyConBinder Specified tv | tv <- mkTemplateKindVars kinds]
mkTemplateAnonTyConBinders :: [Kind] -> [TyConBinder]
mkTemplateAnonTyConBinders kinds = map mkAnonTyConBinder (mkTemplateTyVars kinds)
mkTemplateAnonTyConBindersFrom :: Int -> [Kind] -> [TyConBinder]
mkTemplateAnonTyConBindersFrom n kinds = map mkAnonTyConBinder (mkTemplateTyVarsFrom n kinds)
alphaTyVars :: [TyVar]
alphaTyVars = mkTemplateTyVars $ repeat liftedTypeKind
alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar :: TyVar
(alphaTyVar:betaTyVar:gammaTyVar:deltaTyVar:_) = alphaTyVars
alphaTys :: [Type]
alphaTys = mkTyVarTys alphaTyVars
alphaTy, betaTy, gammaTy, deltaTy :: Type
(alphaTy:betaTy:gammaTy:deltaTy:_) = alphaTys
runtimeRep1TyVar, runtimeRep2TyVar :: TyVar
(runtimeRep1TyVar : runtimeRep2TyVar : _)
= drop 16 (mkTemplateTyVars (repeat runtimeRepTy)) -- selects 'q','r'
runtimeRep1Ty, runtimeRep2Ty :: Type
runtimeRep1Ty = mkTyVarTy runtimeRep1TyVar
runtimeRep2Ty = mkTyVarTy runtimeRep2TyVar
openAlphaTyVar, openBetaTyVar :: TyVar
[openAlphaTyVar,openBetaTyVar]
= mkTemplateTyVars [tYPE runtimeRep1Ty, tYPE runtimeRep2Ty]
openAlphaTy, openBetaTy :: Type
openAlphaTy = mkTyVarTy openAlphaTyVar
openBetaTy = mkTyVarTy openBetaTyVar
{-
************************************************************************
* *
FunTyCon
* *
************************************************************************
-}
funTyConName :: Name
funTyConName = mkPrimTyConName (fsLit "(->)") funTyConKey funTyCon
funTyCon :: TyCon
funTyCon = mkFunTyCon funTyConName tc_bndrs tc_rep_nm
where
tc_bndrs = mkTemplateAnonTyConBinders [liftedTypeKind, liftedTypeKind]
-- You might think that (->) should have type (?? -> ? -> *), and you'd be right
-- But if we do that we get kind errors when saying
-- instance Control.Arrow (->)
-- because the expected kind is (*->*->*). The trouble is that the
-- expected/actual stuff in the unifier does not go contra-variant, whereas
-- the kind sub-typing does. Sigh. It really only matters if you use (->) in
-- a prefix way, thus: (->) Int# Int#. And this is unusual.
-- because they are never in scope in the source
tc_rep_nm = mkPrelTyConRepName funTyConName
{-
************************************************************************
* *
Kinds
* *
************************************************************************
Note [TYPE and RuntimeRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~
All types that classify values have a kind of the form (TYPE rr), where
data RuntimeRep -- Defined in ghc-prim:GHC.Types
= PtrRepLifted
| PtrRepUnlifted
| IntRep
| FloatRep
.. etc ..
rr :: RuntimeRep
TYPE :: RuntimeRep -> TYPE 'PtrRepLifted -- Built in
So for example:
Int :: TYPE 'PtrRepLifted
Array# Int :: TYPE 'PtrRepUnlifted
Int# :: TYPE 'IntRep
Float# :: TYPE 'FloatRep
Maybe :: TYPE 'PtrRepLifted -> TYPE 'PtrRepLifted
We abbreviate '*' specially:
type * = TYPE 'PtrRepLifted
The 'rr' parameter tells us how the value is represented at runtime.
Generally speaking, you can't be polymorphic in 'rr'. E.g
f :: forall (rr:RuntimeRep) (a:TYPE rr). a -> [a]
f = /\(rr:RuntimeRep) (a:rr) \(a:rr). ...
This is no good: we could not generate code for 'f', because the
calling convention for 'f' varies depending on whether the argument is
an Int, Int#, or Float#. (You could imagine generating specialised
code, one for each instantiation of 'rr', but we don't do that.)
Certain functions CAN be runtime-rep-polymorphic, because the code
generator never has to manipulate a value of type 'a :: TYPE rr'.
* error :: forall (rr:RuntimeRep) (a:TYPE rr). String -> a
Code generator never has to manipulate the return value.
* unsafeCoerce#, defined in MkId.unsafeCoerceId:
Always inlined to be a no-op
unsafeCoerce# :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
a -> b
* Unboxed tuples, and unboxed sums, defined in TysWiredIn
Always inlined, and hence specialised to the call site
(#,#) :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
a -> b -> TYPE 'UnboxedTupleRep
See Note [Unboxed tuple kinds]
Note [Unboxed tuple kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~
What kind does (# Int, Float# #) have?
The "right" answer would be
TYPE ('UnboxedTupleRep [PtrRepLifted, FloatRep])
Currently we do not do this. We just have
(# Int, Float# #) :: TYPE 'UnboxedTupleRep
which does not tell us exactly how it is represented.
Note [PrimRep and kindPrimRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As part of its source code, in TyCon, GHC has
data PrimRep = PtrRep | IntRep | FloatRep | ...etc...
Notice that
* RuntimeRep is part of the syntax tree of the program being compiled
(defined in a library: ghc-prim:GHC.Types)
* PrimRep is part of GHC's source code.
(defined in TyCon)
We need to get from one to the other; that is what kindPrimRep does.
Suppose we have a value
(v :: t) where (t :: k)
Given this kind
k = TyConApp "TYPE" [rep]
GHC needs to be able to figure out how 'v' is represented at runtime.
It expects 'rep' to be of the form
TyConApp rr_dc args
where 'rr_dc' is a promoted data constructor from RuntimeRep. So
now we need to go from 'dc' to the corresponding PrimRep. We store this
PrimRep in the promoted data constructor itself: see TyCon.promDcRepInfo.
-}
tYPETyCon, unliftedTypeKindTyCon :: TyCon
tYPETyConName, unliftedTypeKindTyConName :: Name
tYPETyCon = mkKindTyCon tYPETyConName
(mkTemplateAnonTyConBinders [runtimeRepTy])
liftedTypeKind
[Nominal]
(mkPrelTyConRepName tYPETyConName)
-- See Note [TYPE and RuntimeRep]
-- NB: unlifted is wired in because there is no way to parse it in
-- Haskell. That's the only reason for wiring it in.
unliftedTypeKindTyCon = mkSynonymTyCon unliftedTypeKindTyConName
[] liftedTypeKind []
(tYPE (TyConApp ptrRepUnliftedDataConTyCon []))
True -- no foralls
True -- family free
--------------------------
-- ... and now their names
-- If you edit these, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
tYPETyConName = mkPrimTyConName (fsLit "TYPE") tYPETyConKey tYPETyCon
unliftedTypeKindTyConName = mkPrimTyConName (fsLit "#") unliftedTypeKindTyConKey unliftedTypeKindTyCon
mkPrimTyConName :: FastString -> Unique -> TyCon -> Name
mkPrimTyConName = mkPrimTcName BuiltInSyntax
-- All of the super kinds and kinds are defined in Prim,
-- and use BuiltInSyntax, because they are never in scope in the source
mkPrimTcName :: BuiltInSyntax -> FastString -> Unique -> TyCon -> Name
mkPrimTcName built_in_syntax occ key tycon
= mkWiredInName gHC_PRIM (mkTcOccFS occ) key (ATyCon tycon) built_in_syntax
-----------------------------
-- | Given a RuntimeRep, applies TYPE to it.
-- see Note [TYPE and RuntimeRep]
tYPE :: Type -> Type
tYPE rr = TyConApp tYPETyCon [rr]
{-
************************************************************************
* *
\subsection[TysPrim-basic]{Basic primitive types (@Char#@, @Int#@, etc.)}
* *
************************************************************************
-}
-- only used herein
pcPrimTyCon :: Name -> [Role] -> PrimRep -> TyCon
pcPrimTyCon name roles rep
= mkPrimTyCon name binders result_kind roles
where
binders = mkTemplateAnonTyConBinders (map (const liftedTypeKind) roles)
result_kind = tYPE rr
rr = case rep of
VoidRep -> voidRepDataConTy
PtrRep -> TyConApp ptrRepUnliftedDataConTyCon []
IntRep -> intRepDataConTy
WordRep -> wordRepDataConTy
Int64Rep -> int64RepDataConTy
Word64Rep -> word64RepDataConTy
AddrRep -> addrRepDataConTy
FloatRep -> floatRepDataConTy
DoubleRep -> doubleRepDataConTy
VecRep n elem -> TyConApp vecRepDataConTyCon [n', elem']
where
n' = case n of
2 -> vec2DataConTy
4 -> vec4DataConTy
8 -> vec8DataConTy
16 -> vec16DataConTy
32 -> vec32DataConTy
64 -> vec64DataConTy
_ -> pprPanic "Disallowed VecCount" (ppr n)
elem' = case elem of
Int8ElemRep -> int8ElemRepDataConTy
Int16ElemRep -> int16ElemRepDataConTy
Int32ElemRep -> int32ElemRepDataConTy
Int64ElemRep -> int64ElemRepDataConTy
Word8ElemRep -> word8ElemRepDataConTy
Word16ElemRep -> word16ElemRepDataConTy
Word32ElemRep -> word32ElemRepDataConTy
Word64ElemRep -> word64ElemRepDataConTy
FloatElemRep -> floatElemRepDataConTy
DoubleElemRep -> doubleElemRepDataConTy
pcPrimTyCon0 :: Name -> PrimRep -> TyCon
pcPrimTyCon0 name rep
= pcPrimTyCon name [] rep
charPrimTy :: Type
charPrimTy = mkTyConTy charPrimTyCon
charPrimTyCon :: TyCon
charPrimTyCon = pcPrimTyCon0 charPrimTyConName WordRep
intPrimTy :: Type
intPrimTy = mkTyConTy intPrimTyCon
intPrimTyCon :: TyCon
intPrimTyCon = pcPrimTyCon0 intPrimTyConName IntRep
int32PrimTy :: Type
int32PrimTy = mkTyConTy int32PrimTyCon
int32PrimTyCon :: TyCon
int32PrimTyCon = pcPrimTyCon0 int32PrimTyConName IntRep
int64PrimTy :: Type
int64PrimTy = mkTyConTy int64PrimTyCon
int64PrimTyCon :: TyCon
int64PrimTyCon = pcPrimTyCon0 int64PrimTyConName Int64Rep
wordPrimTy :: Type
wordPrimTy = mkTyConTy wordPrimTyCon
wordPrimTyCon :: TyCon
wordPrimTyCon = pcPrimTyCon0 wordPrimTyConName WordRep
word32PrimTy :: Type
word32PrimTy = mkTyConTy word32PrimTyCon
word32PrimTyCon :: TyCon
word32PrimTyCon = pcPrimTyCon0 word32PrimTyConName WordRep
word64PrimTy :: Type
word64PrimTy = mkTyConTy word64PrimTyCon
word64PrimTyCon :: TyCon
word64PrimTyCon = pcPrimTyCon0 word64PrimTyConName Word64Rep
addrPrimTy :: Type
addrPrimTy = mkTyConTy addrPrimTyCon
addrPrimTyCon :: TyCon
addrPrimTyCon = pcPrimTyCon0 addrPrimTyConName AddrRep
floatPrimTy :: Type
floatPrimTy = mkTyConTy floatPrimTyCon
floatPrimTyCon :: TyCon
floatPrimTyCon = pcPrimTyCon0 floatPrimTyConName FloatRep
doublePrimTy :: Type
doublePrimTy = mkTyConTy doublePrimTyCon
doublePrimTyCon :: TyCon
doublePrimTyCon = pcPrimTyCon0 doublePrimTyConName DoubleRep
{-
************************************************************************
* *
\subsection[TysPrim-state]{The @State#@ type (and @_RealWorld@ types)}
* *
************************************************************************
Note [The equality types story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC sports a veritable menagerie of equality types:
Hetero? Levity Result Role Defining module
------------------------------------------------------------
~# hetero unlifted # nominal GHC.Prim
~~ hetero lifted Constraint nominal GHC.Types
~ homo lifted Constraint nominal Data.Type.Equality
:~: homo lifted * nominal Data.Type.Equality
~R# hetero unlifted # repr GHC.Prim
Coercible homo lifted Constraint repr GHC.Types
Coercion homo lifted * repr Data.Type.Coercion
~P# hetero unlifted phantom GHC.Prim
Recall that "hetero" means the equality can related types of different
kinds. Knowing that (t1 ~# t2) or (t1 ~R# t2) or even that (t1 ~P# t2)
also means that (k1 ~# k2), where (t1 :: k1) and (t2 :: k2).
To produce less confusion for end users, when not dumping and without
-fprint-equality-relations, each of these groups is printed as the bottommost
listed equality. That is, (~#) and (~~) are both rendered as (~) in
error messages, and (~R#) is rendered as Coercible.
Let's take these one at a time:
--------------------------
(~#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is The Type Of Equality in GHC. It classifies nominal coercions.
This type is used in the solver for recording equality constraints.
It responds "yes" to Type.isEqPred and classifies as an EqPred in
Type.classifyPredType.
All wanted constraints of this type are built with coercion holes.
(See Note [Coercion holes] in TyCoRep.) But see also
Note [Deferred errors for coercion holes] in TcErrors to see how
equality constraints are deferred.
Within GHC, ~# is called eqPrimTyCon, and it is defined in TysPrim.
--------------------------
(~~) :: forall k1 k2. k1 -> k2 -> Constraint
--------------------------
This is (almost) an ordinary class, defined as if by
class a ~# b => a ~~ b
instance a ~# b => a ~~ b
Here's what's unusual about it:
* We can't actually declare it that way because we don't have syntax for ~#.
And ~# isn't a constraint, so even if we could write it, it wouldn't kind
check.
* Users cannot write instances of it.
* It is "naturally coherent". This means that the solver won't hesitate to
solve a goal of type (a ~~ b) even if there is, say (Int ~~ c) in the
context. (Normally, it waits to learn more, just in case the given
influences what happens next.) This is quite like having
IncoherentInstances enabled.
* It always terminates. That is, in the UndecidableInstances checks, we
don't worry if a (~~) constraint is too big, as we know that solving
equality terminates.
On the other hand, this behaves just like any class w.r.t. eager superclass
unpacking in the solver. So a lifted equality given quickly becomes an unlifted
equality given. This is good, because the solver knows all about unlifted
equalities. There is some special-casing in TcInteract.matchClassInst to
pretend that there is an instance of this class, as we can't write the instance
in Haskell.
Within GHC, ~~ is called heqTyCon, and it is defined in TysWiredIn.
--------------------------
(~) :: forall k. k -> k -> Constraint
--------------------------
This is defined in Data.Type.Equality:
class a ~~ b => (a :: k) ~ (b :: k)
instance a ~~ b => a ~ b
This is even more so an ordinary class than (~~), with the following exceptions:
* Users cannot write instances of it.
* It is "naturally coherent". (See (~~).)
* (~) is magical syntax, as ~ is a reserved symbol. It cannot be exported
or imported.
* It always terminates.
Within GHC, ~ is called eqTyCon, and it is defined in PrelNames. Note that
it is *not* wired in.
--------------------------
(:~:) :: forall k. k -> k -> *
--------------------------
This is a perfectly ordinary GADT, wrapping (~). It is not defined within
GHC at all.
--------------------------
(~R#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is the representational analogue of ~#. It is the type of representational
equalities that the solver works on. All wanted constraints of this type are
built with coercion holes.
Within GHC, ~R# is called eqReprPrimTyCon, and it is defined in TysPrim.
--------------------------
Coercible :: forall k. k -> k -> Constraint
--------------------------
This is quite like (~~) in the way it's defined and treated within GHC, but
it's homogeneous. Homogeneity helps with type inference (as GHC can solve one
kind from the other) and, in my (Richard's) estimation, will be more intuitive
for users.
An alternative design included HCoercible (like (~~)) and Coercible (like (~)).
One annoyance was that we want `coerce :: Coercible a b => a -> b`, and
we need the type of coerce to be fully wired-in. So the HCoercible/Coercible
split required that both types be fully wired-in. Instead of doing this,
I just got rid of HCoercible, as I'm not sure who would use it, anyway.
Within GHC, Coercible is called coercibleTyCon, and it is defined in
TysWiredIn.
--------------------------
Coercion :: forall k. k -> k -> *
--------------------------
This is a perfectly ordinary GADT, wrapping Coercible. It is not defined
within GHC at all.
--------------------------
(~P#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is the phantom analogue of ~# and it is barely used at all.
(The solver has no idea about this one.) Here is the motivation:
data Phant a = MkPhant
type role Phant phantom
Phant <Int, Bool>_P :: Phant Int ~P# Phant Bool
We just need to have something to put on that last line. You probably
don't need to worry about it.
Note [The State# TyCon]
~~~~~~~~~~~~~~~~~~~~~~~
State# is the primitive, unlifted type of states. It has one type parameter,
thus
State# RealWorld
or
State# s
where s is a type variable. The only purpose of the type parameter is to
keep different state threads separate. It is represented by nothing at all.
The type parameter to State# is intended to keep separate threads separate.
Even though this parameter is not used in the definition of State#, it is
given role Nominal to enforce its intended use.
-}
mkStatePrimTy :: Type -> Type
mkStatePrimTy ty = TyConApp statePrimTyCon [ty]
statePrimTyCon :: TyCon -- See Note [The State# TyCon]
statePrimTyCon = pcPrimTyCon statePrimTyConName [Nominal] VoidRep
{-
RealWorld is deeply magical. It is *primitive*, but it is not
*unlifted* (hence ptrArg). We never manipulate values of type
RealWorld; it's only used in the type system, to parameterise State#.
-}
realWorldTyCon :: TyCon
realWorldTyCon = mkLiftedPrimTyCon realWorldTyConName [] liftedTypeKind []
realWorldTy :: Type
realWorldTy = mkTyConTy realWorldTyCon
realWorldStatePrimTy :: Type
realWorldStatePrimTy = mkStatePrimTy realWorldTy -- State# RealWorld
-- Note: the ``state-pairing'' types are not truly primitive,
-- so they are defined in \tr{TysWiredIn.hs}, not here.
voidPrimTy :: Type
voidPrimTy = TyConApp voidPrimTyCon []
voidPrimTyCon :: TyCon
voidPrimTyCon = pcPrimTyCon voidPrimTyConName [] VoidRep
mkProxyPrimTy :: Type -> Type -> Type
mkProxyPrimTy k ty = TyConApp proxyPrimTyCon [k, ty]
proxyPrimTyCon :: TyCon
proxyPrimTyCon = mkPrimTyCon proxyPrimTyConName binders res_kind [Nominal,Nominal]
where
-- Kind: forall k. k -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind] (\ks-> ks)
res_kind = tYPE voidRepDataConTy
{- *********************************************************************
* *
Primitive equality constraints
See Note [The equality types story]
* *
********************************************************************* -}
eqPrimTyCon :: TyCon -- The representation type for equality predicates
-- See Note [The equality types story]
eqPrimTyCon = mkPrimTyCon eqPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Nominal, Nominal]
-- like eqPrimTyCon, but the type for *Representational* coercions
-- this should only ever appear as the type of a covar. Its role is
-- interpreted in coercionRole
eqReprPrimTyCon :: TyCon -- See Note [The equality types story]
eqReprPrimTyCon = mkPrimTyCon eqReprPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Representational, Representational]
-- like eqPrimTyCon, but the type for *Phantom* coercions.
-- This is only used to make higher-order equalities. Nothing
-- should ever actually have this type!
eqPhantPrimTyCon :: TyCon
eqPhantPrimTyCon = mkPrimTyCon eqPhantPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Phantom, Phantom]
{- *********************************************************************
* *
The primitive array types
* *
********************************************************************* -}
arrayPrimTyCon, mutableArrayPrimTyCon, mutableByteArrayPrimTyCon,
byteArrayPrimTyCon, arrayArrayPrimTyCon, mutableArrayArrayPrimTyCon,
smallArrayPrimTyCon, smallMutableArrayPrimTyCon :: TyCon
arrayPrimTyCon = pcPrimTyCon arrayPrimTyConName [Representational] PtrRep
mutableArrayPrimTyCon = pcPrimTyCon mutableArrayPrimTyConName [Nominal, Representational] PtrRep
mutableByteArrayPrimTyCon = pcPrimTyCon mutableByteArrayPrimTyConName [Nominal] PtrRep
byteArrayPrimTyCon = pcPrimTyCon0 byteArrayPrimTyConName PtrRep
arrayArrayPrimTyCon = pcPrimTyCon0 arrayArrayPrimTyConName PtrRep
mutableArrayArrayPrimTyCon = pcPrimTyCon mutableArrayArrayPrimTyConName [Nominal] PtrRep
smallArrayPrimTyCon = pcPrimTyCon smallArrayPrimTyConName [Representational] PtrRep
smallMutableArrayPrimTyCon = pcPrimTyCon smallMutableArrayPrimTyConName [Nominal, Representational] PtrRep
mkArrayPrimTy :: Type -> Type
mkArrayPrimTy elt = TyConApp arrayPrimTyCon [elt]
byteArrayPrimTy :: Type
byteArrayPrimTy = mkTyConTy byteArrayPrimTyCon
mkArrayArrayPrimTy :: Type
mkArrayArrayPrimTy = mkTyConTy arrayArrayPrimTyCon
mkSmallArrayPrimTy :: Type -> Type
mkSmallArrayPrimTy elt = TyConApp smallArrayPrimTyCon [elt]
mkMutableArrayPrimTy :: Type -> Type -> Type
mkMutableArrayPrimTy s elt = TyConApp mutableArrayPrimTyCon [s, elt]
mkMutableByteArrayPrimTy :: Type -> Type
mkMutableByteArrayPrimTy s = TyConApp mutableByteArrayPrimTyCon [s]
mkMutableArrayArrayPrimTy :: Type -> Type
mkMutableArrayArrayPrimTy s = TyConApp mutableArrayArrayPrimTyCon [s]
mkSmallMutableArrayPrimTy :: Type -> Type -> Type
mkSmallMutableArrayPrimTy s elt = TyConApp smallMutableArrayPrimTyCon [s, elt]
{- *********************************************************************
* *
The mutable variable type
* *
********************************************************************* -}
mutVarPrimTyCon :: TyCon
mutVarPrimTyCon = pcPrimTyCon mutVarPrimTyConName [Nominal, Representational] PtrRep
mkMutVarPrimTy :: Type -> Type -> Type
mkMutVarPrimTy s elt = TyConApp mutVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-synch-var]{The synchronizing variable type}
* *
************************************************************************
-}
mVarPrimTyCon :: TyCon
mVarPrimTyCon = pcPrimTyCon mVarPrimTyConName [Nominal, Representational] PtrRep
mkMVarPrimTy :: Type -> Type -> Type
mkMVarPrimTy s elt = TyConApp mVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-stm-var]{The transactional variable type}
* *
************************************************************************
-}
tVarPrimTyCon :: TyCon
tVarPrimTyCon = pcPrimTyCon tVarPrimTyConName [Nominal, Representational] PtrRep
mkTVarPrimTy :: Type -> Type -> Type
mkTVarPrimTy s elt = TyConApp tVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-stable-ptrs]{The stable-pointer type}
* *
************************************************************************
-}
stablePtrPrimTyCon :: TyCon
stablePtrPrimTyCon = pcPrimTyCon stablePtrPrimTyConName [Representational] AddrRep
mkStablePtrPrimTy :: Type -> Type
mkStablePtrPrimTy ty = TyConApp stablePtrPrimTyCon [ty]
{-
************************************************************************
* *
\subsection[TysPrim-stable-names]{The stable-name type}
* *
************************************************************************
-}
stableNamePrimTyCon :: TyCon
stableNamePrimTyCon = pcPrimTyCon stableNamePrimTyConName [Representational] PtrRep
mkStableNamePrimTy :: Type -> Type
mkStableNamePrimTy ty = TyConApp stableNamePrimTyCon [ty]
{-
************************************************************************
* *
\subsection[TysPrim-compact-nfdata]{The Compact NFData (CNF) type}
* *
************************************************************************
-}
compactPrimTyCon :: TyCon
compactPrimTyCon = pcPrimTyCon0 compactPrimTyConName PtrRep
compactPrimTy :: Type
compactPrimTy = mkTyConTy compactPrimTyCon
{-
************************************************************************
* *
\subsection[TysPrim-BCOs]{The ``bytecode object'' type}
* *
************************************************************************
-}
bcoPrimTy :: Type
bcoPrimTy = mkTyConTy bcoPrimTyCon
bcoPrimTyCon :: TyCon
bcoPrimTyCon = pcPrimTyCon0 bcoPrimTyConName PtrRep
{-
************************************************************************
* *
\subsection[TysPrim-Weak]{The ``weak pointer'' type}
* *
************************************************************************
-}
weakPrimTyCon :: TyCon
weakPrimTyCon = pcPrimTyCon weakPrimTyConName [Representational] PtrRep
mkWeakPrimTy :: Type -> Type
mkWeakPrimTy v = TyConApp weakPrimTyCon [v]
{-
************************************************************************
* *
\subsection[TysPrim-thread-ids]{The ``thread id'' type}
* *
************************************************************************
A thread id is represented by a pointer to the TSO itself, to ensure
that they are always unique and we can always find the TSO for a given
thread id. However, this has the unfortunate consequence that a
ThreadId# for a given thread is treated as a root by the garbage
collector and can keep TSOs around for too long.
Hence the programmer API for thread manipulation uses a weak pointer
to the thread id internally.
-}
threadIdPrimTy :: Type
threadIdPrimTy = mkTyConTy threadIdPrimTyCon
threadIdPrimTyCon :: TyCon
threadIdPrimTyCon = pcPrimTyCon0 threadIdPrimTyConName PtrRep
{-
************************************************************************
* *
\subsection{SIMD vector types}
* *
************************************************************************
-}
#include "primop-vector-tys.hs-incl"
| mettekou/ghc | compiler/prelude/TysPrim.hs | bsd-3-clause | 41,385 | 0 | 17 | 10,358 | 4,301 | 2,429 | 1,872 | 421 | 25 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Config
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Utilities for handling saved state such as known packages, known servers and
-- downloaded packages.
-----------------------------------------------------------------------------
module Distribution.Client.Config (
SavedConfig(..),
loadConfig,
showConfig,
showConfigWithComments,
parseConfig,
defaultCabalDir,
defaultConfigFile,
defaultCacheDir,
defaultCompiler,
defaultLogsDir,
defaultUserInstall,
baseSavedConfig,
commentSavedConfig,
initialSavedConfig,
configFieldDescriptions,
haddockFlagsFields,
installDirsFields,
withProgramsFields,
withProgramOptionsFields,
userConfigDiff,
userConfigUpdate
) where
import Distribution.Client.Types
( RemoteRepo(..), Username(..), Password(..) )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, defaultGlobalFlags
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, showRepo, parseRepo )
import Distribution.Utils.NubList
( NubList, fromNubList, toNubList)
import Distribution.Simple.Compiler
( DebugInfoLevel(..), OptimisationLevel(..) )
import Distribution.Simple.Setup
( ConfigFlags(..), configureOptions, defaultConfigFlags
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, installDirsOptions
, programConfigurationPaths', programConfigurationOptions
, Flag(..), toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs
, PathTemplate, toPathTemplate )
import Distribution.ParseUtils
( FieldDescr(..), liftField
, ParseResult(..), PError(..), PWarning(..)
, locatedErrorMsg, showPWarning
, readFields, warning, lineNo
, simpleField, listField, parseFilePathQ, parseTokenQ )
import Distribution.Client.ParseUtils
( parseFields, ppFields, ppSection )
import qualified Distribution.ParseUtils as ParseUtils
( Field(..) )
import qualified Distribution.Text as Text
( Text(..) )
import Distribution.Simple.Command
( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..)
, viewAsFieldDescr )
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Utils
( die, notice, warn, lowercase, cabalVersion )
import Distribution.Compiler
( CompilerFlavor(..), defaultCompilerFlavor )
import Distribution.Verbosity
( Verbosity, normal )
import Data.List
( partition, find, foldl' )
import Data.Maybe
( fromMaybe )
import Data.Monoid
( Monoid(..) )
import Control.Monad
( unless, foldM, liftM, liftM2 )
import qualified Distribution.Compat.ReadP as Parse
( option )
import qualified Text.PrettyPrint as Disp
( render, text, empty )
import Text.PrettyPrint
( ($+$) )
import System.Directory
( createDirectoryIfMissing, getAppUserDataDirectory, renameFile )
import Network.URI
( URI(..), URIAuth(..) )
import System.FilePath
( (<.>), (</>), takeDirectory )
import System.IO.Error
( isDoesNotExistError )
import Distribution.Compat.Environment
( getEnvironment )
import Distribution.Compat.Exception
( catchIO )
import qualified Paths_cabal_install
( version )
import Data.Version
( showVersion )
import Data.Char
( isSpace )
import qualified Data.Map as M
--
-- * Configuration saved in the config file
--
data SavedConfig = SavedConfig {
savedGlobalFlags :: GlobalFlags,
savedInstallFlags :: InstallFlags,
savedConfigureFlags :: ConfigFlags,
savedConfigureExFlags :: ConfigExFlags,
savedUserInstallDirs :: InstallDirs (Flag PathTemplate),
savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate),
savedUploadFlags :: UploadFlags,
savedReportFlags :: ReportFlags,
savedHaddockFlags :: HaddockFlags
}
instance Monoid SavedConfig where
mempty = SavedConfig {
savedGlobalFlags = mempty,
savedInstallFlags = mempty,
savedConfigureFlags = mempty,
savedConfigureExFlags = mempty,
savedUserInstallDirs = mempty,
savedGlobalInstallDirs = mempty,
savedUploadFlags = mempty,
savedReportFlags = mempty,
savedHaddockFlags = mempty
}
mappend a b = SavedConfig {
savedGlobalFlags = combinedSavedGlobalFlags,
savedInstallFlags = combinedSavedInstallFlags,
savedConfigureFlags = combinedSavedConfigureFlags,
savedConfigureExFlags = combinedSavedConfigureExFlags,
savedUserInstallDirs = combinedSavedUserInstallDirs,
savedGlobalInstallDirs = combinedSavedGlobalInstallDirs,
savedUploadFlags = combinedSavedUploadFlags,
savedReportFlags = combinedSavedReportFlags,
savedHaddockFlags = combinedSavedHaddockFlags
}
where
-- This is ugly, but necessary. If we're mappending two config files, we
-- want the values of the *non-empty* list fields from the second one to
-- *override* the corresponding values from the first one. Default
-- behaviour (concatenation) is confusing and makes some use cases (see
-- #1884) impossible.
--
-- However, we also want to allow specifying multiple values for a list
-- field in a *single* config file. For example, we want the following to
-- continue to work:
--
-- remote-repo: hackage.haskell.org:http://hackage.haskell.org/
-- remote-repo: private-collection:http://hackage.local/
--
-- So we can't just wrap the list fields inside Flags; we have to do some
-- special-casing just for SavedConfig.
-- NB: the signature prevents us from using 'combine' on lists.
combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a
combine' field subfield =
(subfield . field $ a) `mappend` (subfield . field $ b)
lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a]
lastNonEmpty' field subfield =
let a' = subfield . field $ a
b' = subfield . field $ b
in case b' of [] -> a'
_ -> b'
lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a)
-> NubList a
lastNonEmptyNL' field subfield =
let a' = subfield . field $ a
b' = subfield . field $ b
in case fromNubList b' of [] -> a'
_ -> b'
combinedSavedGlobalFlags = GlobalFlags {
globalVersion = combine globalVersion,
globalNumericVersion = combine globalNumericVersion,
globalConfigFile = combine globalConfigFile,
globalSandboxConfigFile = combine globalSandboxConfigFile,
globalRemoteRepos = lastNonEmptyNL globalRemoteRepos,
globalCacheDir = combine globalCacheDir,
globalLocalRepos = lastNonEmptyNL globalLocalRepos,
globalLogsDir = combine globalLogsDir,
globalWorldFile = combine globalWorldFile,
globalRequireSandbox = combine globalRequireSandbox,
globalIgnoreSandbox = combine globalIgnoreSandbox
}
where
combine = combine' savedGlobalFlags
lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags
combinedSavedInstallFlags = InstallFlags {
installDocumentation = combine installDocumentation,
installHaddockIndex = combine installHaddockIndex,
installDryRun = combine installDryRun,
installMaxBackjumps = combine installMaxBackjumps,
installReorderGoals = combine installReorderGoals,
installIndependentGoals = combine installIndependentGoals,
installShadowPkgs = combine installShadowPkgs,
installStrongFlags = combine installStrongFlags,
installReinstall = combine installReinstall,
installAvoidReinstalls = combine installAvoidReinstalls,
installOverrideReinstall = combine installOverrideReinstall,
installUpgradeDeps = combine installUpgradeDeps,
installOnly = combine installOnly,
installOnlyDeps = combine installOnlyDeps,
installRootCmd = combine installRootCmd,
installSummaryFile = lastNonEmptyNL installSummaryFile,
installLogFile = combine installLogFile,
installBuildReports = combine installBuildReports,
installReportPlanningFailure = combine installReportPlanningFailure,
installSymlinkBinDir = combine installSymlinkBinDir,
installOneShot = combine installOneShot,
installNumJobs = combine installNumJobs,
installRunTests = combine installRunTests
}
where
combine = combine' savedInstallFlags
lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags
combinedSavedConfigureFlags = ConfigFlags {
configPrograms = configPrograms . savedConfigureFlags $ b,
-- TODO: NubListify
configProgramPaths = lastNonEmpty configProgramPaths,
-- TODO: NubListify
configProgramArgs = lastNonEmpty configProgramArgs,
configProgramPathExtra = lastNonEmptyNL configProgramPathExtra,
configBuildHc = combine configBuildHc,
configBuildHcPkg = combine configBuildHcPkg,
configHcFlavor = combine configHcFlavor,
configHcPath = combine configHcPath,
configHcPkg = combine configHcPkg,
configVanillaLib = combine configVanillaLib,
configProfLib = combine configProfLib,
configProf = combine configProf,
configSharedLib = combine configSharedLib,
configDynExe = combine configDynExe,
configProfExe = combine configProfExe,
-- TODO: NubListify
configConfigureArgs = lastNonEmpty configConfigureArgs,
configOptimization = combine configOptimization,
configDebugInfo = combine configDebugInfo,
configProgPrefix = combine configProgPrefix,
configProgSuffix = combine configProgSuffix,
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
configInstallDirs =
(configInstallDirs . savedConfigureFlags $ a)
`mappend` (configInstallDirs . savedConfigureFlags $ b),
configScratchDir = combine configScratchDir,
-- TODO: NubListify
configExtraLibDirs = lastNonEmpty configExtraLibDirs,
-- TODO: NubListify
configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs,
configDistPref = combine configDistPref,
configVerbosity = combine configVerbosity,
configUserInstall = combine configUserInstall,
-- TODO: NubListify
configPackageDBs = lastNonEmpty configPackageDBs,
configGHCiLib = combine configGHCiLib,
configSplitObjs = combine configSplitObjs,
configStripExes = combine configStripExes,
configStripLibs = combine configStripLibs,
-- TODO: NubListify
configConstraints = lastNonEmpty configConstraints,
-- TODO: NubListify
configDependencies = lastNonEmpty configDependencies,
configInstantiateWith = lastNonEmpty configInstantiateWith,
-- TODO: NubListify
configConfigurationsFlags = lastNonEmpty configConfigurationsFlags,
configTests = combine configTests,
configBenchmarks = combine configBenchmarks,
configCoverage = combine configCoverage,
configLibCoverage = combine configLibCoverage,
configExactConfiguration = combine configExactConfiguration,
configFlagError = combine configFlagError,
configRelocatable = combine configRelocatable
}
where
combine = combine' savedConfigureFlags
lastNonEmpty = lastNonEmpty' savedConfigureFlags
lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags
combinedSavedConfigureExFlags = ConfigExFlags {
configCabalVersion = combine configCabalVersion,
-- TODO: NubListify
configExConstraints = lastNonEmpty configExConstraints,
-- TODO: NubListify
configPreferences = lastNonEmpty configPreferences,
configSolver = combine configSolver,
configAllowNewer = combine configAllowNewer
}
where
combine = combine' savedConfigureExFlags
lastNonEmpty = lastNonEmpty' savedConfigureExFlags
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
combinedSavedUserInstallDirs = savedUserInstallDirs a
`mappend` savedUserInstallDirs b
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a
`mappend` savedGlobalInstallDirs b
combinedSavedUploadFlags = UploadFlags {
uploadCheck = combine uploadCheck,
uploadUsername = combine uploadUsername,
uploadPassword = combine uploadPassword,
uploadVerbosity = combine uploadVerbosity
}
where
combine = combine' savedUploadFlags
combinedSavedReportFlags = ReportFlags {
reportUsername = combine reportUsername,
reportPassword = combine reportPassword,
reportVerbosity = combine reportVerbosity
}
where
combine = combine' savedReportFlags
combinedSavedHaddockFlags = HaddockFlags {
-- TODO: NubListify
haddockProgramPaths = lastNonEmpty haddockProgramPaths,
-- TODO: NubListify
haddockProgramArgs = lastNonEmpty haddockProgramArgs,
haddockHoogle = combine haddockHoogle,
haddockHtml = combine haddockHtml,
haddockHtmlLocation = combine haddockHtmlLocation,
haddockExecutables = combine haddockExecutables,
haddockTestSuites = combine haddockTestSuites,
haddockBenchmarks = combine haddockBenchmarks,
haddockInternal = combine haddockInternal,
haddockCss = combine haddockCss,
haddockHscolour = combine haddockHscolour,
haddockHscolourCss = combine haddockHscolourCss,
haddockContents = combine haddockContents,
haddockDistPref = combine haddockDistPref,
haddockKeepTempFiles = combine haddockKeepTempFiles,
haddockVerbosity = combine haddockVerbosity
}
where
combine = combine' savedHaddockFlags
lastNonEmpty = lastNonEmpty' savedHaddockFlags
updateInstallDirs :: Flag Bool -> SavedConfig -> SavedConfig
updateInstallDirs userInstallFlag
savedConfig@SavedConfig {
savedConfigureFlags = configureFlags,
savedUserInstallDirs = userInstallDirs,
savedGlobalInstallDirs = globalInstallDirs
} =
savedConfig {
savedConfigureFlags = configureFlags {
configInstallDirs = installDirs
}
}
where
installDirs | userInstall = userInstallDirs
| otherwise = globalInstallDirs
userInstall = fromFlagOrDefault defaultUserInstall $
configUserInstall configureFlags `mappend` userInstallFlag
--
-- * Default config
--
-- | These are the absolute basic defaults. The fields that must be
-- initialised. When we load the config from the file we layer the loaded
-- values over these ones, so any missing fields in the file take their values
-- from here.
--
baseSavedConfig :: IO SavedConfig
baseSavedConfig = do
userPrefix <- defaultCabalDir
logsDir <- defaultLogsDir
worldFile <- defaultWorldFile
return mempty {
savedConfigureFlags = mempty {
configHcFlavor = toFlag defaultCompiler,
configUserInstall = toFlag defaultUserInstall,
configVerbosity = toFlag normal
},
savedUserInstallDirs = mempty {
prefix = toFlag (toPathTemplate userPrefix)
},
savedGlobalFlags = mempty {
globalLogsDir = toFlag logsDir,
globalWorldFile = toFlag worldFile
}
}
-- | This is the initial configuration that we write out to the config file
-- if the file does not exist (or the config we use if the file cannot be read
-- for some other reason). When the config gets loaded it gets layered on top
-- of 'baseSavedConfig', so we do not need to include it in the initial
-- values we save into the config file.
--
initialSavedConfig :: IO SavedConfig
initialSavedConfig = do
cacheDir <- defaultCacheDir
logsDir <- defaultLogsDir
worldFile <- defaultWorldFile
extraPath <- defaultExtraPath
return mempty {
savedGlobalFlags = mempty {
globalCacheDir = toFlag cacheDir,
globalRemoteRepos = toNubList [defaultRemoteRepo],
globalWorldFile = toFlag worldFile
},
savedConfigureFlags = mempty {
configProgramPathExtra = toNubList extraPath
},
savedInstallFlags = mempty {
installSummaryFile = toNubList [toPathTemplate (logsDir </> "build.log")],
installBuildReports= toFlag AnonymousReports,
installNumJobs = toFlag Nothing
}
}
--TODO: misleading, there's no way to override this default
-- either make it possible or rename to simply getCabalDir.
defaultCabalDir :: IO FilePath
defaultCabalDir = getAppUserDataDirectory "cabal"
defaultConfigFile :: IO FilePath
defaultConfigFile = do
dir <- defaultCabalDir
return $ dir </> "config"
defaultCacheDir :: IO FilePath
defaultCacheDir = do
dir <- defaultCabalDir
return $ dir </> "packages"
defaultLogsDir :: IO FilePath
defaultLogsDir = do
dir <- defaultCabalDir
return $ dir </> "logs"
-- | Default position of the world file
defaultWorldFile :: IO FilePath
defaultWorldFile = do
dir <- defaultCabalDir
return $ dir </> "world"
defaultExtraPath :: IO [FilePath]
defaultExtraPath = do
dir <- defaultCabalDir
return [dir </> "bin"]
defaultCompiler :: CompilerFlavor
defaultCompiler = fromMaybe GHC defaultCompilerFlavor
defaultUserInstall :: Bool
defaultUserInstall = True
-- We do per-user installs by default on all platforms. We used to default to
-- global installs on Windows but that no longer works on Windows Vista or 7.
defaultRemoteRepo :: RemoteRepo
defaultRemoteRepo = RemoteRepo name uri
where
name = "hackage.haskell.org"
uri = URI "http:" (Just (URIAuth "" name "")) "/packages/archive" "" ""
--
-- * Config file reading
--
loadConfig :: Verbosity -> Flag FilePath -> Flag Bool -> IO SavedConfig
loadConfig verbosity configFileFlag userInstallFlag = addBaseConf $ do
let sources = [
("commandline option", return . flagToMaybe $ configFileFlag),
("env var CABAL_CONFIG", lookup "CABAL_CONFIG" `liftM` getEnvironment),
("default config file", Just `liftM` defaultConfigFile) ]
getSource [] = error "no config file path candidate found."
getSource ((msg,action): xs) =
action >>= maybe (getSource xs) (return . (,) msg)
(source, configFile) <- getSource sources
minp <- readConfigFile mempty configFile
case minp of
Nothing -> do
notice verbosity $ "Config file path source is " ++ source ++ "."
notice verbosity $ "Config file " ++ configFile ++ " not found."
notice verbosity $ "Writing default configuration to " ++ configFile
commentConf <- commentSavedConfig
initialConf <- initialSavedConfig
writeConfigFile configFile commentConf initialConf
return initialConf
Just (ParseOk ws conf) -> do
unless (null ws) $ warn verbosity $
unlines (map (showPWarning configFile) ws)
return conf
Just (ParseFailed err) -> do
let (line, msg) = locatedErrorMsg err
die $
"Error parsing config file " ++ configFile
++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg
where
addBaseConf body = do
base <- baseSavedConfig
extra <- body
return (updateInstallDirs userInstallFlag (base `mappend` extra))
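-- An illustrative sketch of a typical 'loadConfig' call (the config path is
-- just an example; 'toFlag' comes from Distribution.Simple.Setup, and 'mempty'
-- leaves the user-install flag unspecified):
--
-- > getConf :: IO SavedConfig
-- > getConf = loadConfig normal (toFlag "/home/user/.cabal/config") mempty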
readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig))
readConfigFile initial file = handleNotExists $
fmap (Just . parseConfig initial) (readFile file)
where
handleNotExists action = catchIO action $ \ioe ->
if isDoesNotExistError ioe
then return Nothing
else ioError ioe
writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO ()
writeConfigFile file comments vals = do
let tmpFile = file <.> "tmp"
createDirectoryIfMissing True (takeDirectory file)
writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n"
renameFile tmpFile file
where
explanation = unlines
["-- This is the configuration file for the 'cabal' command line tool."
,""
,"-- The available configuration options are listed below."
,"-- Some of them have default values listed."
,""
,"-- Lines (like this one) beginning with '--' are comments."
,"-- Be careful with spaces and indentation because they are"
,"-- used to indicate layout for nested sections."
,""
,"-- Cabal library version: " ++ showVersion cabalVersion
,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version
,"",""
]
-- | These are the default values that get used in Cabal if no value is
-- given. We include them in comments when we write out the initial config
-- file so that the user can see what default value they are overriding.
--
commentSavedConfig :: IO SavedConfig
commentSavedConfig = do
userInstallDirs <- defaultInstallDirs defaultCompiler True True
globalInstallDirs <- defaultInstallDirs defaultCompiler False True
return SavedConfig {
savedGlobalFlags = defaultGlobalFlags,
savedInstallFlags = defaultInstallFlags,
savedConfigureExFlags = defaultConfigExFlags,
savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) {
configUserInstall = toFlag defaultUserInstall
},
savedUserInstallDirs = fmap toFlag userInstallDirs,
savedGlobalInstallDirs = fmap toFlag globalInstallDirs,
savedUploadFlags = commandDefaultFlags uploadCommand,
savedReportFlags = commandDefaultFlags reportCommand,
savedHaddockFlags = defaultHaddockFlags
}
-- | All config file fields.
--
configFieldDescriptions :: [FieldDescr SavedConfig]
configFieldDescriptions =
toSavedConfig liftGlobalFlag
(commandOptions (globalCommand []) ParseArgs)
["version", "numeric-version", "config-file", "sandbox-config-file"] []
++ toSavedConfig liftConfigFlag
(configureOptions ParseArgs)
(["builddir", "constraint", "dependency"]
++ map fieldName installDirsFields)
--FIXME: this is only here because viewAsFieldDescr gives us a parser
-- that only recognises 'ghc' etc, the case-sensitive flag names, not
-- what the normal case-insensitive parser gives us.
[simpleField "compiler"
(fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse)
configHcFlavor (\v flags -> flags { configHcFlavor = v })
-- TODO: The following is a temporary fix. The "optimization"
-- and "debug-info" fields are OptArg, and viewAsFieldDescr
-- fails on that. Instead of a hand-written, hacked-together parser
-- and printer, we should handle this case properly in the
-- library.
,liftField configOptimization (\v flags -> flags { configOptimization = v }) $
let name = "optimization" in
FieldDescr name
(\f -> case f of
Flag NoOptimisation -> Disp.text "False"
Flag NormalOptimisation -> Disp.text "True"
Flag MaximumOptimisation -> Disp.text "2"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoOptimisation)
| str == "True" -> ParseOk [] (Flag NormalOptimisation)
| str == "0" -> ParseOk [] (Flag NoOptimisation)
| str == "1" -> ParseOk [] (Flag NormalOptimisation)
| str == "2" -> ParseOk [] (Flag MaximumOptimisation)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
let name = "debug-info" in
FieldDescr name
(\f -> case f of
Flag NoDebugInfo -> Disp.text "False"
Flag MinimalDebugInfo -> Disp.text "1"
Flag NormalDebugInfo -> Disp.text "True"
Flag MaximalDebugInfo -> Disp.text "3"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoDebugInfo)
| str == "True" -> ParseOk [] (Flag NormalDebugInfo)
| str == "0" -> ParseOk [] (Flag NoDebugInfo)
| str == "1" -> ParseOk [] (Flag MinimalDebugInfo)
| str == "2" -> ParseOk [] (Flag NormalDebugInfo)
| str == "3" -> ParseOk [] (Flag MaximalDebugInfo)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
]
++ toSavedConfig liftConfigExFlag
(configureExOptions ParseArgs)
[] []
++ toSavedConfig liftInstallFlag
(installOptions ParseArgs)
["dry-run", "only", "only-dependencies", "dependencies-only"] []
++ toSavedConfig liftUploadFlag
(commandOptions uploadCommand ParseArgs)
["verbose", "check"] []
++ toSavedConfig liftReportFlag
(commandOptions reportCommand ParseArgs)
["verbose", "username", "password"] []
--FIXME: this is a hack, hiding the user name and password.
-- But otherwise it masks the upload ones. Either need to
-- share the options or make them distinct. In any case
-- they should probably be per-server.
where
toSavedConfig lift options exclusions replacements =
[ lift (fromMaybe field replacement)
| opt <- options
, let field = viewAsFieldDescr opt
name = fieldName field
replacement = find ((== name) . fieldName) replacements
, name `notElem` exclusions ]
optional = Parse.option mempty . fmap toFlag
-- TODO: next step, make the deprecated fields elicit a warning.
--
deprecatedFieldDescriptions :: [FieldDescr SavedConfig]
deprecatedFieldDescriptions =
[ liftGlobalFlag $
listField "repos"
(Disp.text . showRepo) parseRepo
(fromNubList . globalRemoteRepos)
(\rs cfg -> cfg { globalRemoteRepos = toNubList rs })
, liftGlobalFlag $
simpleField "cachedir"
(Disp.text . fromFlagOrDefault "") (optional parseFilePathQ)
globalCacheDir (\d cfg -> cfg { globalCacheDir = d })
, liftUploadFlag $
simpleField "hackage-username"
(Disp.text . fromFlagOrDefault "" . fmap unUsername)
(optional (fmap Username parseTokenQ))
uploadUsername (\d cfg -> cfg { uploadUsername = d })
, liftUploadFlag $
simpleField "hackage-password"
(Disp.text . fromFlagOrDefault "" . fmap unPassword)
(optional (fmap Password parseTokenQ))
uploadPassword (\d cfg -> cfg { uploadPassword = d })
]
++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields
++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields
where
optional = Parse.option mempty . fmap toFlag
modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a
modifyFieldName f d = d { fieldName = f (fieldName d) }
liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
-> FieldDescr SavedConfig
liftUserInstallDirs = liftField
savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags })
liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
-> FieldDescr SavedConfig
liftGlobalInstallDirs = liftField
savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags })
liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig
liftGlobalFlag = liftField
savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags })
liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig
liftConfigFlag = liftField
savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags })
liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig
liftConfigExFlag = liftField
savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags })
liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig
liftInstallFlag = liftField
savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags })
liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig
liftUploadFlag = liftField
savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags })
liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig
liftReportFlag = liftField
savedReportFlags (\flags conf -> conf { savedReportFlags = flags })
parseConfig :: SavedConfig -> String -> ParseResult SavedConfig
parseConfig initial = \str -> do
fields <- readFields str
let (knownSections, others) = partition isKnownSection fields
config <- parse others
let user0 = savedUserInstallDirs config
global0 = savedGlobalInstallDirs config
(haddockFlags, user, global, paths, args) <-
foldM parseSections
(savedHaddockFlags config, user0, global0, [], [])
knownSections
return config {
savedConfigureFlags = (savedConfigureFlags config) {
configProgramPaths = paths,
configProgramArgs = args
},
savedHaddockFlags = haddockFlags,
savedUserInstallDirs = user,
savedGlobalInstallDirs = global
}
where
isKnownSection (ParseUtils.Section _ "haddock" _ _) = True
isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True
isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True
isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True
isKnownSection _ = False
parse = parseFields (configFieldDescriptions
++ deprecatedFieldDescriptions) initial
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "haddock" name fs)
| name == "" = do h' <- parseFields haddockFlagsFields h fs
return (h', u, g, p, a)
| otherwise = do
warning "The 'haddock' section should be unnamed"
return accum
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "install-dirs" name fs)
| name' == "user" = do u' <- parseFields installDirsFields u fs
return (h, u', g, p, a)
| name' == "global" = do g' <- parseFields installDirsFields g fs
return (h, u, g', p, a)
| otherwise = do
warning "The 'install-paths' section should be for 'user' or 'global'"
return accum
where name' = lowercase name
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "program-locations" name fs)
| name == "" = do p' <- parseFields withProgramsFields p fs
return (h, u, g, p', a)
| otherwise = do
warning "The 'program-locations' section should be unnamed"
return accum
parseSections accum@(h, u, g, p, a)
(ParseUtils.Section _ "program-default-options" name fs)
| name == "" = do a' <- parseFields withProgramOptionsFields a fs
return (h, u, g, p, a')
| otherwise = do
warning "The 'program-default-options' section should be unnamed"
return accum
parseSections accum f = do
warning $ "Unrecognized stanza on line " ++ show (lineNo f)
return accum
showConfig :: SavedConfig -> String
showConfig = showConfigWithComments mempty
showConfigWithComments :: SavedConfig -> SavedConfig -> String
showConfigWithComments comment vals = Disp.render $
ppFields configFieldDescriptions mcomment vals
$+$ Disp.text ""
$+$ ppSection "haddock" "" haddockFlagsFields
(fmap savedHaddockFlags mcomment) (savedHaddockFlags vals)
$+$ Disp.text ""
$+$ installDirsSection "user" savedUserInstallDirs
$+$ Disp.text ""
$+$ installDirsSection "global" savedGlobalInstallDirs
$+$ Disp.text ""
$+$ configFlagsSection "program-locations" withProgramsFields
configProgramPaths
$+$ Disp.text ""
$+$ configFlagsSection "program-default-options" withProgramOptionsFields
configProgramArgs
where
mcomment = Just comment
installDirsSection name field =
ppSection "install-dirs" name installDirsFields
(fmap field mcomment) (field vals)
configFlagsSection name fields field =
ppSection name "" fields
(fmap (field . savedConfigureFlags) mcomment)
((field . savedConfigureFlags) vals)
-- | Fields for the 'install-dirs' sections.
installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))]
installDirsFields = map viewAsFieldDescr installDirsOptions
-- | Fields for the 'haddock' section.
haddockFlagsFields :: [FieldDescr HaddockFlags]
haddockFlagsFields = [ field
| opt <- haddockOptions ParseArgs
, let field = viewAsFieldDescr opt
name = fieldName field
, name `notElem` exclusions ]
where
exclusions = ["verbose", "builddir"]
-- | Fields for the 'program-locations' section.
withProgramsFields :: [FieldDescr [(String, FilePath)]]
withProgramsFields =
map viewAsFieldDescr $
programConfigurationPaths' (++ "-location") defaultProgramConfiguration
ParseArgs id (++)
-- | Fields for the 'program-default-options' section.
withProgramOptionsFields :: [FieldDescr [(String, [String])]]
withProgramOptionsFields =
map viewAsFieldDescr $
programConfigurationOptions defaultProgramConfiguration ParseArgs id (++)
-- | Get the differences (as a pseudo code diff) between the user's
-- '~/.cabal/config' and the one that cabal would generate if it didn't exist.
userConfigDiff :: GlobalFlags -> IO [String]
userConfigDiff globalFlags = do
userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty
testConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
return $ reverse . foldl' createDiff [] . M.toList
$ M.unionWith combine
(M.fromList . map justFst $ filterShow testConfig)
(M.fromList . map justSnd $ filterShow userConfig)
where
justFst (a, b) = (a, (Just b, Nothing))
justSnd (a, b) = (a, (Nothing, Just b))
combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b)
combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b)
combine x y = error $ "Can't happen : userConfigDiff " ++ show x ++ " " ++ show y
createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String]
createDiff acc (key, (Just a, Just b))
| a == b = acc
| otherwise = ("+ " ++ key ++ ": " ++ b) : ("- " ++ key ++ ": " ++ a) : acc
createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc
createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc
createDiff acc (_, (Nothing, Nothing)) = acc
filterShow :: SavedConfig -> [(String, String)]
filterShow cfg = map keyValueSplit
. filter (\s -> not (null s) && any (== ':') s)
. map nonComment
. lines
$ showConfig cfg
nonComment [] = []
nonComment ('-':'-':_) = []
nonComment (x:xs) = x : nonComment xs
topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace
keyValueSplit s =
let (left, right) = break (== ':') s
in (topAndTail left, topAndTail (drop 1 right))
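-- Illustrative shape of the diff produced above (the field values are made
-- up): every key whose value differs contributes a removed/added pair, e.g.
--
-- > - jobs: 1
-- > + jobs: 4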
-- | Update the user's '~/.cabal/config', keeping the user's customizations.
userConfigUpdate :: Verbosity -> GlobalFlags -> IO ()
userConfigUpdate verbosity globalFlags = do
userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty
newConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
commentConf <- commentSavedConfig
cabalFile <- defaultConfigFile
let backup = cabalFile ++ ".backup"
notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "."
renameFile cabalFile backup
notice verbosity $ "Writing merged config to " ++ cabalFile ++ "."
writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig)
| plumlife/cabal | cabal-install/Distribution/Client/Config.hs | bsd-3-clause | 38,871 | 0 | 25 | 10,490 | 8,118 | 4,369 | 3,749 | 703 | 9 |
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-name-shadowing #-}
-- This code was written by Pedro Vasconcelos for his implementation of
-- the Tzaar game, HsTzaar. I'm reproducing it here just to be able to
-- compare the performance of his implementation with my own.
--
-- Thanks to Pedro for providing his code; I've got some nice ideas
-- from it.
--
-- HsTzaar may be downloaded from Hackage:
-- http://hackage.haskell.org/package/hstzaar
module AI.Algorithms.Tzaar
( alphabeta
, negascout
, negamax
, Gametree(..)
, Valuation
, Valued(..)
, valued
, ($+)
) where
class Gametree p where
children :: p -> [p] -- list of move, position
is_terminal :: p -> Bool
is_terminal = null . children -- default definition
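-- A minimal illustrative instance, assuming a hypothetical single-pile Nim
-- position type; terminal detection falls out of the default 'is_terminal':
--
-- > newtype Nim = Nim Int
-- > instance Gametree Nim where
-- >   children (Nim n) = [Nim (n - k) | k <- [1 .. min 3 n]]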
-- | type for valuation functions
type Valuation a = a -> Int
-- | a pair of something with a strict integer valuation
-- supporting equality, ordering and limited arithmetic on valuation
data Valued a = Valued { value :: !Int, unvalued :: a } deriving Show
instance Functor Valued where
fmap f (Valued v x) = Valued v (f x)
-- | apply a valuation
valued :: Valuation a -> a -> Valued a
valued f x = Valued (f x) x
-- | modify the valuation
revalue :: (Int -> Int) -> Valued a -> Valued a
revalue f (Valued v x) = Valued (f v) x
instance Eq (Valued a) where
x == y = value x==value y
instance Ord (Valued a) where
compare x y = compare (value x) (value y)
-- some instances of numeric type class (only negate and fromInteger)
instance Num (Valued a) where
(+) = undefined
(-) = undefined
(*) = undefined
negate = revalue negate
fromInteger n = valued (const (fromIntegral n)) undefined
abs = undefined
signum = undefined
-- | add a constant to a value
infix 6 $+
($+) :: Int -> Valued a -> Valued a
k $+ x = revalue (+k) x
-- | Naive negamax algorithm (no pruning)
-- wrapper
negamax :: Gametree p => Valuation p -> Int -> p -> Valued p
negamax node_value depth p = negamax' depth p
where
-- worker
negamax' d p
| d==0 || is_terminal p = valued node_value p
| otherwise = negate $ minimum [negamax' d' p' | p'<-children p]
where d' = d-1
-- | Negamax with alpha-beta pruning
-- wrapper
alphabeta :: Gametree p => Valuation p -> Int -> p -> Valued p
alphabeta node_value depth p
= let a = fromIntegral (minBound+1 :: Int)
b = fromIntegral (maxBound :: Int)
in alpha_beta' depth a b p
where
-- worker
alpha_beta' d alfa beta p
| d==0 || is_terminal p = valued node_value p
| otherwise = cmx alfa (children p)
where
d' = d-1
cmx alfa [] = alfa
cmx alfa (p:ps)
| a'>=beta = a'
| otherwise = cmx (max a' alfa) ps
where a' = negate $ alpha_beta' d' (negate beta) (negate alfa) p
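-- Illustrative usage sketch: given a position type with a 'Gametree' instance
-- and a static 'Valuation' (both hypothetical here), a 4-ply search is
--
-- > best :: Position -> Valued Position
-- > best = alphabeta staticScore 4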
-- | Negascout search
-- wrapper
negascout :: Gametree p => Valuation p -> Int -> p -> Valued p
negascout node_value depth p
= let a = fromIntegral (minBound+1 :: Int)
b = fromIntegral (maxBound :: Int)
in negascout' node_value depth a b p
where
-- worker
negascout' node_value d alfa beta p
| d==0 || is_terminal p = valued node_value p
| d==1 = valued (negate . node_value) p0 -- short-circuit for depth 1
| b >= beta = b
| otherwise = scout (max alfa b) b ps
where
d' = d-1
ps = children p
p0 = unvalued $ minimum $ map (valued node_value) ps
-- p0 = estimate_best node_value ps -- child with best static score
b = negate $ negascout' node_value d' (negate beta) (negate alfa) p0 -- full search estimate
scout _ !b [] = b
scout !alfa !b (p:ps)
| s>=beta = s
| otherwise = scout alfa' b' ps
where s = negate $ negascout' node_value d' (negate (1$+alfa)) (negate alfa) p
s' | s>alfa = negate $
negascout' node_value d' (negate beta) (negate alfa) p
| otherwise = s
alfa' = max alfa s'
b' = max b s'
| sphynx/hamisado | AI/Algorithms/Tzaar.hs | bsd-3-clause | 4,094 | 0 | 16 | 1,167 | 1,303 | 667 | 636 | 84 | 2 |
{-# LANGUAGE ConstraintKinds, TypeFamilies #-}
{- |
App module defines the application types used with the Spock framework.
-}
module Guide.App
where
-- hvect
import Data.HVect
-- Spock
import Web.Spock
import Guide.Types.User (User)
import Guide.Types.Session (GuideData)
import Guide.ServerStuff (ServerState)
-- | Type of connection, currently unused. (Acid-State DB stored in 'ServerState')
type GuideConn = ()
-- | Type of user session payload.
type GuideSessionData = GuideData
-- | Type of server state, accessed with 'getState'.
type GuideState = ServerState
-- | The fully qualified type of a Spock application/route.
type GuideM ctx r = SpockCtxM ctx GuideConn GuideData ServerState r
-- | Type of a root application.
type GuideApp ctx = GuideM ctx ()
-- | Type of a Guide action with a generic context.
type GuideAction ctx r = ActionCtxT ctx (WebStateM GuideConn GuideData ServerState) r
data IsAdmin = IsAdmin
type AuthM ctx r = forall n xs. (ctx ~ HVect xs, ListContains n User xs) => GuideM ctx r
type AuthAction ctx r = forall n xs. (ctx ~ HVect xs, ListContains n User xs) => GuideAction ctx r
type AdminM ctx r = forall n xs. (ctx ~ HVect xs, ListContains n IsAdmin xs) => GuideM ctx r
type AdminAction ctx r = forall n xs. (ctx ~ HVect xs, ListContains n IsAdmin xs) => GuideAction ctx r
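-- Illustrative sketch of how the auth aliases are meant to be used in a
-- handler (the handler body is hypothetical; 'findFirst' is from Data.HVect
-- and 'getContext' from Web.Spock):
--
-- > profileHandler :: AuthAction ctx ()
-- > profileHandler = do
-- >   user <- findFirst <$> getContext  -- the logged-in 'User' from the HVect context
-- >   ...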
| aelve/hslibs | src/Guide/App.hs | bsd-3-clause | 1,314 | 0 | 9 | 244 | 315 | 186 | 129 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module ServerSpec
(spec) where
import Test.Hspec
import Server
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "reqUri" $ do
it "is Nothing for an invalid HTTP GET request" $ do
reqUri "FOO" `shouldBe` Nothing
it "is Nothing for an HTTP request that is not GET" $ do
reqUri "POST / HTTP/1.1" `shouldBe` Nothing
it "is Nothing for a non-1.1 HTTP request" $ do
reqUri "GET /foo HTTP/1.0" `shouldBe` Nothing
it "is Nothing for a GET request with extra spaces after the URI" $ do
reqUri "GET /bar HTTP/1.1" `shouldBe` Nothing
it "is the request URI for a valid GET request" $ do
reqUri "GET / HTTP/1.1" `shouldBe` Just "/"
it "captures several path elements" $ do
reqUri "GET /foo/bar/baz/ HTTP/1.1" `shouldBe` Just "/foo/bar/baz/"
it "captures query parameters" $ do
reqUri "GET /x?a=1&b=2 HTTP/1.1" `shouldBe` Just "/x?a=1&b=2"
describe "HTTP response" $ do
it "has correct zero content-length" $ do
response "200 OK" "" `shouldBe`
"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"
it "has correct non-zero content-length" $ do
response "404 Not Found" "Eff off." `shouldBe`
"HTTP/1.1 404 Not Found\r\nContent-Length: 8\r\n\r\nEff off."
| tripped/hlog | test/ServerSpec.hs | bsd-3-clause | 1,412 | 0 | 14 | 417 | 277 | 130 | 147 | 31 | 1 |
module Scheme.Eval
( eval, readEvalPrint, readEvalPrint', readEvalLines, readEvalLines'
, primitiveEnv, primitives) where
import Control.Monad
import Data.Maybe
import Data.IORef
import Control.Monad.Error
import Scheme.Types
import System.IO
import Scheme.Parser
import Scheme.Eval.DynamicTypes
import Scheme.Eval.Primitives
type Continuation = LispVal
-- Special primitives receive their [LispVal] arguments unevaluated,
-- get full access to the continuation and the environment, and
-- are responsible for calling eval themselves if necessary.
selfRefPrimitives :: [(String, Continuation -> Env -> [LispVal] -> ThrowsErrorIO LispVal)]
selfRefPrimitives =
[ ("inspect", \cont env -> maxArgs 0 >=> const lispNull)
, ("define", \_ env -> define env . List)
, ("quote", \_ _ -> oneArgOnly)
, ("if", \_ env -> onlyArgs 3 >=> \[maybecond,a,b] -> do
cond <- eval env maybecond >>= expect boolType
eval env $ if cond then a else b)
, ("eval", \cont env -> oneArgOnly >=> \form -> case form of
(List [Atom "quote",lisp]) -> eval env lisp
lisp -> eval env lisp)
, ("lambda", \_ env -> minArgs 2 >=> \(params:body) -> do
params' <- expect (atomType `orType` listType `orType` dottedListType) params
either
(\atom -> makeVarargs (Atom atom) env [] body)
(either (\ps -> makeNormalFunc env ps body)
(\(ps, varargs) -> makeVarargs varargs env ps body))
params')
, ("lambda-closure", \_ env -> oneArgOnly >=> eval env >=> expect funcType >=>
\(ps, vars, bod, Env readOnly env) -> do
e <- liftIO $ readIORef env
liftM List $ mapM (\(n,vIO) ->
liftIO (readIORef vIO) >>= \v -> return $ List [String n, v]) e)
, ("set!", \_ env -> onlyArgs 2 >=> \[a,b] -> do
varName <- expect atomType a
setVar env varName b
lispNull)
, ("load", \_ env -> oneArgOnly >=> expect (stringType `orType` portType)
>=> either (\fname -> safeLiftIO $ readFile fname >>= evalAll env fname)
(\handle -> safeLiftIO $ hGetContents handle >>= evalAll env (show handle)))
]
-- when redefining, try to free any overwritten ports
define env (List [Atom v, form]) = eval env form >>= defineVar env v >> lispNull
define env (List (List (Atom var : params) : body)) =
makeNormalFunc env params body >>= defineVar env var >> lispNull
define env (List (DottedList (Atom var : params) varargs : body)) =
makeVarargs varargs env params body >>= defineVar env var >> lispNull
define _ (List xs) = throwError $ TypeMismatch
"first to be an atom or a dotted list or a normal list" (show xs)
evalPrimitives =
[ ("apply-safe", minArgs 1 >=> \(func:rest) -> apply func $ if length rest == 1 && isList (head rest)
then fromJust $ getList (head rest) else rest)
, ("read", maxArgs 1 >=> \l -> (case l of
[] -> return stdin
[p] -> expect portType p)
>>= \port ->
(safeLiftIO . hGetLine >=> liftIO . readExpr ("read from: " ++ show port)) port)
, ("read-all", maxArgs 1 >=> \l -> (case l of
[] -> return stdin
[p] -> expect portType p)
>>= \port ->
(safeLiftIO . hGetContents >=> liftIO . readAllExpr ("read-all from: " ++ show port)) port)
, ("read-string", oneArgOnly >=> expect stringType >=> readExpr "<string>")
, ("read-string-all", oneArgOnly >=> expect stringType >=> readAllExpr "<string>")
]
readOnlyPrimitives = toPrimitiveFuncs selfRefPrimitives
toPrimitiveFuncs = map (\(name, _) -> (name, PrimitiveFunc name))
primitives = evalPrimitives ++ basicPrimitives ++ ioPrimitives
primitiveEnv = nullEnv readOnlyPrimitives >>= flip bindVars (toPrimitiveFuncs primitives)
bindVars :: Env -> [(String, LispVal)] -> IO Env
bindVars (Env readOnlys env) vars = do
e <- readIORef env
newE <- liftM (++e) (mapM (\(n,v) -> newIORef v >>= \vRef -> return (n, vRef)) vars)
liftM (Env readOnlys) $ newIORef newE
makeFunc vararg env params body = do
expect (listOf atomType) (List params)
maybe (return ()) (void . expect atomType) vararg
return $ Func (map show params) (fmap show vararg) body env
makeNormalFunc :: Env -> [LispVal] -> [LispVal] -> ThrowsErrorIO LispVal
makeNormalFunc = makeFunc Nothing
makeVarargs :: LispVal -> Env -> [LispVal] -> [LispVal] -> ThrowsErrorIO LispVal
makeVarargs = makeFunc . Just
evalExpr env fname s = liftIO $
runErrorT (readLisp fname s >>= eval env) >>=
return . either errToLisp id
evalAll env fname s = liftIO $
runErrorT (readLisps fname s >>= mapM (eval env)) >>=
return . either errToLisp List
readExpr fname s = liftIO $
runErrorT (readLisp fname s) >>=
return . either errToLisp id
readAllExpr fname s = liftIO $
runErrorT (readLisps fname s) >>=
return . either errToLisp List
readEvalLines output fname s = liftIO $ do
env <- primitiveEnv
parseResult <- runErrorT $ readLisps fname s
either print
(mapM_ (showIOThrows . eval env >=> (when output . putStrLn)))
parseResult
readEvalLines' output env fname s = liftIO $ do
parseResult <- runErrorT $ readLisps fname s
either print
(mapM_ (showIOThrows . eval env >=> (when output . putStrLn)))
parseResult
readEvalPrint fname lisp = liftIO $ primitiveEnv >>= \env -> readEvalPrint' fname env lisp
readEvalPrint' fname env = showIOThrows . (readLisp fname >=> eval env) >=> putFlushLn
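-- Illustrative usage sketch (the Scheme source strings are just examples and
-- assume the usual arithmetic primitives are present):
--
-- > demo :: IO ()
-- > demo = do
-- >   env <- primitiveEnv
-- >   readEvalPrint' "<demo>" env "(define (double x) (+ x x))"
-- >   readEvalPrint' "<demo>" env "(double 21)"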
eval :: Env -> LispVal -> ThrowsErrorIO LispVal
eval _ v@(Bool _) = return v
eval _ v@(Num _) = return v
eval _ v@(Character _) = return v
eval _ v@(String _) = return v
eval env v@(Atom a) = getVar env a
eval env (List (func:args)) = do
f <- eval env func
eitherName <- expect (atomType `orType` primType `orType` funcType) f
let name' = liftM (either id id) (get (atomType `orType` primType) f)
maybe
(mapM (eval env) args >>= apply f)
(\evalFunc -> evalFunc undefined env args)
(name' >>= \name -> lookup name selfRefPrimitives)
eval _ badExpr = throwError $ BadExpr "unrecognized form" badExpr
apply p@(PrimitiveFunc f) args =
maybe (throwError $ BadExpr "unrecognized primitive" p) ($args) $
lookup f primitives
apply (Func params varargs body closure) args =
if length params /= length args && isNothing varargs
then throwError $ NumArgs (length params) args
else liftIO (bindVars closure $ zip params args)
>>= bindVarArgs varargs >>= \env -> liftM last (mapM (eval env) body)
where
remainingArgs = drop (length params) args
bindVarArgs arg env = maybe (return env) (\argName -> liftIO $ bindVars env [(argName, List remainingArgs)]) arg
apply f _ = throwError . Default $ "cannot apply " ++ show f ++ " as function"
putFlushLn msg = liftIO $ putStrLn msg >> hFlush stdout
| hucal/SCMinHS | Scheme/Eval.hs | bsd-3-clause | 7,024 | 0 | 21 | 1,783 | 2,621 | 1,339 | 1,282 | -1 | -1 |
module Test.Juggernaut.Api where
import Juggernaut.Api
main :: IO ()
main = putStrLn "hello"
| markhibberd/juggernaut | tests/Test/Juggernaut/Api.hs | bsd-3-clause | 95 | 0 | 6 | 15 | 30 | 17 | 13 | 4 | 1 |
-- Both these functions should successfully simplify
-- using the combine-identical-alternatives optimisation
module T7360 where
import Data.OldList as L
data Foo = Foo1 | Foo2 | Foo3 !Int
fun1 :: Foo -> ()
{-# NOINLINE fun1 #-}
fun1 x = case x of
Foo1 -> ()
Foo2 -> ()
Foo3 {} -> ()
fun2 x = (fun1 Foo1, -- Keep -ddump-simpl output
-- in a predictable order
case x of
[] -> L.length x
(_:_) -> L.length x)
| jstolarek/ghc | testsuite/tests/simplCore/should_compile/T7360.hs | bsd-3-clause | 515 | 0 | 10 | 185 | 135 | 74 | 61 | 15 | 3 |
module Data.TTask.Command.Update
( updateTaskStatus
, updateStoryStatus
, updateSprintStatus
) where
import Control.Lens
import Data.TTask.Types
------
-- Update status
updateTaskStatus :: Id -> TStatusRecord -> Project -> Project
updateTaskStatus i r pj
= pj&task i%~ (\t -> t { _taskStatus = r `TStatusCons` _taskStatus t })
updateStoryStatus :: Id -> TStatusRecord -> Project -> Project
updateStoryStatus i r pj
= pj&story i%~ (\s -> s { _storyStatus = r `TStatusCons` _storyStatus s })
updateSprintStatus :: Id -> TStatusRecord -> Project -> Project
updateSprintStatus i r pj
= pj&sprint i%~ (\s -> s { _sprintStatus = r `TStatusCons` _sprintStatus s })
| tokiwoousaka/ttask | src/Data/TTask/Command/Update.hs | bsd-3-clause | 686 | 0 | 11 | 127 | 227 | 127 | 100 | 15 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hadron.Controller
-- Copyright : Soostone Inc
-- License : BSD3
--
-- Maintainer : Ozgun Ataman
-- Stability : experimental
--
-- High-level flow control of Hadoop programs, with the ability to define a
-- sequence of Map-Reduce operations in a Monad and to use strongly typed
-- data locations.
----------------------------------------------------------------------------
module Hadron.Controller
(
-- * Hadoop Program Construction
Controller
, connect
, connect'
, io
, orchIO
, nodeIO
, setVal
, getVal
, runOnce
, MapReduce (..)
, mrOptions
, Mapper
, Reducer
, (>.>)
, (<.<)
, MRKey (..)
, CompositeKey
, SingleKey (..)
, WrapSerialize (..)
, WrapSafeCopy (..)
-- * Data Sources
, Tap (..)
, tapProto, tapLocation
, tap
, taps
, mergeTaps
, concatTaps
, binaryDirTap
, setupBinaryDir
, fileListTap
, fanOutTap, sinkFanOut, sequentialSinkFanout
, readTap
, readHdfsFile
-- * Command Line Entry Point
, hadoopMain
, HadoopEnv (..)
-- * Settings for MapReduce Jobs
, MROptions
, mroPart
, mroNumMap
, mroNumReduce
, mroCompress
, mroOutSep
, mroTaskTimeout
, PartitionStrategy (..)
, Comparator (..)
, RerunStrategy (..)
-- * Hadoop Utilities
, emitCounter
, hsEmitCounter
, emitStatus
, getFileName
-- * MapReduce Combinators
, mapReduce
, firstBy
, mapMR
, oneSnap
, joinMR
, joinStep
, JoinType (..)
, JoinKey
-- * Data Serialization Utilities
, module Hadron.Protocol
, module Hadron.Run
) where
-------------------------------------------------------------------------------
import Control.Applicative
import Control.Arrow
import Control.Concurrent.Async
import Control.Concurrent.Chan
import Control.Concurrent.QSem
import Control.Error
import Control.Exception.Lens
import Control.Lens
import Control.Monad.Catch
import Control.Monad.Operational hiding (view)
import qualified Control.Monad.Operational as O
import Control.Monad.State
import Control.Monad.Trans.Resource
import Control.Retry
import qualified Crypto.Hash.MD5 as Crypto
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Search as B
import Data.Char
import Data.Conduit hiding (connect)
import Data.Conduit.Binary (sinkHandle, sourceHandle)
import qualified Data.Conduit.List as C
import Data.Conduit.Zlib
import Data.CSV.Conduit
import Data.Default
import Data.List
import qualified Data.Map.Strict as M
import Data.Monoid
import Data.SafeCopy
import Data.Serialize
import Data.String
import Data.String.Conv
import qualified Data.Text as T
import Data.Text.Encoding
import Data.Time
import Data.Typeable
import Network.HostName
import System.Environment
import System.FilePath.Lens
import System.IO
import System.Locale
import Text.Parsec
-------------------------------------------------------------------------------
import Hadron.Basic hiding (mapReduce)
import Hadron.Conduit
import Hadron.Join
import Hadron.Logger
import Hadron.Protocol
import Hadron.Run
import Hadron.Run.Hadoop (mrsInput, mrsJobName,
mrsNumReduce, mrsOutput)
import Hadron.Types
import Hadron.Utils
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
echo :: (Applicative m, MonadIO m, LogItem a) => Severity -> a -> LogStr -> m ()
echo sev cxt msg = runLog $ logF cxt "Run.Hadoop" sev msg
-------------------------------------------------------------------------------
echoInfo :: (Applicative m, MonadIO m, LogItem a) => a -> LogStr -> m ()
echoInfo = echo InfoS
newtype SingleKey a = SingleKey { unKey :: a }
deriving (Eq,Show,Read,Ord,Serialize)
newtype WrapSerialize a = WrapSerialize { _getSerialized :: a }
deriving (Eq,Show,Read,Ord,Serialize)
newtype WrapSafeCopy a = WrapSafeCopy { _getSafeCopy :: a }
deriving (Eq,Show,Read,Ord)
deriveSafeCopy 1 'base ''WrapSafeCopy
type Parser = Parsec [B.ByteString] ()
keyToken :: Parser B.ByteString
keyToken = tokenPrim B.unpack (\pos _ _ -> incSourceColumn pos 1) Just
fromCompKey :: MRKey a => [B.ByteString] -> Either ParseError a
fromCompKey s = runParser keyParser () "Key Input" s
class MRKey k where
toCompKey :: k -> CompositeKey
keyParser :: Parser k
numKeys :: k -> Int
instance MRKey () where
toCompKey _ = [""]
keyParser = keyToken >> return ()
numKeys _ = 1
instance MRKey B.ByteString where
toCompKey k = [k]
keyParser = keyToken
numKeys _ = 1
instance MRKey CompositeKey where
toCompKey ks = ks
keyParser = many1 keyToken
numKeys ks = length ks
instance MRKey String where
toCompKey = toCompKey . B.pack
keyParser = B.unpack <$> keyToken
numKeys _ = 1
instance MRKey T.Text where
toCompKey = toCompKey . encodeUtf8
keyParser = decodeUtf8 <$> keyToken
numKeys _ = 1
instance MRKey Int where
toCompKey = toCompKey . B.pack . show
keyParser = keyParser >>=
maybe (fail "Can't read Int MRKey") return . readMay
numKeys _ = 1
instance Serialize a => MRKey (WrapSerialize a) where
toCompKey = toCompKey . (^. re pSerialize) . _getSerialized
keyParser = do
a <- (^? pSerialize) <$> keyParser
maybe (fail "Can't decode WrapSerialize") (return . WrapSerialize) a
numKeys _ = 1
instance SafeCopy a => MRKey (WrapSafeCopy a) where
toCompKey = toCompKey . (^. re pSafeCopy) . _getSafeCopy
keyParser = do
a <- (^? pSafeCopy) <$> keyParser
maybe (fail "Can't decode WrapSerialize") (return . WrapSafeCopy) a
numKeys _ = 1
utcFormat :: String
utcFormat = "%Y-%m-%d %H:%M:%S.%q"
instance MRKey UTCTime where
toCompKey = toCompKey . formatTime defaultTimeLocale utcFormat
keyParser = do
res <- parseTime defaultTimeLocale utcFormat <$> keyParser
maybe (fail "Can't parse value as UTCTime") return res
numKeys _ = 1
instance (MRKey a, MRKey b) => MRKey (a,b) where
toCompKey (a,b) = toCompKey a ++ toCompKey b
keyParser = (,) <$> keyParser <*> keyParser
numKeys (a,b) = numKeys a + numKeys b
instance (MRKey a, MRKey b, MRKey c) => MRKey (a,b,c) where
toCompKey (a,b,c) = toCompKey a ++ toCompKey b ++ toCompKey c
keyParser = (,,) <$> keyParser <*> keyParser <*> keyParser
numKeys (a,b,c) = numKeys a + numKeys b + numKeys c
instance (MRKey a, MRKey b, MRKey c, MRKey d) => MRKey (a,b,c,d) where
toCompKey (a,b,c,d) = toCompKey a ++ toCompKey b ++ toCompKey c ++ toCompKey d
keyParser = (,,,) <$> keyParser <*> keyParser <*> keyParser <*> keyParser
numKeys (a,b,c,d) = numKeys a + numKeys b + numKeys c + numKeys d
-------------------------------------------------------------------------------
-- | Do something with m-r output before writing it to a tap.
(>.>) :: MapReduce a b -> Conduit b (ResourceT IO) c -> MapReduce a c
(MapReduce o p m c r) >.> f = MapReduce o p m c r'
where
r' = case r of
Left r'' -> Left $ r'' =$= f
Right conv -> Right $ conv =$= f
-------------------------------------------------------------------------------
-- | Do something with the m-r input before starting the map stage.
(<.<) :: Conduit c (ResourceT IO) a -> MapReduce a b -> MapReduce c b
f <.< (MapReduce o p m c r) = MapReduce o p (f =$= m) c r
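-- Illustrative use of the pre/post-processing combinators (the 'wordCount'
-- step is hypothetical):
--
-- > step :: MapReduce B.ByteString (B.ByteString, Int)
-- > step = C.filter (not . B.null) <.< wordCount >.> C.filter ((> 0) . snd)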
-------------------------------------------------------------------------------
-- | A packaged MapReduce step. Make one of these for each distinct
-- map-reduce step in your overall 'Controller' flow.
data MapReduce a b = forall k v. MRKey k => MapReduce {
_mrOptions :: MROptions
-- ^ Hadoop and MapReduce options affecting only this specific
-- job.
, _mrInPrism :: Prism' B.ByteString v
-- ^ A serialization scheme for values between the map-reduce
-- steps.
, _mrMapper :: Mapper a k v
, _mrCombiner :: Maybe (Reducer k v (k,v))
, _mrReducer :: Either (Reducer k v b) (Conduit v (ResourceT IO) b)
-- ^ Either a reducer or a final value converter for a map-only
-- MapReduce job.
}
--------------------------------------------------------------------------------
mrOptions :: Lens' (MapReduce a b) MROptions
mrOptions f (MapReduce o p m c r) = (\ o' -> MapReduce o' p m c r) <$> f o
-- | Tap is a data source/sink definition that *knows* how to serve
-- records of type 'a'.
--
-- It comes with knowledge on how to decode ByteString to target type
-- and can be used both as a sink (to save data form MR output) or
-- source (to feed MR programs).
--
-- Usually, you just define the various data sources and destinations
-- your MapReduce program is going to need:
--
-- > customers = 'tap' "s3n://my-bucket/customers" (csvProtocol def)
data Tap a = Tap
{ _tapLocation :: [FilePath]
, _tapProto :: Protocol' a
}
makeLenses ''Tap
-- | If two 'location's are the same, we consider two Taps equal.
instance Eq (Tap a) where
a == b = _tapLocation a == _tapLocation b
-- | Construct a 'Tap'.
tap :: FilePath -> Protocol' a -> Tap a
tap fp p = Tap [fp] p
taps :: [FilePath] -> Protocol' a -> Tap a
taps fp p = Tap fp p
-------------------------------------------------------------------------------
-- | Does given file belong to tap?
belongsToTap :: Tap a -> FilePath -> Bool
belongsToTap t fn = any (`isInfixOf` fn) stem
where
stem = map (takeWhile (/= '*')) (t ^. tapLocation)
-------------------------------------------------------------------------------
concatTaps :: [Tap a] -> Tap a
concatTaps ts = Tap locs newP
where
locs = concatMap _tapLocation ts
newP = Protocol enc dec
dec = do
fn <- liftIO $ getFileName
case find (flip belongsToTap fn) ts of
Nothing -> error "Unexpected: Can't determine tap in concatTaps."
Just t -> t ^. (tapProto . protoDec)
enc = head ts ^. tapProto . protoEnc
-------------------------------------------------------------------------------
-- | Given a tap directory, enumerate and load all files inside.
-- Caution: This is meant only as a way to load small files, or else
-- you'll fill up your memory.
readTap :: RunContext -> Tap a -> IO [a]
readTap rc t = do
fs <- concat <$> forM (_tapLocation t) (hdfsLs rc)
let chk fp = not (elem (fp ^. filePath . filename) [".", ".."]) &&
(fp ^. fileSize) > 0
let fs' = filter chk fs
runResourceT $
inp (map _filePath fs')
=$= (t ^. tapProto . protoDec)
$$ C.consume
where
policy = capDelay 10000000 $
exponentialBackoff 50000 <> limitRetries 10
pullOne sem chan fp =
bracket_ (waitQSem sem) (signalQSem sem) $
recoverAll policy $ const $ do
a <- runResourceT $ hdfsCat rc fp $$ C.consume
writeChan chan (Just (B.concat a))
inp :: [FilePath] -> Producer (ResourceT IO) B.ByteString
inp fs = do
sem <- liftIO $ newQSem 10
chan <- liftIO newChan
a <- liftIO $ async $ do
mapConcurrently (pullOne sem chan) fs
writeChan chan Nothing
liftIO $ link a
sourceChan chan
-------------------------------------------------------------------------------
-- | Combine two taps intelligently into the Either sum type.
--
-- Matches on the prefix path given as part of each tap. It would
-- therefore fail to work properly on self joins where the same data
-- location is used in both taps.
mergeTaps :: Tap a -> Tap b -> Tap (Either a b)
mergeTaps ta tb = Tap (_tapLocation ta ++ _tapLocation tb) newP
where
newP = Protocol enc dec
dec = do
fn <- liftIO getFileName
if belongsToTap ta fn
then (ta ^. tapProto . protoDec) =$= C.map Left
else (tb ^. tapProto . protoDec) =$= C.map Right
as = ta ^. (tapProto . protoEnc)
bs = tb ^. (tapProto . protoEnc)
enc = awaitForever $ \ res ->
case res of
Left a -> yield a =$= as
Right b -> yield b =$= bs
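-- Illustrative usage (the two input taps and their row types are
-- hypothetical):
--
-- > combined :: Tap (Either Order Refund)
-- > combined = mergeTaps ordersTap refundsTap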
------------------------------------------------------------------------------
-- | Conduit that takes in hdfs filenames and outputs the file
-- contents. Will unpack .gz files automatically.
readHdfsFile
:: RunContext
-> Conduit B.ByteString (ResourceT IO) (FilePath, B.ByteString)
readHdfsFile settings = awaitForever $ \s3Uri -> do
let uriStr = B.unpack s3Uri
getFile = hdfsLocalStream settings uriStr
outStream = if isSuffixOf "gz" uriStr
then getFile =$= ungzip
else getFile
outStream =$= C.map (\ s -> (uriStr, s))
------------------------------------------------------------------------------
-- | Tap for handling file lists. Hadoop can't process raw binary data
-- because it splits on newlines. This tap allows you to get around that
-- limitation by instead making your input a list of file paths that contain
-- binary data. Then the file names get split by hadoop and each map job
-- reads from those files as its first step.
fileListTap
:: RunContext
-> FilePath
-- ^ A file containing a list of files to be used as input
-> Tap (FilePath, B.ByteString)
fileListTap settings loc = tap loc (Protocol enc dec)
where
enc = error "You should never use a fileListTap as output!"
dec = linesConduit =$= readHdfsFile settings
-------------------------------------------------------------------------------
-- | Sink objects into multiple output files through concurrent
-- file-write processes behind the scenes. Work-around for Hadoop
-- Streaming limitations in having to sink output into a single
-- provided HDFS path.
fanOutTap
:: RunContext
-> FilePath
-- ^ Static location where fanout statistics will be written via
-- the regular hadoop output.
-> FilePath
-- ^ A temporary location where in-progress files can be kept.
-> (a -> FilePath)
-- ^ Decision dispatch of where each object should go. Make sure
-- to provide fully qualified hdfs directory paths; a unique token
-- will be appended to each file based on the node producing it.
-> Conduit a (ResourceT IO) B.ByteString
-- ^ How to serialize each object. Make sure this conduit provides
-- for all the typical requirements: One record per line, no
-- newlines inside the record, etc.
-> FanOutSink
-- ^ How to sink the fanout, exposed here for flexibility.
-> Tap a
fanOutTap rc loc tmp dispatch encoder sink = tap loc (Protocol enc dec)
where
dec = error "fanOutTap can't be used to read input."
enc = do
hn <- liftIO mkUniqueHostToken
let dispatch' a = dispatch a & basename %~ (<> "_" <> hn)
fo <- liftIO $ hdfsFanOut rc tmp
register $ liftIO $ fanCloseAll fo
sink dispatch' conv fo
stats <- liftIO $ fanStats fo
(forM_ (M.toList stats) $ \ (fp, cnt) -> yield (map B.pack [fp, (show cnt)]))
=$= fromCSV def
conv a = liftM mconcat $
C.sourceList [a] =$=
encoder $$
C.consume
-------------------------------------------------------------------------------
mkUniqueHostToken :: IO String
mkUniqueHostToken = do
tk <- randomToken 64
(toS . Base16.encode . toS . Crypto.hash . toS . (++ tk))
<$> getHostName
newtype AppLabel = AppLabel { unAppLabel :: T.Text }
deriving (Eq,Show,Read,Ord)
-------------------------------------------------------------------------------
mkAppLabel :: T.Text -> AppLabel
mkAppLabel txt
| all chk (toS txt) = AppLabel txt
| otherwise = error "Application labels can only be lowercase alphanumeric characters"
where
chk c = all ($ c) [isLower, isAlphaNum, not . isSpace]
instance IsString AppLabel where fromString = mkAppLabel . toS
data ContState = ContState {
_csApp :: AppLabel
, _csMRCount :: ! Int
-- ^ MR run count; one for each 'connect'.
, _csMRVars :: ! (M.Map String B.ByteString)
-- ^ Arbitrary key-val store that's communicated to nodes.
, _csDynId :: ! Int
-- ^ Keeps increasing count of dynamic taps in the order they are
-- created in the Controller monad. Needed so we can communicate
-- the tap locations to MR nodes.
, _csRunOnceId :: ! Int
-- ^ Increasing count of run-once cache items so we can
-- communicate to remote nodes.
, _csShortCircuit :: Bool
-- ^ Used by the remote nodes. When they hit their primary target
-- (the mapper, combiner or the reducer), they should stop
-- executing.
}
makeLenses ''ContState
instance Default ContState where
def = ContState (AppLabel "_") 0 M.empty 0 0 False
-------------------------------------------------------------------------------
-- | Load controller variables back up in worker nodes.
loadState
:: (MonadState ContState m, MonadIO m)
=> RunContext
-> FilePath
-> m ()
loadState settings runToken = do
fn <- hdfsTempFilePath settings runToken
tmp <- liftIO $ hdfsGet settings fn
(app, st) <- liftIO $ withLocalFile settings tmp $ \ local -> do
!st <- readFile local <&> read
-- removeFile local
return st
csMRVars %= M.union st
csApp .= app
-------------------------------------------------------------------------------
-- | Write state from orchestrator for later load by worker nodes
writeState
:: (MonadIO m, MonadState ContState m)
=> RunContext
-> FilePath
-> m ()
writeState settings runToken = do
remote <- hdfsTempFilePath settings runToken
let local = LocalFile runToken
st <- use csMRVars
app <- use csApp
withLocalFile settings local $ \ lfp ->
liftIO $ writeFile lfp (show (app, st))
-- put settings file into a file named after the
-- randomly generated token.
liftIO $ hdfsPut settings local remote
-------------------------------------------------------------------------------
data ConI a where
Connect :: forall i o. MapReduce i o
-> [Tap i] -> Tap o
-> Maybe String
-> ConI ()
MakeTap :: Protocol' a -> ConI (Tap a)
BinaryDirTap
:: FilePath
-> (FilePath -> Bool)
-> ConI (Tap (FilePath, B.ByteString))
ConIO :: IO a -> ConI a
-- ^ General IO action; both orchestrator and nodes perform the action
OrchIO :: IO a -> ConI ()
-- ^ Only the orchestrator performs action
NodeIO :: IO a -> ConI a
-- ^ Only the nodes perform action
SetVal :: String -> B.ByteString -> ConI ()
GetVal :: String -> ConI (Maybe B.ByteString)
RunOnce :: Serialize a => IO a -> ConI a
-- ^ Only run on orchestrator, then make available to all the
-- nodes via HDFS.
-- | All MapReduce steps are integrated in the 'Controller' monad.
--
-- Warning: We do have an 'io' combinator as an escape valve for you
-- to use. However, you need to be careful how you use the result of
-- an IO computation. Remember that the same 'main' function will run
-- on both the main orchestrator process and on each and every
-- map/reduce node.
newtype Controller a = Controller (Program ConI a)
deriving (Functor, Applicative, Monad)
-------------------------------------------------------------------------------
-- | Connect a MapReduce program to a set of inputs, returning the
-- output tap that was implicity generated (on hdfs) in the process.
connect'
:: MapReduce a b
-- ^ MapReduce step to run
-> [Tap a]
-- ^ Input files
-> Protocol' b
-- ^ Serialization protocol to be used on the output
-> Maybe String
-- ^ A custom name for the job
-> Controller (Tap b)
connect' mr inp p nm = do
out <- makeTap p
connect mr inp out nm
return out
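-- Illustrative sketch of wiring steps inside a 'Controller' program (the taps
-- and the 'wordCount' / 'totals' steps are hypothetical):
--
-- > flow :: Controller ()
-- > flow = do
-- >   counts <- connect' wordCount [inputTap] (csvProtocol def) (Just "word-count")
-- >   connect totals [counts] outputTap (Just "totals")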
-------------------------------------------------------------------------------
-- | Connect a typed MapReduce program you supply with a list of
-- sources and a destination.
connect :: MapReduce a b -> [Tap a] -> Tap b -> Maybe String -> Controller ()
connect mr inp outp nm = Controller $ singleton $ Connect mr inp outp nm
-------------------------------------------------------------------------------
makeTap :: Protocol' a -> Controller (Tap a)
makeTap p = Controller $ singleton $ MakeTap p
-------------------------------------------------------------------------------
-- | Set a persistent variable in Controller state. This variable will
-- be set during the main M-R job controller loop and communicated to all
-- the map and reduce nodes, where it will be available.
setVal :: String -> B.ByteString -> Controller ()
setVal k v = Controller $ singleton $ SetVal k v
-------------------------------------------------------------------------------
-- | Get a variable from Controller state.
getVal :: String -> Controller (Maybe B.ByteString)
getVal k = Controller $ singleton $ GetVal k
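-- Illustrative usage: a value set while orchestrating becomes visible on the
-- map/reduce nodes later in the same run (the key name is arbitrary):
--
-- > setVal "run-date" "2015-06-01"
-- > ...
-- > mDate <- getVal "run-date"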
-------------------------------------------------------------------------------
-- | Creates a tap for a directory of binary files.
binaryDirTap
:: FilePath
-- ^ A root location to list files under
-> (FilePath -> Bool)
-- ^ A filter condition to refine the listing
-> Controller (Tap (FilePath, B.ByteString))
binaryDirTap loc filt = Controller $ singleton $ BinaryDirTap loc filt
-------------------------------------------------------------------------------
-- | Perform an IO operation both on the orchestrator and on the worker nodes.
io :: IO a -> Controller a
io f = Controller $ singleton $ ConIO f
-------------------------------------------------------------------------------
-- | Perform an IO operation only on the orchestrator
orchIO :: IO a -> Controller ()
orchIO = Controller . singleton . OrchIO
-- | Perform an IO action in the orchestrator to obtain a value, then cache it
-- on HDFS and magically make it available to the nodes during their runtime.
runOnce :: Serialize a => IO a -> Controller a
runOnce = Controller . singleton . RunOnce
-------------------------------------------------------------------------------
-- | Perform an IO operation only on the worker nodes.
nodeIO :: IO a -> Controller a
nodeIO = Controller . singleton . NodeIO
-------------------------------------------------------------------------------
newMRKey :: MonadState ContState m => m String
newMRKey = do
i <- gets _csMRCount
csMRCount %= (+1)
return $! show i
-------------------------------------------------------------------------------
-- | Grab list of files in destination, write into a file, put file on
-- HDFS so it is shared and return the (local, hdfs) paths.
setupBinaryDir
:: RunContext
-> FilePath
-> (FilePath -> Bool)
-> IO (LocalFile, FilePath)
setupBinaryDir settings loc chk = do
localFile <- randomLocalFile
hdfsFile <- randomRemoteFile settings
files <- hdfsLs settings loc <&> map _filePath
let files' = filter chk files
withLocalFile settings localFile $ \ f -> writeFile f (unlines files')
hdfsPut settings localFile hdfsFile
return (localFile, hdfsFile)
tapLens :: Int -> Lens' ContState (Maybe B.ByteString)
tapLens curId = csMRVars.at ("tap_" <> show curId)
runCacheLens :: Int -> Lens' ContState (Maybe B.ByteString)
runCacheLens curId = csMRVars.at ("runOnce_" <> show curId)
pickTapId :: MonadState ContState m => m Int
pickTapId = pickIdWith csDynId
pickRunCacheId :: MonadState ContState m => m Int
pickRunCacheId = pickIdWith csRunOnceId
-------------------------------------------------------------------------------
-- | Monotonically increasing counter.
pickIdWith :: MonadState ContState m => Lens' ContState Int -> m Int
pickIdWith l = do
curId <- use l
l %= (+1)
return curId
-------------------------------------------------------------------------------
-- | Interpreter for the central job control process
orchestrate
:: (MonadMask m, MonadIO m, Applicative m)
=> Controller a
-> RunContext
-> RerunStrategy
-> ContState
-> m (Either String a)
orchestrate (Controller p) settings rr s = do
bracket
(liftIO $ openFile "hadron.log" AppendMode)
(liftIO . hClose)
(\ h -> do echoInfo () "Initiating orchestration..."
evalStateT (runEitherT (go p)) s)
where
go = eval . O.view
eval (Return a) = return a
eval (i :>>= f) = eval' i >>= go . f
eval' :: (MonadIO m) => ConI a -> EitherT String (StateT ContState m) a
eval' (ConIO f) = liftIO f
eval' (OrchIO f) = void $ liftIO f
eval' (NodeIO _) = return (error "NodeIO can't be used in the orchestrator decision path")
-- evaluate the function, write its result into HDFS for later retrieval
eval' (RunOnce f) = do
a <- liftIO f
curId <- pickRunCacheId
runCacheLens curId .= Just (encode a)
-- loc <- liftIO $ randomRemoteFile settings
-- curId <- pickRunCacheId
-- runCacheLens curId .= Just (B.pack loc)
-- tmp <- randomFileName
-- liftIO $ withLocalFile settings tmp $ \ fn ->
-- B.writeFile fn (encode a)
-- liftIO $ hdfsPut settings tmp loc
return a
eval' (MakeTap tp) = do
loc <- liftIO $ randomRemoteFile settings
curId <- pickTapId
tapLens curId .= Just (B.pack loc)
return $ Tap [loc] tp
eval' (BinaryDirTap loc filt) = do
(_, hdfsFile) <- liftIO $ setupBinaryDir settings loc filt
-- remember location of the file from the original loc
-- string
curId <- pickTapId
tapLens curId .= Just (B.pack hdfsFile)
return $ fileListTap settings hdfsFile
eval' (SetVal k v) = csMRVars . at k .= Just v
eval' (GetVal k) = use (csMRVars . at k)
eval' (Connect (MapReduce mro _ _ _ rd) inp outp nm) = go'
where
go' = do
mrKey <- newMRKey
let info = sl "Key" mrKey <> sl "Name" nm
echoInfo info "Launching MR job"
chk <- liftIO $ mapM (hdfsFileExists settings) (_tapLocation outp)
case any id chk of
False -> do
echoInfo info "Destination file does not exist. Proceeding."
go'' mrKey
True ->
case rr of
RSFail -> echo ErrorS info $ ls $
"Destination exists: " <> head (_tapLocation outp)
RSSkip -> echoInfo info $
"Destination exists. Skipping " <>
ls (intercalate ", " (_tapLocation outp))
RSReRun -> do
echoInfo info $ ls $
"Destination file exists, will delete and rerun: " <>
head (_tapLocation outp)
_ <- liftIO $ mapM_ (hdfsDeletePath settings) (_tapLocation outp)
go'' mrKey
echoInfo info "MR job complete"
go'' mrKey = do
          -- serialize the current state to HDFS, to be read by the
          -- individual mappers/reducers of this step.
runToken <- liftIO $ randomToken 64
writeState settings runToken
let mrs = mrOptsToRunOpts mro
launchMapReduce settings mrKey runToken $ mrs
& mrsInput .~ concatMap _tapLocation inp
& mrsOutput .~ head (_tapLocation outp)
& mrsJobName .~ nm
& (if onlyMap then mrsNumReduce .~ Just 0 else id)
onlyMap = case rd of
Left{} -> False
Right{} -> True
data Phase = Map | Combine | Reduce
-------------------------------------------------------------------------------
-- | What to do when we notice that a destination file already exists.
data RerunStrategy
= RSFail
-- ^ Fail and log the problem.
| RSReRun
-- ^ Delete the file and rerun the analysis
| RSSkip
    -- ^ Consider the analysis already done and skip.
deriving (Eq,Show,Read,Ord)
instance Default RerunStrategy where
def = RSFail
-------------------------------------------------------------------------------
-- | Decode a key produced by the Map stage. Errors are raised immediately, as
-- key marshalling errors are unacceptable.
decodeKey :: MRKey k => (CompositeKey, v) -> (k, v)
decodeKey (k,v) = (k', v)
where
k' = either mkErr id $ fromCompKey k
mkErr e = error ("Stage could not decode Map's output: " ++ show e)
encodeKey :: MRKey k => (k, v) -> (CompositeKey, v)
encodeKey = first toCompKey
data NodeError
= NodeRunComplete
    -- ^ Used to short-circuit the node workers once the map/reduce/combine
    -- step has been completed.
deriving (Eq,Show,Read,Ord,Typeable)
makePrisms ''NodeError
instance Exception NodeError
class AsNodeError t where
_NodeError :: Prism' t NodeError
instance AsNodeError NodeError where _NodeError = id
instance AsNodeError SomeException where _NodeError = exception
-------------------------------------------------------------------------------
-- | The main entry point. Use this function to produce a command line
-- program that encapsulates everything.
--
-- When run with just the name of a registered application, the program
-- orchestrates the entire MapReduce job flow for that application. The same
-- program also doubles as the actual mapper/reducer executable when called
-- with the right arguments, though you don't have to worry about that.
hadoopMain
:: ( MonadThrow m, MonadMask m
, MonadIO m, Functor m, Applicative m )
=> [(AppLabel, Controller ())]
-- ^ Hadoop streaming applications that can be run. First element
-- of tuple is used to lookup the right application to run from
-- the command line.
-> RunContext
-- ^ Hadoop environment info.
-> RerunStrategy
-- ^ What to do if destination files already exist.
-> m ()
hadoopMain conts settings rr = do
args <- liftIO getArgs
case args of
[nm] -> do
let nm' = mkAppLabel (toS nm)
case lookup nm' conts of
Nothing -> error (show nm <> " is not a known MapReduce application")
Just cont -> do
res <- orchestrate cont settings rr (def { _csApp = nm' })
echoInfo () ("Completed MR application " <> ls nm)
[runToken, arg] -> workNode settings conts runToken arg
_ -> error "You must provide the name of the MR application to initiate orchestration."
-------------------------------------------------------------------------------
mkArgs :: IsString [a] => [a] -> [(Phase, [a])]
mkArgs mrKey = [ (Map, "mapper_" ++ mrKey)
, (Reduce, "reducer_" ++ mrKey)
, (Combine, "combiner_" ++ mrKey) ]
-------------------------------------------------------------------------------
-- | Interpret the Controller in the context of a Hadoop worker node.
-- In this mode, the objective is to find the mapper, combiner or the
-- reducer that we are supposed to be executing as.
workNode
:: forall m a. (MonadIO m, MonadThrow m, MonadMask m, Functor m)
=> RunContext
-> [(AppLabel, Controller ())]
-> String
-> String
-> m ()
workNode settings conts runToken arg = do
handling (exception._NodeRunComplete) (const $ return ()) $ do
void $ flip evalStateT def $ do
loadState settings runToken
l <- use csApp
case lookup l conts of
Nothing -> error ("App not found in worker node: " <> show l)
Just (Controller p) -> interpretWithMonad go' p
where
    -- A short-circuiting wrapper for go. We hijack the exception
    -- system to implement short-circuiting here. It may be a better
    -- idea to use ContT.
go' :: ConI b -> StateT ContState m b
go' c = do
chk <- use csShortCircuit
case chk of
True -> throwM NodeRunComplete
False -> go c
go :: ConI b -> StateT ContState m b
go (ConIO f) = liftIO f
go (OrchIO _) = return ()
go (NodeIO f) = liftIO f
go (MakeTap lp) = do
curId <- pickTapId
dynLoc <- use $ tapLens curId
case dynLoc of
Nothing -> error $
"Dynamic location can't be determined for MakTap at index " <>
show curId
Just loc' -> return $ Tap ([B.unpack loc']) lp
go (BinaryDirTap loc _) = do
-- remember location of the file from the original loc
-- string
curId <- pickTapId
dynLoc <- use $ tapLens curId
case dynLoc of
Nothing -> error $
"Dynamic location can't be determined for BinaryDirTap at: " <> loc
Just loc' -> return $ fileListTap settings $ B.unpack loc'
-- setting in map-reduce phase is a no-op... There's nobody to
-- communicate it to.
go (SetVal _ _) = return ()
go (GetVal k) = use (csMRVars . at k)
go (RunOnce _) = do
curId <- pickRunCacheId
bs <- use (runCacheLens curId)
either error return $
note "RunOnce cache missing on remote node" bs >>= decode
go (Connect (MapReduce mro mrInPrism mp comb rd) inp outp nm) = do
mrKey <- newMRKey
let dec = do
fn <- getFileName
let t = find (flip belongsToTap fn) inp
return $ case t of
Nothing -> head inp ^. tapProto . protoDec
Just t' -> t' ^. tapProto . protoDec
let enc = outp ^. tapProto . protoEnc
mp' = case rd of
Left _ -> mapRegular
Right conv -> do
setLineBuffering
dec' <- liftIO $ dec
runResourceT $ sourceHandle stdin
=$= dec'
=$= mp
=$= C.map snd
=$= conv
=$= enc
$$ sinkHandle stdout
mapRegular = do
dec' <- liftIO dec
mapperWith mrInPrism (dec' =$= mp =$= C.map encodeKey)
red = case rd of
Right _ -> error "Unexpected: Reducer called for a map-only job."
Left f -> do
setLineBuffering
runResourceT $
reducer mro mrInPrism (C.map decodeKey =$= f)
=$= enc
$$ sinkHandle stdout
comb' = case comb of
Nothing -> error "Unexpected: No combiner supplied."
Just c -> combiner mro mrInPrism (C.map decodeKey =$= c =$= C.map encodeKey)
-- error message maker for caught exceptions
mkErr :: Maybe FilePath -> String -> SomeException -> b
mkErr file stage e = error $
"Exception raised during " <> stage <>
" in MR Job #" <> mrKey <>
maybe "" (\nm' -> " (" <> nm' <> ") ") nm <>
maybe "" (" while processing file " <>) file <>
": " <> show e
case find ((== arg) . snd) $ mkArgs mrKey of
Just (Map, _) -> do
liftIO $ do
curFile <- getFileName
catching exception mp' (mkErr (Just curFile) "mapper")
csShortCircuit .= True
Just (Reduce, _) -> do
liftIO $ catching exception red (mkErr Nothing "reducer")
csShortCircuit .= True
Just (Combine, _) -> do
liftIO $ catching exception comb' (mkErr Nothing "combiner")
csShortCircuit .= True
Nothing -> return ()
-- -- | TODO: See if this works. Objective is to increase type safety of
-- -- join inputs. Notice how we have an existential on a.
-- --
-- -- A join definition that ultimately produces objects of type b.
-- data JoinDef b = forall a. JoinDef {
-- joinTap :: Tap a
-- , joinType :: JoinType
-- , joinMap :: Conduit a IO (JoinKey, b)
-- }
-------------------------------------------------------------------------------
-- | A convenient way to express map-side multi-way join operations
-- with a single data type. All you need to supply is the map
-- operation for each tap; the reduce step is assumed to be the
-- monoidal 'mconcat'.
--
-- 'joinMR' is probably easier to use if you can get by with an inner
-- join.
joinStep
:: forall k b a.
(Show b, Monoid b, Serialize b,
MRKey k)
=> [(Tap a, JoinType, Mapper a k b)]
-- ^ Dataset definitions and how to map each dataset.
-> MapReduce a b
joinStep fs = MapReduce mro pSerialize mp Nothing (Left rd)
where
showBS = B.pack . show
n = numKeys (undefined :: k)
mro = joinOpts { _mroPart = Partition (n+1) n }
locations :: [FilePath]
locations = concatMap (view (_1 . tapLocation)) fs
taps' :: [Tap a]
taps' = concatMap ((\t -> replicate (length (_tapLocation t)) t) . view _1) fs
locations' = map B.pack locations
dataSets :: [(FilePath, DataSet)]
dataSets = map (\ (loc, i) -> (loc, DataSet (showBS i))) $
zip locations ([0..] :: [Int])
dsIx :: M.Map FilePath DataSet
dsIx = M.fromList dataSets
tapIx :: M.Map DataSet (Tap a)
tapIx = M.fromList $ zip (map snd dataSets) taps'
getTapDS :: Tap a -> [DataSet]
getTapDS t = mapMaybe (flip M.lookup dsIx) (_tapLocation t)
fs' :: [(DataSet, JoinType)]
fs' = concatMap (\ (t, jt, _) -> for (getTapDS t) $ \ ds -> (ds, jt) ) fs
for = flip map
-- | get dataset name from a given input filename
getDS nm = fromMaybe (error "Can't identify current tap from filename.") $ do
let nm' = B.pack nm
curLoc <- find (\l -> length (B.indices l nm') > 0) locations'
M.lookup (B.unpack curLoc) dsIx
-- | get the conduit for given dataset name
mkMap' ds = fromMaybe (error "Can't identify current tap in IX.") $ do
t <- M.lookup ds tapIx
cond <- find ((== t) . view _1) fs
return $ (cond ^. _3) =$= C.map (_1 %~ toCompKey)
mp = joinMapper getDS mkMap'
rd = joinReducer fs'
mapReduce = undefined
-- -------------------------------------------------------------------------------
-- -- | A generic map-reduce function that should be good enough for most
-- -- cases.
-- mapReduce
-- :: forall a k v b. (MRKey k, Serialize v)
-- => (a -> MaybeT IO [(k, v)])
-- -- ^ Common map key
-- -> (k -> b -> v -> IO b)
-- -- ^ Left fold in reduce stage
-- -> b
-- -- ^ A starting point for fold
-- -> MapReduce a (k,b)
-- mapReduce mp rd a0 = MapReduce mro pSerialize m Nothing r
-- where
-- n = numKeys (undefined :: k)
-- mro = def { _mroPart = Partition n n }
-- m :: Mapper a k v
-- m = awaitForever $ \ a -> runMaybeT $ hoist (lift . lift) (mp a) >>= lift . C.sourceList
-- r :: Reducer k v (k,b)
-- r = do
-- (k, b) <- C.foldM step (Nothing, a0)
-- case k of
-- Nothing -> return ()
-- Just k' -> yield (k', b)
-- step (_, acc) (k, v) = do
-- !b <- liftIO $ rd k acc v
-- return (Just k, b)
firstBy = undefined
-- -------------------------------------------------------------------------------
-- -- | Deduplicate input objects that have the same key value; the first
-- -- object seen for each key will be kept.
-- firstBy
-- :: forall a k. (Serialize a, MRKey k)
-- => (a -> MaybeT IO [k])
-- -- ^ Key making function
-- -> MapReduce a a
-- firstBy f = mapReduce mp rd Nothing >.> (C.map snd =$= C.catMaybes)
-- where
-- mp :: a -> MaybeT IO [(k, a)]
-- mp a = do
-- k <- f a
-- return $ zip k (repeat a)
-- rd :: k -> Maybe a -> a -> IO (Maybe a)
-- rd _ Nothing a = return $! Just a
-- rd _ acc _ = return $! acc
mapMR = undefined
-- -------------------------------------------------------------------------------
-- -- | A generic map-only MR step.
-- mapMR :: (Serialize b) => (v -> IO [b]) -> MapReduce v b
-- mapMR f = MapReduce def pSerialize mp Nothing rd
-- where
-- mp = do
-- rng <- liftIO mkRNG
-- awaitForever $ \ a -> do
-- t <- liftIO $ randomToken 2 rng
-- res <- liftIO $ f a
-- mapM_ (\x -> yield (t, x)) res
-- rd = C.map snd
oneSnap = undefined
-- -------------------------------------------------------------------------------
-- -- | Do something with only the first row we see, putting the result in
-- -- the given HDFS destination.
-- oneSnap
-- :: RunContext
-- -> FilePath
-- -> (a -> B.ByteString)
-- -> Conduit a IO a
-- oneSnap settings s3fp f = do
-- h <- await
-- case h of
-- Nothing -> return ()
-- Just h' -> do
-- liftIO $ putHeaders (f h')
-- yield h'
-- awaitForever yield
-- where
-- putHeaders x = do
-- tmp <- randomFileName
-- withLocalFile settings tmp $ \ fn -> B.writeFile fn x
-- chk <- hdfsFileExists settings s3fp
-- when (not chk) $ void $ hdfsPut settings tmp s3fp
-- withLocalFile settings tmp removeFile
-------------------------------------------------------------------------------
-- | Monoidal inner (map-side) join for two types. Each type is mapped
-- into the common monoid, which is then collapsed during reduce.
--
-- Make sure an incoming 'Left' stays 'Left' and a 'Right' stays a
-- 'Right'.
--
-- TODO: Wrap around this with a better API so the user doesn't have
-- to care.
joinMR
:: forall a b k v. (MRKey k, Monoid v, Serialize v)
=> Mapper (Either a b) k (Either v v)
-- ^ Mapper for the input
-> MapReduce (Either a b) v
joinMR mp = MapReduce mro pSerialize mp' Nothing (Left red)
where
mro = def { _mroPart = Partition (n+1) n }
n = numKeys (undefined :: k)
-- add to key so we know for sure all Lefts arrive before
-- Rights.
mp' :: Mapper (Either a b) CompositeKey (Either v v)
mp' = mp =$= C.map modMap
modMap (k, Left v) = (toCompKey k ++ ["1"], Left v)
modMap (k, Right v) = (toCompKey k ++ ["2"], Right v)
-- cache lefts, start emitting upon seeing the first right.
red = go []
where
go ls = do
inc <- await
case inc of
Nothing -> return ()
Just (_, Left r) -> go $! (r:ls)
Just (_, Right b) -> do
mapM_ yield [mappend a b | a <- ls]
go ls
| juanpaucar/hadron | src/Hadron/Controller.hs | bsd-3-clause | 44,398 | 8 | 24 | 12,561 | 9,692 | 5,077 | 4,615 | -1 | -1 |
module Tests
( levelTest
, bassVolumeTest
, bassTimingTest
) where
import Euterpea
import Elements
-- | A short piece of music for testing that each of the instruments is set up
-- correctly, and that the levels are adjusted. Each instrument is played
-- individually for a few bars, and then several repeats of playing them all at
-- once.
levelTest :: Music Pitch
levelTest = line parts :+: timesM 4 (chord parts)
where
parts = [coilTest, bassTest, bellTest, padTest, drumTest]
coilTest = timesM 2 $ onCoilLong (line coilNotes)
:+: onCoilLong (timesM 4 $ chord coilNotes)
:+: onCoilShort (timesM 4 $ tempo (4/1) $ line coilNotes)
coilNotes = map (note qn) [(B,4),(E,5),(Fs,5),(Gs,5)]
bassTest = bassMF $ bassBit :+: transpose 7 bassBit
bassBit = line (concatMap (replicate 2) $ openStrings qn)
:+: chord [rest wn, strum en]
bellTest = onBells $ line $ zipWith note (cycle [qn, qn, qn, dhn])
[ (E,5), (Gs,5), (Fs,5), (B,4)
, (E,5), (Fs,5), (Gs,5), (E,5)
, (Gs,5), (E,5), (Fs,5), (B,4)
, (B,4), (Fs,5), (Gs,5), (E,5)
]
drumTest = line $ map (\v -> phrase [Dyn $ StdLoudness v] drumBit) [MP, FF]
drumBit = onDrums $
timesM 2 (line (map (perc AcousticSnare) [qn, qn, en, en, en]) :+: rest en)
:+: timesM 3 (perc RideCymbal2 sn :+: perc RideCymbal2 sn :+: perc CrashCymbal2 en)
:+: perc CrashCymbal2 en :+: perc CrashCymbal2 en
padTest = onPad $ timesM 2 $ lowLine :+: highLine
lowLine = line $ map (note hn) [(D,4), (B,4), (B,5), (Fs,5)]
highLine = line $ map (note qn) [(D,5), (Gs,5), (Cs,6), (E,6)]
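-- To audition these test pieces, Euterpea's standard MIDI playback should be
-- enough (a sketch; this assumes a working MIDI output device is configured):
--
-- > main :: IO ()
-- > main = play levelTest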
-- | This test piece plays three arpeggiated chords at each of the 20 different
-- loudness levels the MechBass can produce. It was used to learn the relative
-- volumes of each setting.
--
-- Before each set of chords, the loudness level being tested is counted off by
-- playing notes on the G string. This is so that one can tell what is going on
-- when listening to an audio recording of the test.
--
-- See 'bassFF', 'bassMF', 'bassP', and 'bassPP' in "Elements" for the settings
-- used in the piece.
bassVolumeTest :: Music Pitch
bassVolumeTest = line $ map atVolume volumes
where
atVolume v =
phrase [Dyn $ Loudness 95] (countOff v)
:+: rest qn
:+: phrase [Dyn $ Loudness $ toVel v] arppeg
:+: rest qn
volumes = [20,19..1]
toVel = fromIntegral . (ceiling :: Double -> Int) . (/20) . (*128) . fromIntegral
countOff v = onBassGString $ line $ concat $ zipWith (:)
(replicate v $ note sn (G,4))
(cycle [[], [], [rest sn]])
arppeg = line $ map (\n -> transpose n $ strum en) [7, 5, 0]
strum :: Dur -> Music Pitch
strum = phrase [Art $ Legato 3.5 ] . line . openStrings
openStrings :: Dur -> [Music Pitch]
openStrings d =
[ onBassGString $ note d (G,3)
, onBassDString $ note d (D,3)
, onBassAString $ note d (A,2)
, onBassEString $ note d (E,2)
]
-- | A piece of music that tests if the fret shifter timing is correct.
--
-- For each pair of starting and ending fret position, the shifter on the G
-- string is pre-positioned to the starting position, then played at the ending
-- position. At the same time, the shifter on the D string is simply pre-
-- positioned and played at the ending fret position, and thus doesn't need to
-- move before playing.
--
-- If the allocator (see "MechBassAllocator") doesn't allow enough time for the
-- shifter motion from starting to ending, the note on the G string will play
-- late relative to the D string. If it allows enough (or too much) time, the
-- notes will sound together.
--
-- See "FindSkews.hs" for a program that can use an audio recoding of this test
-- to compute adjustments to the shifter timing in 'MechBass.shifterTimes'.
bassTimingTest :: Music Pitch
bassTimingTest = line $ map timingTo [0..12]
where
timingTo t = rest hn :+: line (map (timingToFrom t) [0..12])
timingToFrom t f =
onBassGString (fretNote qn (G,3) f 1 :+: fretNote qn (G,3) t 70)
:=:
onBassDString (fretNote qn (D,3) t 1 :+: fretNote qn (D,3) t 70)
fretNote d p t v = phrase [Dyn $ Loudness v] $ note d (trans t p)
| mzero/PlainChanges2 | src/Tests.hs | bsd-3-clause | 4,269 | 0 | 18 | 1,042 | 1,355 | 752 | 603 | 58 | 1 |
{-|
This module provides a function for computing the topological sort
of a directed acyclic graph.
-}
module TopologicalSort
(
topologicalSort
) where
import Prelude hiding (replicate)
import Data.Vector.Unboxed.Mutable
import Control.Monad.RWS
import Control.Monad
import Control.Lens
import Control.Lens.TH
import Control.Monad.ST
import GraphUtils
import VectorUtils
-- | The state threaded through the topological sort algorithm. Tracks the
-- visited vertices and the sorting accumulated so far.
data TSState s = TSState {
_visitedVertices :: STVector s Bool,
_topologicalSorting :: [Vertex]
}
makeLenses ''TSState
-- | Monad for the topological sort.
type TopSort s = RWST Graph () (TSState s) (ST s)
-- | Runs a computation in the topological sort monad.
runTopSort :: Graph -> (forall s . TopSort s a) -> [Vertex]
runTopSort graph tssAction = runST $ do
initialVisitedVertices <- replicate (order graph) False
(finalState,_) <-
execRWST tssAction graph (TSState initialVisitedVertices [])
return $ finalState ^. topologicalSorting
-- | Computes a topological sort of a directed acyclic graph.
-- Uses a DFS, runs in O(|V|+|E|).
topologicalSort :: Graph -> [Vertex]
topologicalSort graph = runTopSort graph topologicalSort'
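-- A small usage sketch: how the 'Graph' value is constructed depends on the
-- "GraphUtils" module, so the input graph below is just a hypothetical
-- placeholder.
--
-- > buildOrder :: Graph -> [Vertex]
-- > buildOrder dependencyGraph = topologicalSort dependencyGraph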
-- | Computes a topological sort in the topological sort monad.
-- Simply the main loop of a DFS.
topologicalSort' :: TopSort s ()
topologicalSort' = do
graph <- ask
forM_ (vertices graph) $ \vertex -> do
visit vertex
-- | Visits a vertex, visits its successors, then adds itself
-- to the head of the topological sort. This lists the vertices in
-- reverse post-order, which for a DAG is a topological sort.
visit :: Vertex -> TopSort s ()
visit vertex = do
isVisited <- readL visitedVertices vertex
when (not isVisited) $ do
writeL visitedVertices vertex True
graph <- ask
forM_ (successors graph vertex) $ \successor -> do
visit successor
topologicalSorting %= (vertex:)
| alexisVallet/ag44-graph-algorithms | TopologicalSort.hs | bsd-3-clause | 1,957 | 0 | 14 | 365 | 426 | 226 | 200 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RecordWildCards #-}
module AuthAPI (
Username
, Password
, Token
, Secret
, Storage
, LoginArgs(..)
, AuthAPI
, serveAuth
) where
import Control.Monad (when)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Except (ExceptT)
import Data.Aeson
import Data.Aeson.Types (Options, fieldLabelModifier)
import Data.Char (isUpper, toLower)
import Data.IORef (IORef, readIORef, modifyIORef)
import Data.Map (Map)
import GHC.Generics
import Servant (Server, (:>), (:<|>)(..), Get, Post, ReqBody, Capture)
import Servant (throwError)
import Servant.API.ContentTypes (JSON)
import Servant.API.Experimental.Auth (AuthProtect)
import Servant.Server (ServantErr)
import Servant.Server (err403, err404)
import Servant.Server.Experimental.Auth.HMAC
import System.Random
import qualified Data.Map as Map
type Username = String
type Password = String
type Token = String
type Secret = String
type Storage = IORef (Map Username Token)
type instance AuthHmacAccount = Username
type instance AuthHmacToken = Token
data LoginArgs = LoginArgs {
laUsername :: String
, laPassword :: String
} deriving Generic
instance FromJSON LoginArgs where
parseJSON = genericParseJSON dropPrefixOptions
instance ToJSON LoginArgs where
toJSON = genericToJSON dropPrefixOptions
dropPrefix :: String -> String
dropPrefix "" = ""
dropPrefix (c:t)
| isUpper c = toLower c : t
| otherwise = dropPrefix t
dropPrefixOptions :: Options
dropPrefixOptions = defaultOptions { fieldLabelModifier = dropPrefix }
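-- With these options the "la" prefixes are stripped from the JSON field names,
-- e.g. (a sketch; the exact field order in the output may differ):
--
-- > encode (LoginArgs "mr_foo" "password1")
-- > -- ~ "{\"username\":\"mr_foo\",\"password\":\"password1\"}"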
type AuthAPI = "login" :> ReqBody '[JSON] LoginArgs :> Post '[JSON] String
:<|> "secret" :> Capture "username" Username
:> AuthProtect "hmac-auth" :> Get '[JSON] String
users :: [(Username, (Password, Secret))]
users = [
("mr_foo", ("password1", "War is Peace"))
, ("mr_bar", ("letmein" , "Freedom is Slavery"))
, ("mr_baz", ("baseball" , "Ignorance is Strength"))
]
serveAuth :: Storage -> Server AuthAPI
serveAuth storage = serveLogin :<|> serveSecret where
serveLogin (LoginArgs {..}) = serve' where
serve' = case isValidUser of
True -> liftIO $ getToken
False -> throwError err403
isValidUser = maybe False
(\(password', _) -> password' == laPassword)
(lookup laUsername users)
getToken :: IO String
getToken = (maybe mkToken return) =<< ((Map.lookup laUsername) <$> (readIORef storage))
mkToken :: IO String
mkToken = do
token <- (take 16 . randomRs ('A', 'Z')) <$> getStdGen
modifyIORef storage (Map.insert laUsername token)
return token
serveSecret :: Username -> (Username, Token) -> ExceptT ServantErr IO String
serveSecret username' (username'', _) = do
when (username' /= username'') $ throwError err403 -- User can request only his own secret
maybe (throwError err404) (\(_, secret') -> return secret') (lookup username' users)
| zohl/servant-auth-hmac | example/server/AuthAPI.hs | bsd-3-clause | 3,178 | 0 | 16 | 646 | 904 | 517 | 387 | 84 | 2 |
-- | UPDATE operations on HD wallets
module Cardano.Wallet.Kernel.DB.HdWallet.Update (
updateHdRoot
, updateHdRootPassword
, updateHdAccountName
) where
import Universum
import Cardano.Wallet.Kernel.DB.HdWallet
import Cardano.Wallet.Kernel.DB.Util.AcidState
import UTxO.Util (modifyAndGetNew)
{-------------------------------------------------------------------------------
UPDATE
-------------------------------------------------------------------------------}
-- | Updates the HD wallet's name and assurance level in one gulp.
updateHdRoot :: HdRootId
-> AssuranceLevel
-> WalletName
-> Update' UnknownHdRoot HdWallets HdRoot
updateHdRoot rootId assurance name =
zoomHdRootId identity rootId $ do
modifyAndGetNew $ set hdRootAssurance assurance . set hdRootName name
updateHdRootPassword :: HdRootId
-> HasSpendingPassword
-> Update' UnknownHdRoot HdWallets HdRoot
updateHdRootPassword rootId hasSpendingPassword =
zoomHdRootId identity rootId $ do
modifyAndGetNew $ hdRootHasPassword .~ hasSpendingPassword
updateHdAccountName :: HdAccountId
-> AccountName
-> Update' UnknownHdAccount HdWallets HdAccount
updateHdAccountName accId name = do
zoomHdAccountId identity accId $ do
modifyAndGetNew $ hdAccountName .~ name
| input-output-hk/pos-haskell-prototype | wallet/src/Cardano/Wallet/Kernel/DB/HdWallet/Update.hs | mit | 1,433 | 0 | 11 | 337 | 223 | 119 | 104 | 27 | 1 |
import Control.Arrow
import Distribution.PackageDescription
import Distribution.Simple hiding (Module)
import Distribution.Simple.LocalBuildInfo
import Language.Preprocessor.Cpphs
import System.FilePath
import Text.XkbCommon.ParseDefines
import Module
import Utils
sourceLoc :: FilePath
sourceLoc = "./"
main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ buildHook = \p l h f -> generateSource sourceLoc >> buildHook simpleUserHooks p l h f
, haddockHook = \p l h f -> generateSource sourceLoc >> haddockHook simpleUserHooks p l h f
, sDistHook = \p ml h f -> case ml of
Nothing -> fail "No local buildinfo available. configure first"
Just l -> do
generateSource sourceLoc
sDistHook simpleUserHooks p ml h f
}
generateSource :: FilePath -> IO ()
generateSource fp = do
parsedDefs <- getKeysymDefs
saveModule fp (keysymsModule parsedDefs)
return ()
keysymsModule :: [(String,Integer)] -> Module
keysymsModule defs = Module "Text.XkbCommon.KeysymPatterns" [] $
Import ["Text.XkbCommon.InternalTypes"] :
map (\(name,val) ->
Pattern ("Keysym_" ++ name)
Nothing
("= Keysym " ++ show val))
defs
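-- The generated "Text.XkbCommon.KeysymPatterns" module therefore consists of
-- pattern synonyms roughly of this shape (keysym name and value below are
-- illustrative, not actual entries from the xkbcommon headers):
--
-- > pattern Keysym_a = Keysym 97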
| tulcod/haskell-xkbcommon | Setup.hs | mit | 1,350 | 0 | 14 | 402 | 351 | 183 | 168 | 33 | 2 |
module Program.Array.Expression where
import Program.Array.Operator
import Autolib.TES.Identifier
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Size
import Autolib.Util.Zufall
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr hiding ( Operator )
import Data.Typeable
-- | access to array element
data Access = Access Identifier [ Expression ]
deriving Typeable
instance Size Access where
size ( Access name inds ) = 1 + sum ( map size inds )
instance ToDoc Access where
toDoc ( Access name inds ) =
toDoc name <+> hsep ( do ind <- inds ; return $ brackets $ toDoc ind )
instance Reader Access where
reader = do
name <- reader
inds <- many $ my_brackets $ reader
return $ Access name inds
-- | arithmetical expression, with multi-dimensional array access
data Expression = Reference Access
| Literal Integer
| Binary Operator Expression Expression
deriving Typeable
instance Size Expression where
size exp = case exp of
Reference acc -> size acc
Literal i -> 1
Binary op l r -> 1 + size l + size r
instance ToDoc Expression where
toDocPrec p e = case e of
Reference acc -> toDoc acc
Literal i -> toDoc i
Binary op l r ->
case op of
Add -> docParen ( p > 1 )
$ hsep [ toDocPrec 1 l , text "+" , toDocPrec 2 r ]
Subtract -> docParen ( p > 3 )
$ hsep [ toDocPrec 3 l , text "-" , toDocPrec 4 r ]
Multiply -> docParen ( p > 5 )
$ hsep [ toDocPrec 5 l , text "*" , toDocPrec 6 r ]
Divide -> docParen ( p > 7 )
$ hsep [ toDocPrec 7 l , text "/" , toDocPrec 8 r ]
instance Reader Expression where
reader = buildExpressionParser operators atomic
operators =
[ [ op "*" Multiply AssocLeft
, op "/" Divide AssocLeft
]
, [ op "+" Add AssocLeft
, op "-" Subtract AssocLeft
]
]
where
op name f assoc =
Infix ( do { my_symbol name; return $ Binary f } ) assoc
atomic :: Parser Expression
atomic = my_parens reader
<|> do i <- my_integer ; return $ Literal i
<|> do a <- reader ; return $ Reference a
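-- A small illustration of the precedences above (written informally, with
-- identifiers shown as bare names): parsing "a[i] + 2 * x" yields
--
-- > Binary Add (Reference (Access a [Reference (Access i [])]))
-- >            (Binary Multiply (Literal 2) (Reference (Access x [])))
--
-- i.e. '*' binds more tightly than '+', and both associate to the left.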
| florianpilz/autotool | src/Program/Array/Expression.hs | gpl-2.0 | 2,232 | 10 | 14 | 670 | 706 | 362 | 344 | 58 | 1 |
{- |
Module : ./Temporal/NuSmv.hs
Copyright : (c) Klaus Hartke, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
-}
module NuSmv (basicExpr, program) where
import Control.Monad (liftM, liftM2)
import Data.Char (toLower)
import Data.List (intercalate, intersperse)
import Text.ParserCombinators.Parsec hiding (State, token)
(<<) :: (Monad m) => m a -> m b -> m a
(<<) a b = do x <- a
b
return x
-- ----------------------------------------------------------------------------
checkWith :: (Show a) => GenParser tok st a -> (a -> Bool) -> GenParser tok st a
checkWith p f = do x <- p
if f x then return x
else unexpected (show x)
reserved :: [String] -> CharParser st String -> CharParser st String
reserved l p = try $ checkWith p (`notElem` l)
-- ----------------------------------------------------------------------------
ws :: Parser ()
ws = many (oneOf " \t\r\n") >> return ()
token :: String -> Parser ()
token x = string x >> ws
keyword :: String -> Parser ()
keyword x = try (string x >> notFollowedBy alphaNum) >> ws
identifier :: Parser String
identifier = liftM2 (:) idFirstChar (many idConsecutiveChar) <?> "identifier"
where
idFirstChar = letter
<|> char '_'
idConsecutiveChar = idFirstChar
<|> digit
<|> char '$'
<|> char '#'
<|> char '\\'
<|> char '-'
integer :: Parser Int
integer = liftM read (liftM2 (++) (option "" (string "-")) (many1 digit))
<?> "integer number"
-- ----------------------------------------------------------------------------
keywords :: [String]
keywords = [ "MODULE", "DEFINE", "CONSTANTS", "VAR", "IVAR", "INIT", "TRANS",
"INVAR", "SPEC", "CTLSPEC", "LTLSPEC", "PSLSPEC", "COMPUTE",
"INVARSPEC", "FAIRNESS", "JUSTICE", "COMPASSION", "ISA", "ASSIGN",
"CONSTRAINT", "SIMPWFF", "CTLWFF", "LTLWFF", "PSLWFF", "COMPWFF",
"IN", "MIN", "MAX", "process", "array", "of", "boolean",
"integer", "real", "word", "word1", "bool", "EX", "AX", "EF",
"AF", "EG", "AG", "E", "F", "O", "G", "H", "X", "Y", "Z", "A",
"U", "S", "V", "T", "BU", "EBF", "ABF", "EBG", "ABG", "case",
"esac", "mod", "next", "init", "union", "in", "xor", "xnor",
"self", "TRUE", "FALSE" ]
{- ----------------------------------------------------------------------------
Basic Expressions of NuSMV
---------------------------------------------------------------------------- -}
data Expr = Bool Bool -- Boolean Constant
| Int Int -- Integer Constant
| Var [String] {- Symbol Constant,
Variable Identifier,
or Define Identifier -}
| Word Int Int -- Word Constant
| Range Int Int -- Range Constant
| Not Expr -- Logical/Bitwise NOT
| And Expr Expr -- Logical/Bitwise AND
| Or Expr Expr -- Logical/Bitwise OR
| Xor Expr Expr -- Logical/Bitwise XOR
| Xnor Expr Expr -- Logical/Bitwise NOT XOR
| Impl Expr Expr -- Logical/Bitwise Implication
| Equiv Expr Expr -- Logical/Bitwise Equivalence
| Eq Expr Expr -- Equality
| Neq Expr Expr -- Inequality
| Lt Expr Expr -- Less Than
| Gt Expr Expr -- Greater Than
| Leq Expr Expr -- Less Than Or Equal
| Geq Expr Expr -- Greater Than Or Equal
| Neg Expr -- Integer Unary Minus
| Add Expr Expr -- Integer Addition
| Sub Expr Expr -- Integer Subtraction
| Mult Expr Expr -- Integer Multiplication
| Div Expr Expr -- Integer Division
| Mod Expr Expr -- Integer Remainder
| Bsr Expr Expr -- Bit Shift Right
| Bsl Expr Expr -- Bit Shift Left
| Concat Expr Expr -- Word Concatenation
| Select Expr (Int, Int) -- Word Bits Selection
          | ToWord Expr            -- Boolean to Word[1] conversion
          | ToBool Expr            -- Word[1] to Boolean conversion
| Union Expr Expr -- Union of Set Expressions
| Set [Expr] -- Set Expression
| In Expr Expr -- Inclusion Expression
| Case [(Expr, Expr)] -- Case Expression
| Next Expr -- Next Expression
instance Show Expr where
show expr = showBasicExpr expr True
showBasicExpr (Bool True) outer = "TRUE"
showBasicExpr (Bool False) outer = "FALSE"
showBasicExpr (Int value) outer = show value
showBasicExpr (Var ids) outer = intercalate "." ids
showBasicExpr (Word width value) outer = concat [ "0d", show width, "_",
show value ]
showBasicExpr (Range from to) outer = concat [ show from, "..", show to ]
showBasicExpr (Not expr) outer = '!' : showBasicExpr expr False
showBasicExpr (And exprA exprB) True = concat [ showBasicExpr exprA False,
" & ", showBasicExpr exprB False ]
showBasicExpr (Or exprA exprB) True = concat [ showBasicExpr exprA False, " | ",
showBasicExpr exprB False ]
showBasicExpr (Xor exprA exprB) True = concat [ showBasicExpr exprA False,
" xor ",
showBasicExpr exprB False ]
showBasicExpr (Xnor exprA exprB) True = concat [ showBasicExpr exprA False,
" xnor ",
showBasicExpr exprB False ]
showBasicExpr (Impl exprA exprB) True = concat [ showBasicExpr exprA False,
" -> ",
showBasicExpr exprB False ]
showBasicExpr (Equiv exprA exprB) True = concat [ showBasicExpr exprA False,
" <-> ",
showBasicExpr exprB False ]
showBasicExpr (Eq exprA exprB) True = concat [ showBasicExpr exprA False,
" = ",
showBasicExpr exprB False ]
showBasicExpr (Neq exprA exprB) True = concat [ showBasicExpr exprA False,
" != ",
showBasicExpr exprB False ]
showBasicExpr (Lt exprA exprB) True = concat [ showBasicExpr exprA False, " < ",
showBasicExpr exprB False ]
showBasicExpr (Gt exprA exprB) True = concat [ showBasicExpr exprA False, " > ",
showBasicExpr exprB False ]
showBasicExpr (Leq exprA exprB) True = concat [ showBasicExpr exprA False,
" <= ",
showBasicExpr exprB False ]
showBasicExpr (Geq exprA exprB) True = concat [ showBasicExpr exprA False,
" >= ",
showBasicExpr exprB False ]
showBasicExpr (Neg expr) outer = '-' : showBasicExpr expr False
showBasicExpr (Add exprA exprB) True = concat [ showBasicExpr exprA False,
" + ",
showBasicExpr exprB False ]
showBasicExpr (Sub exprA exprB) True = concat [ showBasicExpr exprA False,
" - ",
showBasicExpr exprB False ]
showBasicExpr (Mult exprA exprB) True = concat [ showBasicExpr exprA False,
" * ",
showBasicExpr exprB False ]
showBasicExpr (Div exprA exprB) True = concat [ showBasicExpr exprA False,
" / ",
showBasicExpr exprB False ]
showBasicExpr (Mod exprA exprB) True = concat [ showBasicExpr exprA False,
" mod ",
showBasicExpr exprB False ]
showBasicExpr (Bsr exprA exprB) True = concat [ showBasicExpr exprA False,
" >> ",
showBasicExpr exprB False ]
showBasicExpr (Bsl exprA exprB) True = concat [ showBasicExpr exprA False,
" << ",
showBasicExpr exprB False ]
showBasicExpr (Concat exprA exprB) True = concat [ showBasicExpr exprA False,
" :: ",
showBasicExpr exprB False ]
showBasicExpr (Select expr (from, to)) outer = concat
[ showBasicExpr expr False, "[", show from, ", ", show to, "]" ]
showBasicExpr (ToWord expr) outer = concat [ "word1(", showBasicExpr expr True,
")" ]
showBasicExpr (ToBool expr) outer = concat [ "bool(", showBasicExpr expr True,
")" ]
showBasicExpr (Union exprA exprB) True = concat [ showBasicExpr exprA False,
" union ",
showBasicExpr exprB False ]
showBasicExpr (Set exprs) outer = '{' : intercalate ", "
(map (`showBasicExpr` False) exprs) ++ "}"
showBasicExpr (In exprA exprB) True = concat [ showBasicExpr exprA False,
" in ",
showBasicExpr exprB False ]
showBasicExpr (Case cases) outer = concat [ "case ",
intercalate "\n" (map (\ (exprA, exprB) ->
concat [showBasicExpr exprA False, " : ",
showBasicExpr exprB False, ";"]) cases), " esac" ]
showBasicExpr (Next expr) outer = concat [ "next(",
showBasicExpr expr True, ")" ]
showBasicExpr expr False = concat [ "(", showBasicExpr expr True, ")" ]
{- ----------------------------------------------------------------------------
Parser for Basic Expressions
---------------------------------------------------------------------------- -}
basicExpr :: Parser Expr
basicExpr = implExpr
where
implExpr = chainr1 equivExpr (token "->" >> return Impl)
equivExpr = chainl1 orExpr (token "<->" >> return Equiv)
orExpr = chainl1 andExpr ((token "|" >> return Or) <|>
(keyword "xor" >> return Xor) <|>
(keyword "xnor" >> return Xnor))
andExpr = chainl1 eqExpr (token "&" >> return And)
eqExpr = chainl1 inExpr ((token "=" >> return Eq) <|>
(token "!=" >> return Neq) <|>
try (token "<=" >> return Leq) <|>
try (token ">=" >> return Geq) <|>
(token "<" >> return Lt) <|>
(token ">" >> return Gt))
inExpr = chainl1 unionExpr (keyword "in" >> return In)
unionExpr = chainl1 shiftExpr (keyword "union" >> return Union)
shiftExpr = chainl1 modExpr (try (token ">>" >> return Bsr) <|>
try (token "<<" >> return Bsl))
modExpr = chainl1 addSubExpr (keyword "mod" >> return Mod)
addSubExpr = chainl1 multDivExpr ((token "+" >> return Add) <|>
(token "-" >> return Sub))
multDivExpr = chainl1 negateExpr ((token "*" >> return Mult) <|>
(token "/" >> return Div))
negateExpr = (token "-" >> liftM Neg negateExpr) <|>
concatExpr
concatExpr = chainl1 selectExpr (try (token "::") >> return Concat)
selectExpr = do expr <- notExpr
option expr (do token "[" <?> "selector"
bits <- sepBy1 bit (token "[")
return (foldl Select expr bits))
notExpr = (token "!" >> liftM Not notExpr) <|>
primaryExpr
primaryExpr = parenthesizedExpr <|>
liftM Set (between (token "{") (token "}")
(sepBy1 implExpr (char ',' >> ws))) <|>
(do expr <- constantExpr << ws
case expr of
Var ["word1"] -> liftM ToWord parenthesizedExpr
Var ["bool"] -> liftM ToBool parenthesizedExpr
Var ["case"] -> caseExpr
Var ["next"] -> liftM Next parenthesizedExpr
_ -> return expr)
parenthesizedExpr = between (token "(") (token ")") implExpr
caseExpr = liftM Case (manyTill (do lhs <- implExpr
token ":"
rhs <- implExpr
token ";"
return (lhs, rhs)) (token "esac"))
bit = do value1 <- integer << ws
token ":"
value2 <- integer << ws
token "]"
return (value1, value2)
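-- A small illustration of the precedence levels above (sketch only): parsing
-- the string "a & b | c" produces
--
-- > Or (And (Var ["a"]) (Var ["b"])) (Var ["c"])
--
-- since '&' binds more tightly than '|', and '|' associates to the left.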
{- ----------------------------------------------------------------------------
Parser for Constant Expressions
---------------------------------------------------------------------------- -}
constantExpr :: Parser Expr
constantExpr = (constantA <?> "integer constant") <|>
(constantB <?> "boolean constant") <|>
(constantE <?> "symbolic constant") <|>
(constantF <?> "variable identifier") <|>
(constantG <?> "define identifier")
where
constantA = do char '-'
value <- many1 digit
range (negate (read value))
constantB = do char '0'
constantC <|> constantD
constantC = do base <- oneOf "bBoOdDhH"
width <- many digit
char '_'
value <- case toLower base of
'b' -> many1 (oneOf "01_")
'o' -> many1 (octDigit <|> char '_')
'd' -> many1 (digit <|> char '_')
'h' -> many1 (hexDigit <|> char '_')
let width' = case width of
"" -> case toLower base of
'b' -> 1 * length value
'o' -> 3 * length value
'd' -> error $ "Cannot calculate " ++
"width of decimal integers"
'h' -> 4 * length value
_ -> read width
let value' = case toLower base of
'b' -> error "Cannot decode binary integer"
'o' -> read ("0o" ++ filter (/= '_') value)
'd' -> read (filter (/= '_') value)
'h' -> read ("0x" ++ filter (/= '_') value)
return (Word width' value')
constantD = do value <- many digit
case value of
"" -> return (Bool False)
_ -> range (read ('0' : value))
constantE = do char '1'
value <- many digit
case value of
"" -> return (Bool True)
_ -> range (read ('1' : value))
constantF = do value <- many1 digit
range (read value)
constantG = do value <- sepBy1 identifier (char '.')
case value of
["FALSE"] -> return (Bool False)
["TRUE"] -> return (Bool True)
_ -> return (Var value)
range x = (string ".." >> liftM (Range x) integer)
<|> return (Int x)
{- ----------------------------------------------------------------------------
Complex Identifiers
---------------------------------------------------------------------------- -}
data ComplexId = Id String
| ComplexId ComplexId String
| IndexedId ComplexId Int
| Self
instance Show ComplexId where
show (Id s) = s
show (ComplexId id s) = show id ++ "." ++ s
show (IndexedId id i) = show id ++ "[" ++ show i ++ "]"
show (Self) = "self"
complexId :: Parser ComplexId
complexId = do id <- reserved keywords identifier << ws
return (Id id) -- TODO: really complex identifiers
{- ----------------------------------------------------------------------------
NuSMV Programs
---------------------------------------------------------------------------- -}
data Program = Program [Module]
data Module = Module String [String] [Element]
data Element = VarDecl [(String, Type)]
| IVarDecl [(String, Type)]
| DefineDecl [(String, Expr)]
| ConstDecl [String]
| Init Expr
| Invar Expr
| Trans Expr
| Assign [(AssignLhs, AssignRhs)]
| Fairness Expr
| Justice Expr
| Compassion Expr Expr
data Type = BoolType
| WordType Int
| EnumType [EnumValue]
| RangeType Int Int
| ArrayType Int Int Type
data EnumValue = Symbol String
| Number Int
data AssignLhs = CurrentValue ComplexId
| InitialValue ComplexId
| NextValue ComplexId
type AssignRhs = Expr
instance Show Program where
show = showProgram
showProgram (Program mods) = concatMap showModule mods
instance Show Module where
show = showModule
showModule (Module name params elements) = concat [ "MODULE ",
name, showParams params, "\n", concatMap showElement elements ]
where
showParams [] = ""
showParams params = concat [ "(", intercalate ", " params, ")" ]
instance Show Element where
show = showElement
showElement (VarDecl vars) = concat [ "VAR", concatMap showVar vars, "\n" ]
where
showVar (id, ty) = concat [ " ", id, " : ", showType ty, ";" ]
showElement (IVarDecl vars) = concat [ "IVAR", concatMap showVar vars, "\n" ]
where
showVar (id, ty) = concat [ " ", id, " : ", showType ty, ";" ]
showElement (DefineDecl defs) = concat [ "DEFINE",
concatMap showDefine defs, "\n" ]
where
showDefine (id, expr) = concat [ " ", id, " := ",
showBasicExpr expr True, ";" ]
showElement (ConstDecl consts) = concat [ "CONSTANTS ",
intercalate ", " consts, ";\n" ]
showElement (Init expr) = concat [ "INIT ", showBasicExpr expr True, ";\n" ]
showElement (Invar expr) = concat [ "INVAR ", showBasicExpr expr True, ";\n" ]
showElement (Trans expr) = concat [ "TRANS ", showBasicExpr expr True, ";\n" ]
showElement (Assign assigns) = concat [ "ASSIGN", concatMap showAssign assigns, "\n" ]
where
showAssign (lhs, rhs) = concat [ " ", show lhs, " := ", show rhs, ";" ]
showElement (Fairness expr) = concat [ "FAIRNESS ", showBasicExpr expr True, ";\n" ]
showElement (Justice expr) = concat [ "JUSTICE ", showBasicExpr expr True, ";\n" ]
showElement (Compassion exprA exprB) = concat [ "COMPASSION(",
showBasicExpr exprA True, ", ", showBasicExpr exprB True, ");\n" ]
instance Show Type where
show = showType
showType BoolType = "boolean"
showType (WordType width) = concat [ "word[", show width, "]" ]
showType (EnumType values) = concat [ "{",
intercalate "," (map showValue values), " }" ]
where
showValue (Symbol symbol) = ' ' : symbol
showValue (Number number) = ' ' : show number
showType (RangeType from to) = concat [ show from, "..", show to ]
showType (ArrayType from to ty) = concat [ "array ",
show from, "..", show to, " of ", showType ty ]
instance Show AssignLhs where
show (CurrentValue id) = show id
show (InitialValue id) = "init(" ++ show id ++ ")"
show (NextValue id) = "next(" ++ show id ++ ")"
{- ----------------------------------------------------------------------------
Parser for Programs
---------------------------------------------------------------------------- -}
program :: Parser Program
program = liftM Program (many modul)
modul :: Parser Module
modul = do keyword "MODULE"
name <- reserved keywords identifier << ws
params <- option [] (between (token "(") (token ")") parameters)
elements <- many element
return (Module name params elements)
where
parameters = sepBy1 (reserved keywords identifier << ws) (token ",")
element :: Parser Element
element = varDecl
<|> ivarDecl
<|> constDecl
<|> defineDecl
<|> initConstraint
<|> invarConstraint
<|> transConstraint
<|> assign
<|> fairness
<|> justice
<|> compassion
where
varDecl = do keyword "VAR"
vars <- many1 (do id <- reserved keywords identifier << ws
token ":"
ty <- typeSpec
token ";"
return (id, ty))
return (VarDecl vars)
ivarDecl = do keyword "IVAR"
vars <- many1 (do id <- reserved keywords identifier << ws
token ":"
ty <- typeSpec
token ";"
return (id, ty))
return (IVarDecl vars)
defineDecl = do keyword "DEFINE"
defs <- many1 (do id <- reserved keywords identifier << ws
token ":="
expr <- basicExpr
token ";"
return (id, expr))
return (DefineDecl defs)
constDecl = do keyword "CONSTANTS"
consts <- sepBy1 (reserved keywords identifier << ws)
(token ",")
token ";"
return (ConstDecl consts)
initConstraint = do keyword "INIT"
expr <- basicExpr
optional (token ";")
return (Init expr)
invarConstraint = do keyword "INVAR"
expr <- basicExpr
optional (token ";")
return (Invar expr)
transConstraint = do keyword "TRANS"
expr <- basicExpr
optional (token ";")
return (Trans expr)
assign = do keyword "ASSIGN"
assigns <- many1 (do lhs <- assignLhs
token ":="
rhs <- basicExpr
token ";"
return (lhs, rhs))
return (Assign assigns)
where
assignLhs = do keyword "init"
token "("
id <- complexId
token ")"
return (InitialValue id)
<|> do keyword "next"
token "("
id <- complexId
token ")"
return (NextValue id)
<|> do id <- complexId
return (CurrentValue id)
fairness = do keyword "FAIRNESS"
expr <- basicExpr
optional (token ";")
return (Fairness expr)
justice = do keyword "JUSTICE"
expr <- basicExpr
optional (token ";")
return (Justice expr)
compassion = do keyword "COMPASSION"
token "("
exprA <- basicExpr
token ","
exprB <- basicExpr
token ")"
optional (token ";")
return (Compassion exprA exprB)
typeSpec :: Parser Type
typeSpec = boolSpec
<|> wordSpec
<|> enumSpec
<|> rangeSpec
<|> arraySpec
where
boolSpec = do keyword "boolean"
return BoolType
wordSpec = do keyword "word"
token "["
width <- integer
token "]"
return (WordType width)
enumSpec = do token "{"
values <- sepBy1 (liftM Symbol identifier <|>
liftM Number integer) (ws >> token ",")
token "}"
return (EnumType values)
rangeSpec = do from <- integer
token ".."
to <- integer
return (RangeType from to)
arraySpec = do keyword "array"
from <- integer
token ".."
to <- integer
keyword "of"
ty <- typeSpec
return (ArrayType from to ty)
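-- For reference, a minimal input accepted by 'program' looks like this
-- (a sketch only):
--
-- > MODULE main
-- > VAR
-- >   x : boolean;
-- > ASSIGN
-- >   init(x) := TRUE;
-- >   next(x) := !x;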
-- ----------------------------------------------------------------------------
| spechub/Hets | Temporal/NuSmv.hs | gpl-2.0 | 26,407 | 0 | 19 | 11,221 | 6,578 | 3,308 | 3,270 | 476 | 15 |
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts, TypeFamilies,
TypeSynonymInstances, FlexibleInstances, GADTs, RankNTypes,
UndecidableInstances, TypeOperators #-}
-- | This module provides types and functions for creating and manipulating
-- control signals (ready and ack) associated with protocols. The 'Ack' signal
-- indicates that a protocol element has received data from an upstream source,
-- and the 'Ready' signal indicates that the component is prepared to accept
-- data from an upstream source.
module Language.KansasLava.Protocols.Types where
import Language.KansasLava.Rep
import Language.KansasLava.Signal
import Language.KansasLava.Types
import Language.KansasLava.Utils
import Control.Monad
-- It is preferable to send a message that expects an Ack,
-- but to receive a message based on your Ready signal.
------------------------------------------------------------------------------------
-- | An Ack is always in response to an incoming packet or message.
newtype Ack = Ack { unAck :: Bool }
deriving (Eq,Ord)
instance Show Ack where
show (Ack True) = "A"
show (Ack False) = "~"
-- TODO: use $(repSynonym ''Ack ''Bool)
instance Rep Ack where
data X Ack = XAckRep { unXAckRep :: X Bool }
type W Ack = W Bool
-- The template for using representations
unX = liftM Ack . unX . unXAckRep
optX = XAckRep . optX . liftM unAck
toRep = toRep . unXAckRep
fromRep = XAckRep . fromRep
repType Witness = repType (Witness :: Witness Bool)
showRep = showRepDefault
-- | Convert a 'Bool' signal to an 'Ack' signal.
toAck :: (sig ~ Signal clk) => sig Bool -> sig Ack
toAck = coerce Ack
-- | Convert an 'Ack' to a 'Bool' signal.
fromAck :: (sig ~ Signal clk) => sig Ack -> sig Bool
fromAck = coerce unAck
------------------------------------------------------------------------------------
-- | A Ready is always in response to an incoming packet or message.
newtype Ready = Ready { unReady :: Bool }
deriving (Eq,Ord)
instance Show Ready where
show (Ready True) = "R"
show (Ready False) = "~"
instance Rep Ready where
data X Ready = XReadyRep { unXReadyRep :: X Bool }
type W Ready = W Bool
-- The template for using representations
unX = liftM Ready . unX . unXReadyRep
optX = XReadyRep . optX . liftM unReady
toRep = toRep . unXReadyRep
fromRep = XReadyRep . fromRep
repType Witness = repType (Witness :: Witness Bool)
showRep = showRepDefault
-- | Convert a Bool signal to a 'Ready' signal.
toReady :: (sig ~ Signal clk) => sig Bool -> sig Ready
toReady = coerce Ready
-- | Convert a 'Ready' signal to a Bool signal.
fromReady :: (sig ~ Signal clk) => sig Ready -> sig Bool
fromReady = coerce unReady
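-- As a small sketch, a component's Bool-typed handshake wires can be adapted
-- to these protocol types with the helpers above (the signal names here are
-- hypothetical):
--
-- > ackOut :: Signal clk Ack
-- > ackOut = toAck ackBool          -- ackBool :: Signal clk Bool
-- >
-- > ready :: Signal clk Bool
-- > ready = fromReady readySig      -- readySig :: Signal clk Ready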
------------------------------------------------------------------------------------------------------------------------------------------------
| andygill/kansas-lava | Language/KansasLava/Protocols/Types.hs | bsd-3-clause | 2,962 | 4 | 10 | 623 | 563 | 305 | 258 | 45 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Building info tables.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmLayout (
mkArgDescr,
emitCall, emitReturn, adjustHpBackwards,
emitClosureProcAndInfoTable,
emitClosureAndInfoTable,
slowCall, directCall,
mkVirtHeapOffsets, mkVirtConstrOffsets, getHpRelOffset,
ArgRep(..), toArgRep, argRepSizeW -- re-exported from StgCmmArgRep
) where
#include "HsVersions.h"
#if __GLASGOW_HASKELL__ >= 709
import Prelude hiding ((<*>))
#endif
import StgCmmClosure
import StgCmmEnv
import StgCmmArgRep -- notably: ( slowCallPattern )
import StgCmmTicky
import StgCmmMonad
import StgCmmUtils
import StgCmmProf (curCCS)
import MkGraph
import SMRep
import Cmm
import CmmUtils
import CmmInfo
import CLabel
import StgSyn
import Id
import TyCon ( PrimRep(..) )
import BasicTypes ( RepArity )
import DynFlags
import Module
import Util
import Data.List
import Outputable
import FastString
import Control.Monad
------------------------------------------------------------------------
-- Call and return sequences
------------------------------------------------------------------------
-- | Return multiple values to the sequel
--
-- If the sequel is @Return@
--
-- > return (x,y)
--
-- If the sequel is @AssignTo [p,q]@
--
-- > p=x; q=y;
--
emitReturn :: [CmmExpr] -> FCode ReturnKind
emitReturn results
= do { dflags <- getDynFlags
; sequel <- getSequel
; updfr_off <- getUpdFrameOff
; case sequel of
Return _ ->
do { adjustHpBackwards
; let e = CmmLoad (CmmStackSlot Old updfr_off) (gcWord dflags)
; emit (mkReturn dflags (entryCode dflags e) results updfr_off)
}
AssignTo regs adjust ->
do { when adjust adjustHpBackwards
; emitMultiAssign regs results }
; return AssignedDirectly
}
-- | @emitCall conv fun args@ makes a call to the entry-code of @fun@,
-- using the call/return convention @conv@, passing @args@, and
-- returning the results to the current sequel.
--
emitCall :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> FCode ReturnKind
emitCall convs fun args
= emitCallWithExtraStack convs fun args noExtraStack
-- | @emitCallWithExtraStack conv fun args stack@ makes a call to the
-- entry-code of @fun@, using the call/return convention @conv@,
-- passing @args@, pushing some extra stack frames described by
-- @stack@, and returning the results to the current sequel.
--
emitCallWithExtraStack
:: (Convention, Convention) -> CmmExpr -> [CmmExpr]
-> [CmmExpr] -> FCode ReturnKind
emitCallWithExtraStack (callConv, retConv) fun args extra_stack
= do { dflags <- getDynFlags
; adjustHpBackwards
; sequel <- getSequel
; updfr_off <- getUpdFrameOff
; case sequel of
Return _ -> do
emit $ mkJumpExtra dflags callConv fun args updfr_off extra_stack
return AssignedDirectly
AssignTo res_regs _ -> do
k <- newLabelC
let area = Young k
(off, _, copyin) = copyInOflow dflags retConv area res_regs []
copyout = mkCallReturnsTo dflags fun callConv args k off updfr_off
extra_stack
emit (copyout <*> mkLabel k <*> copyin)
return (ReturnedTo k off)
}
adjustHpBackwards :: FCode ()
-- This function adjusts the heap pointer just before a tail call or
-- return. At a call or return, the virtual heap pointer may be less
-- than the real Hp, because the latter was advanced to deal with
-- the worst-case branch of the code, and we may be in a better-case
-- branch. In that case, move the real Hp *back* and retract some
-- ticky allocation count.
--
-- It *does not* deal with high-water-mark adjustment. That's done by
-- functions which allocate heap.
adjustHpBackwards
= do { hp_usg <- getHpUsage
; let rHp = realHp hp_usg
vHp = virtHp hp_usg
adjust_words = vHp -rHp
; new_hp <- getHpRelOffset vHp
; emit (if adjust_words == 0
then mkNop
else mkAssign hpReg new_hp) -- Generates nothing when vHp==rHp
; tickyAllocHeap False adjust_words -- ...ditto
; setRealHp vHp
}
-------------------------------------------------------------------------
-- Making calls: directCall and slowCall
-------------------------------------------------------------------------
-- General plan is:
-- - we'll make *one* fast call, either to the function itself
-- (directCall) or to stg_ap_<pat>_fast (slowCall)
-- Any left-over arguments will be pushed on the stack,
--
-- e.g. Sp[old+8] = arg1
-- Sp[old+16] = arg2
-- Sp[old+32] = stg_ap_pp_info
-- R2 = arg3
-- R3 = arg4
-- call f() return to Nothing updfr_off: 32
directCall :: Convention -> CLabel -> RepArity -> [StgArg] -> FCode ReturnKind
-- (directCall f n args)
-- calls f(arg1, ..., argn), and applies the result to the remaining args
-- The function f has arity n, and there are guaranteed at least n args
-- Both arity and args include void args
directCall conv lbl arity stg_args
= do { argreps <- getArgRepsAmodes stg_args
; direct_call "directCall" conv lbl arity argreps }
slowCall :: CmmExpr -> [StgArg] -> FCode ReturnKind
-- (slowCall fun args) applies fun to args, returning the results to Sequel
slowCall fun stg_args
= do dflags <- getDynFlags
argsreps <- getArgRepsAmodes stg_args
let (rts_fun, arity) = slowCallPattern (map fst argsreps)
(r, slow_code) <- getCodeR $ do
r <- direct_call "slow_call" NativeNodeCall
(mkRtsApFastLabel rts_fun) arity ((P,Just fun):argsreps)
emitComment $ mkFastString ("slow_call for " ++
showSDoc dflags (ppr fun) ++
" with pat " ++ unpackFS rts_fun)
return r
-- Note [avoid intermediate PAPs]
let n_args = length stg_args
if n_args > arity && optLevel dflags >= 2
then do
funv <- (CmmReg . CmmLocal) `fmap` assignTemp fun
fun_iptr <- (CmmReg . CmmLocal) `fmap`
assignTemp (closureInfoPtr dflags (cmmUntag dflags funv))
-- ToDo: we could do slightly better here by reusing the
-- continuation from the slow call, which we have in r.
-- Also we'd like to push the continuation on the stack
-- before the branch, so that we only get one copy of the
-- code that saves all the live variables across the
-- call, but that might need some improvements to the
-- special case in the stack layout code to handle this
-- (see Note [diamond proc point]).
fast_code <- getCode $
emitCall (NativeNodeCall, NativeReturn)
(entryCode dflags fun_iptr)
(nonVArgs ((P,Just funv):argsreps))
slow_lbl <- newLabelC
fast_lbl <- newLabelC
is_tagged_lbl <- newLabelC
end_lbl <- newLabelC
let correct_arity = cmmEqWord dflags (funInfoArity dflags fun_iptr)
(mkIntExpr dflags n_args)
emit (mkCbranch (cmmIsTagged dflags funv) is_tagged_lbl slow_lbl
<*> mkLabel is_tagged_lbl
<*> mkCbranch correct_arity fast_lbl slow_lbl
<*> mkLabel fast_lbl
<*> fast_code
<*> mkBranch end_lbl
<*> mkLabel slow_lbl
<*> slow_code
<*> mkLabel end_lbl)
return r
else do
emit slow_code
return r
-- Note [avoid intermediate PAPs]
--
-- A slow call which needs multiple generic apply patterns will be
-- almost guaranteed to create one or more intermediate PAPs when
-- applied to a function that takes the correct number of arguments.
-- We try to avoid this situation by generating code to test whether
-- we are calling a function with the correct number of arguments
-- first, i.e.:
--
--    if (TAG(f) != 0) { // f is not a thunk
-- if (f->info.arity == n) {
-- ... make a fast call to f ...
-- }
-- }
-- ... otherwise make the slow call ...
--
-- We *only* do this when the call requires multiple generic apply
-- functions, which requires pushing extra stack frames and probably
-- results in intermediate PAPs. (I say probably, because it might be
-- that we're over-applying a function, but that seems even less
-- likely).
--
-- This very rarely applies, but if it does happen in an inner loop it
-- can have a severe impact on performance (#6084).
--------------
direct_call :: String
-> Convention -- e.g. NativeNodeCall or NativeDirectCall
-> CLabel -> RepArity
-> [(ArgRep,Maybe CmmExpr)] -> FCode ReturnKind
direct_call caller call_conv lbl arity args
| debugIsOn && real_arity > length args -- Too few args
  = do -- Caller should ensure that there are enough args!
pprPanic "direct_call" $
text caller <+> ppr arity <+>
ppr lbl <+> ppr (length args) <+>
ppr (map snd args) <+> ppr (map fst args)
| null rest_args -- Precisely the right number of arguments
= emitCall (call_conv, NativeReturn) target (nonVArgs args)
| otherwise -- Note [over-saturated calls]
= do dflags <- getDynFlags
emitCallWithExtraStack (call_conv, NativeReturn)
target
(nonVArgs fast_args)
(nonVArgs (stack_args dflags))
where
target = CmmLit (CmmLabel lbl)
(fast_args, rest_args) = splitAt real_arity args
stack_args dflags = slowArgs dflags rest_args
real_arity = case call_conv of
NativeNodeCall -> arity+1
_ -> arity
-- When constructing calls, it is easier to keep the ArgReps and the
-- CmmExprs zipped together. However, a void argument has no
-- representation, so we need to use Maybe CmmExpr (the alternative of
-- using zeroCLit or even undefined would work, but would be ugly).
--
getArgRepsAmodes :: [StgArg] -> FCode [(ArgRep, Maybe CmmExpr)]
getArgRepsAmodes = mapM getArgRepAmode
where getArgRepAmode arg
| V <- rep = return (V, Nothing)
| otherwise = do expr <- getArgAmode (NonVoid arg)
return (rep, Just expr)
where rep = toArgRep (argPrimRep arg)
nonVArgs :: [(ArgRep, Maybe CmmExpr)] -> [CmmExpr]
nonVArgs [] = []
nonVArgs ((_,Nothing) : args) = nonVArgs args
nonVArgs ((_,Just arg) : args) = arg : nonVArgs args
{-
Note [over-saturated calls]
The natural thing to do for an over-saturated call would be to call
the function with the correct number of arguments, and then apply the
remaining arguments to the value returned, e.g.
f a b c d (where f has arity 2)
-->
r = call f(a,b)
call r(c,d)
but this entails
- saving c and d on the stack
- making a continuation info table
- at the continuation, loading c and d off the stack into regs
- finally, call r
Note that since there are a fixed number of different r's
(e.g. stg_ap_pp_fast), we can also pre-compile continuations
that correspond to each of them, rather than generating a fresh
one for each over-saturated call.
Not only does this generate much less code, it is faster too. We will
generate something like:
Sp[old+16] = c
Sp[old+24] = d
Sp[old+32] = stg_ap_pp_info
call f(a,b) -- usual calling convention
For the purposes of the CmmCall node, we count this extra stack as
just more arguments that we are passing on the stack (cml_args).
-}
-- | 'slowArgs' takes a list of function arguments and prepares them for
-- pushing on the stack for "extra" arguments to a function which requires
-- fewer arguments than we currently have.
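--
-- For instance (an illustrative sketch, not taken from a real compilation):
-- with profiling off and two leftover pointer arguments @a@ and @b@, the
-- reps are [P,P], 'slowCallPattern' picks the @stg_ap_pp@ pattern, and the
-- result is roughly
--
-- > [ (N, Just stg_ap_pp_info), (P, Just a), (P, Just b) ]
--
-- i.e. the return-info-table label followed by the arguments it consumes,
-- matching the stack picture in Note [over-saturated calls].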
slowArgs :: DynFlags -> [(ArgRep, Maybe CmmExpr)] -> [(ArgRep, Maybe CmmExpr)]
slowArgs _ [] = []
slowArgs dflags args -- careful: reps contains voids (V), but args does not
| gopt Opt_SccProfilingOn dflags
= save_cccs ++ this_pat ++ slowArgs dflags rest_args
| otherwise = this_pat ++ slowArgs dflags rest_args
where
(arg_pat, n) = slowCallPattern (map fst args)
(call_args, rest_args) = splitAt n args
stg_ap_pat = mkCmmRetInfoLabel rtsPackageKey arg_pat
this_pat = (N, Just (mkLblExpr stg_ap_pat)) : call_args
save_cccs = [(N, Just (mkLblExpr save_cccs_lbl)), (N, Just curCCS)]
save_cccs_lbl = mkCmmRetInfoLabel rtsPackageKey (fsLit "stg_restore_cccs")
-------------------------------------------------------------------------
---- Laying out objects on the heap and stack
-------------------------------------------------------------------------
-- The heap always grows upwards, so hpRel is easy to compute
hpRel :: VirtualHpOffset -- virtual offset of Hp
-> VirtualHpOffset -- virtual offset of The Thing
-> WordOff -- integer word offset
hpRel hp off = off - hp
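-- For example, with realHp = 4 and a thing at virtual offset 7,
-- hpRel 4 7 = 3, i.e. the thing is addressed 3 words from Hp.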
getHpRelOffset :: VirtualHpOffset -> FCode CmmExpr
-- See Note [Virtual and real heap pointers] in StgCmmMonad
getHpRelOffset virtual_offset
= do dflags <- getDynFlags
hp_usg <- getHpUsage
return (cmmRegOffW dflags hpReg (hpRel (realHp hp_usg) virtual_offset))
mkVirtHeapOffsets
:: DynFlags
-> Bool -- True <=> is a thunk
-> [(PrimRep,a)] -- Things to make offsets for
-> (WordOff, -- _Total_ number of words allocated
WordOff, -- Number of words allocated for *pointers*
[(NonVoid a, ByteOff)])
-- Things with their offsets from start of object in order of
-- increasing offset; BUT THIS MAY BE DIFFERENT TO INPUT ORDER
-- First in list gets lowest offset, which is initial offset + 1.
--
-- Void arguments are removed, so output list may be shorter than
-- input list
--
-- mkVirtHeapOffsets always returns boxed things with smaller offsets
-- than the unboxed things
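--
-- A rough example (illustrative; assumes a 64-bit, non-profiling build with a
-- one-word header): for a non-thunk with fields [(IntRep, i), (PtrRep, p)]
-- the result is
--
--   ( 2                    -- two payload words in total
--   , 1                    -- one of them a GC pointer
--   , [ (p, 8), (i, 16) ]  -- pointer first (note the reordering);
--   )                      --   byte offsets are from the start of the object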
mkVirtHeapOffsets dflags is_thunk things
= ( bytesToWordsRoundUp dflags tot_bytes
, bytesToWordsRoundUp dflags bytes_of_ptrs
, ptrs_w_offsets ++ non_ptrs_w_offsets
)
where
hdr_words | is_thunk = thunkHdrSize dflags
| otherwise = fixedHdrSizeW dflags
hdr_bytes = wordsToBytes dflags hdr_words
non_void_things = filterOut (isVoidRep . fst) things
(ptrs, non_ptrs) = partition (isGcPtrRep . fst) non_void_things
(bytes_of_ptrs, ptrs_w_offsets) =
mapAccumL computeOffset 0 ptrs
(tot_bytes, non_ptrs_w_offsets) =
mapAccumL computeOffset bytes_of_ptrs non_ptrs
computeOffset bytes_so_far (rep, thing)
= (bytes_so_far + wordsToBytes dflags (argRepSizeW dflags (toArgRep rep)),
(NonVoid thing, hdr_bytes + bytes_so_far))
-- | Just like mkVirtHeapOffsets, but for constructors
mkVirtConstrOffsets
:: DynFlags -> [(PrimRep,a)]
-> (WordOff, WordOff, [(NonVoid a, ByteOff)])
mkVirtConstrOffsets dflags = mkVirtHeapOffsets dflags False
-------------------------------------------------------------------------
--
-- Making argument descriptors
--
-- An argument descriptor describes the layout of args on the stack,
-- both for * GC (stack-layout) purposes, and
-- * saving/restoring registers when a heap-check fails
--
-- Void arguments aren't important, therefore (contrast constructSlowCall)
--
-------------------------------------------------------------------------
-- bring in ARG_P, ARG_N, etc.
#include "../includes/rts/storage/FunTypes.h"
mkArgDescr :: DynFlags -> [Id] -> ArgDescr
mkArgDescr dflags args
= let arg_bits = argBits dflags arg_reps
arg_reps = filter isNonV (map idArgRep args)
-- Getting rid of voids eases matching of standard patterns
in case stdPattern arg_reps of
Just spec_id -> ArgSpec spec_id
Nothing -> ArgGen arg_bits
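-- For example (illustrative): a function whose non-void arguments have reps
-- [N,P] (say an Int# and a boxed value) gets the precomputed descriptor
-- ArgSpec ARG_NP, whereas a sequence with no standard pattern, such as
-- [D,P,P,P], falls through to ArgGen with a bitmap in which True marks
-- non-pointer words (see argBits below).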
argBits :: DynFlags -> [ArgRep] -> [Bool] -- True for non-ptr, False for ptr
argBits _ [] = []
argBits dflags (P : args) = False : argBits dflags args
argBits dflags (arg : args) = take (argRepSizeW dflags arg) (repeat True)
++ argBits dflags args
----------------------
stdPattern :: [ArgRep] -> Maybe Int
stdPattern reps
= case reps of
[] -> Just ARG_NONE -- just void args, probably
[N] -> Just ARG_N
[P] -> Just ARG_P
[F] -> Just ARG_F
[D] -> Just ARG_D
[L] -> Just ARG_L
[V16] -> Just ARG_V16
[V32] -> Just ARG_V32
[V64] -> Just ARG_V64
[N,N] -> Just ARG_NN
[N,P] -> Just ARG_NP
[P,N] -> Just ARG_PN
[P,P] -> Just ARG_PP
[N,N,N] -> Just ARG_NNN
[N,N,P] -> Just ARG_NNP
[N,P,N] -> Just ARG_NPN
[N,P,P] -> Just ARG_NPP
[P,N,N] -> Just ARG_PNN
[P,N,P] -> Just ARG_PNP
[P,P,N] -> Just ARG_PPN
[P,P,P] -> Just ARG_PPP
[P,P,P,P] -> Just ARG_PPPP
[P,P,P,P,P] -> Just ARG_PPPPP
[P,P,P,P,P,P] -> Just ARG_PPPPPP
_ -> Nothing
-------------------------------------------------------------------------
--
-- Generating the info table and code for a closure
--
-------------------------------------------------------------------------
-- Here we make an info table of type 'CmmInfo'. The concrete
-- representation as a list of 'CmmAddr' is handled later
-- in the pipeline by 'cmmToRawCmm'.
-- When loading the free variables, a function closure pointer may be tagged,
-- so we must take it into account.
emitClosureProcAndInfoTable :: Bool -- top-level?
-> Id -- name of the closure
-> LambdaFormInfo
-> CmmInfoTable
-> [NonVoid Id] -- incoming arguments
-> ((Int, LocalReg, [LocalReg]) -> FCode ()) -- function body
-> FCode ()
emitClosureProcAndInfoTable top_lvl bndr lf_info info_tbl args body
= do { dflags <- getDynFlags
-- Bind the binder itself, but only if it's not a top-level
-- binding. We need non-top let-bindings to refer to the
-- top-level binding, which this binding would incorrectly shadow.
; node <- if top_lvl then return $ idToReg dflags (NonVoid bndr)
else bindToReg (NonVoid bndr) lf_info
; let node_points = nodeMustPointToIt dflags lf_info
; arg_regs <- bindArgsToRegs args
; let args' = if node_points then (node : arg_regs) else arg_regs
conv = if nodeMustPointToIt dflags lf_info then NativeNodeCall
else NativeDirectCall
(offset, _, _) = mkCallEntry dflags conv args' []
; emitClosureAndInfoTable info_tbl conv args' $ body (offset, node, arg_regs)
}
-- Data constructors need closures, but not with all the argument handling
-- needed for functions. The shared part goes here.
emitClosureAndInfoTable ::
CmmInfoTable -> Convention -> [LocalReg] -> FCode () -> FCode ()
emitClosureAndInfoTable info_tbl conv args body
= do { blks <- getCode body
; let entry_lbl = toEntryLbl (cit_lbl info_tbl)
; emitProcWithConvention conv (Just info_tbl) entry_lbl args blks
}
| spacekitteh/smcghc | compiler/codeGen/StgCmmLayout.hs | bsd-3-clause | 19,947 | 0 | 22 | 5,530 | 3,551 | 1,903 | 1,648 | 275 | 25 |
{-# LANGUAGE CPP,MagicHash,ScopedTypeVariables,FlexibleInstances,RankNTypes,TypeSynonymInstances,MultiParamTypeClasses,BangPatterns #-}
-- | By Chris Kuklewicz, drawing heavily from binary and binary-strict,
-- but all the bugs are my own.
--
-- This file is under the usual BSD3 licence, copyright 2008.
--
-- Modified the monad to be strict for version 2.0.0
--
-- This started out as an improvement to
-- "Data.Binary.Strict.IncrementalGet" with slightly better internals.
-- The simplified 'Get', 'runGet', 'Result' trio with the
-- "Data.Binary.Strict.Class.BinaryParser" instance are an _untested_
-- upgrade from IncrementalGet. Especially untested are the
-- strictness properties.
--
-- 'Get' usefully implements Applicative and Monad, MonadError,
-- Alternative and MonadPlus. Unhandled errors are reported along
-- with the number of bytes successfully consumed. Effects of
-- 'suspend' and 'putAvailable' are visible after
-- fail/throwError/mzero.
--
-- Each time the parser reaches the end of the input it will return a
-- Partial wrapped continuation which requests a (Maybe
-- Lazy.ByteString). Passing (Just bs) will append bs to the input so
-- far and continue processing. If you pass Nothing to the
-- continuation then you are declaring that there will never be more
-- input and that the parser should never again return a partial
-- continuation; it should return failure or finished.
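--
-- A rough driver sketch (illustrative only, not exported by this module)
-- for feeding a list of chunks until the parser settles:
--
-- > feedAll :: Get a -> [L.ByteString] -> Result a
-- > feedAll g chunks = go (runGet g mempty) chunks
-- >   where go (Partial k) (c:cs) = go (k (Just c)) cs
-- >         go (Partial k) []     = k Nothing
-- >         go done        _      = done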
--
-- 'suspendUntilComplete' repeatedly uses a partial continuation to
-- ask for more input until 'Nothing' is passed and then it proceeds
-- with parsing.
--
-- The 'getAvailable' command returns the lazy byte string the parser
-- has remaining before calling 'suspend'. The 'putAvailable'
-- replaces this input and is a bit fancy: it also replaces the input
-- at the current offset for all the potential catchError/mplus
-- handlers. This change is _not_ reverted by fail/throwError/mzero.
--
-- The three functions 'lookAhead', 'lookAheadM', and 'lookAheadE' are
-- very similar to the ones in binary's Data.Binary.Get.
--
--
-- Add specialized high-bit-run
module Text.ProtocolBuffers.Get
(Get,runGet,runGetAll,Result(..)
-- main primitives
,ensureBytes,getStorable,getLazyByteString,suspendUntilComplete
-- parser state manipulation
,getAvailable,putAvailable
-- lookAhead capabilities
,lookAhead,lookAheadM,lookAheadE
-- below is for implementation of BinaryParser (for Int64 and Lazy bytestrings)
,skip,bytesRead,isEmpty,isReallyEmpty,remaining,spanOf,highBitRun
,getWord8,getByteString
,getWord16be,getWord32be,getWord64be
,getWord16le,getWord32le,getWord64le
,getWordhost,getWord16host,getWord32host,getWord64host
--
-- ,scan
,decode7,decode7size,decode7unrolled
) where
-- The Get monad is an instance of binary-strict's BinaryParser:
-- import qualified Data.Binary.Strict.Class as P(BinaryParser(..))
-- The Get monad is an instance of all of these library classes:
import Control.Applicative(Applicative(pure,(<*>)),Alternative(empty,(<|>)))
import Control.Monad(MonadPlus(mzero,mplus),when)
import Control.Monad.Error.Class(MonadError(throwError,catchError),Error(strMsg))
-- It can be a MonadCont, but the semantics are too broken without a ton of work.
-- implementation imports
--import Control.Monad(replicateM,(>=>)) -- XXX testing
--import qualified Data.ByteString as S(unpack) -- XXX testing
--import qualified Data.ByteString.Lazy as L(pack) -- XXX testing
import Control.Monad(ap) -- instead of Functor.fmap; ap for Applicative
import Data.Bits(Bits((.|.),(.&.)),shiftL)
import qualified Data.ByteString as S(concat,length,null,splitAt,findIndex)
import qualified Data.ByteString.Internal as S(ByteString(..),toForeignPtr,inlinePerformIO)
import qualified Data.ByteString.Unsafe as S(unsafeIndex,unsafeDrop {-,unsafeTake-})
import qualified Data.ByteString.Lazy as L(take,drop,length,span,toChunks,fromChunks,null,findIndex)
import qualified Data.ByteString.Lazy.Internal as L(ByteString(..),chunk)
import qualified Data.Foldable as F(foldr,foldr1) -- used with Seq
import Data.Int(Int32,Int64) -- index type for L.ByteString
import Data.Monoid(Monoid(mempty,mappend)) -- Writer has a Monoid constraint
import Data.Sequence(Seq,null,(|>)) -- used for future queue in handler state
import Data.Word(Word,Word8,Word16,Word32,Word64)
import Foreign.ForeignPtr(withForeignPtr)
import Foreign.Ptr(Ptr,castPtr,plusPtr,minusPtr,nullPtr)
import Foreign.Storable(Storable(peek,sizeOf))
import System.IO.Unsafe(unsafePerformIO)
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
import GHC.Base(Int(..),uncheckedShiftL#)
import GHC.Word(Word16(..),Word32(..),Word64(..),uncheckedShiftL64#)
#endif
--import Debug.Trace(trace)
trace :: a -> b -> b
trace _ = id
-- Simple external return type
data Result a = Failed {-# UNPACK #-} !Int64 String
| Finished !L.ByteString {-# UNPACK #-} !Int64 a
| Partial (Maybe L.ByteString -> Result a)
-- Internal state type, not exposed to the user.
-- Invariant: (S.null _top) implies (L.null _current)
data S = S { _top :: {-# UNPACK #-} !S.ByteString
, _current :: !L.ByteString
, consumed :: {-# UNPACK #-} !Int64
} deriving Show
data T3 s = T3 !Int !s !Int
--data TU s = TU'OK !s !Int | TU'DO (Get s)
data TU s = TU'OK !s !Int
{-# SPECIALIZE decode7unrolled :: Get Int64 #-}
{-# SPECIALIZE decode7unrolled :: Get Int32 #-}
{-# SPECIALIZE decode7unrolled :: Get Word64 #-}
{-# SPECIALIZE decode7unrolled :: Get Word32 #-}
{-# SPECIALIZE decode7unrolled :: Get Int #-}
{-# SPECIALIZE decode7unrolled :: Get Integer #-}
decode7unrolled :: forall s. (Num s,Integral s, Bits s) => Get s
-- NOINLINE decode7unrolled removed to allow SPECIALIZE to work
decode7unrolled = Get $ \ sc sIn@(S ss@(S.PS fp off len) bs n) pc -> trace ("decode7unrolled: "++show (len,n)) $
if S.null ss
then trace ("decode7unrolled: S.null ss") $ unGet decode7 sc sIn pc -- decode7 will try suspend then will fail if still bad
else
let (TU'OK x i) =
unsafePerformIO $ withForeignPtr fp $ \ptr0 -> do
if ptr0 == nullPtr || len < 1 then error "Get.decode7unrolled: ByteString invariant failed" else do
let ok :: s -> Int -> IO (TU s)
ok x0 i0 = return (TU'OK x0 i0)
more,err :: IO (TU s)
more = return (TU'OK 0 0) -- decode7
err = return (TU'OK 0 (-1)) -- throwError
{-# INLINE ok #-}
{-# INLINE more #-}
{-# INLINE err #-}
-- -- Next line is segfault fix for null bytestrings from Nathan Howell <nhowell@alphaheavy.com>
-- if ptr0 == nullPtr then more else do
let start = ptr0 `plusPtr` off :: Ptr Word8
b'1 <- peek start
if b'1 < 128 then ok (fromIntegral b'1) 1 else do
let !val'1 = fromIntegral (b'1 .&. 0x7F)
!end = start `plusPtr` len
!ptr2 = start `plusPtr` 1 :: Ptr Word8
if ptr2 >= end then more else do
b'2 <- peek ptr2
if b'2 < 128 then ok (val'1 .|. (fromIntegral b'2 `shiftL` 7)) 2 else do
let !val'2 = (val'1 .|. (fromIntegral (b'2 .&. 0x7F) `shiftL` 7))
!ptr3 = ptr2 `plusPtr` 1
if ptr3 >= end then more else do
b'3 <- peek ptr3
if b'3 < 128 then ok (val'2 .|. (fromIntegral b'3 `shiftL` 14)) 3 else do
let !val'3 = (val'2 .|. (fromIntegral (b'3 .&. 0x7F) `shiftL` 14))
!ptr4 = ptr3 `plusPtr` 1
if ptr4 >= end then more else do
b'4 <- peek ptr4
if b'4 < 128 then ok (val'3 .|. (fromIntegral b'4 `shiftL` 21)) 4 else do
let !val'4 = (val'3 .|. (fromIntegral (b'4 .&. 0x7F) `shiftL` 21))
!ptr5 = ptr4 `plusPtr` 1
if ptr5 >= end then more else do
b'5 <- peek ptr5
if b'5 < 128 then ok (val'4 .|. (fromIntegral b'5 `shiftL` 28)) 5 else do
let !val'5 = (val'4 .|. (fromIntegral (b'5 .&. 0x7F) `shiftL` 28))
!ptr6 = ptr5 `plusPtr` 1
if ptr6 >= end then more else do
b'6 <- peek ptr6
if b'6 < 128 then ok (val'5 .|. (fromIntegral b'6 `shiftL` 35)) 6 else do
let !val'6 = (val'5 .|. (fromIntegral (b'6 .&. 0x7F) `shiftL` 35))
!ptr7 = ptr6 `plusPtr` 1
if ptr7 >= end then more else do
b'7 <- peek ptr7
if b'7 < 128 then ok (val'6 .|. (fromIntegral b'7 `shiftL` 42)) 7 else do
let !val'7 = (val'6 .|. (fromIntegral (b'7 .&. 0x7F) `shiftL` 42))
!ptr8 = ptr7 `plusPtr` 1
if ptr8 >= end then more else do
b'8 <- peek ptr8
if b'8 < 128 then ok (val'7 .|. (fromIntegral b'8 `shiftL` 49)) 8 else do
let !val'8 = (val'7 .|. (fromIntegral (b'8 .&. 0x7F) `shiftL` 49))
!ptr9 = ptr8 `plusPtr` 1
if ptr9 >= end then more else do
b'9 <- peek ptr9
if b'9 < 128 then ok (val'8 .|. (fromIntegral b'9 `shiftL` 56)) 9 else do
let !val'9 = (val'8 .|. (fromIntegral (b'9 .&. 0x7F) `shiftL` 56))
!ptrA = ptr9 `plusPtr` 1
if ptrA >= end then more else do
b'A <- peek ptrA
if b'A < 128 then ok (val'9 .|. (fromIntegral b'A `shiftL` 63)) 10 else do
err
in if i > 0
then let ss' = (S.unsafeDrop i ss)
n' = n+fromIntegral i
s'safe = make_safe (S ss' bs n')
in sc x s'safe pc
else if i==0 then unGet decode7 sc sIn pc
else unGet (throwError $ "Text.ProtocolBuffers.Get.decode7unrolled: more than 10 bytes needed at bytes read of "++show n) sc sIn pc
{-# SPECIALIZE decode7 :: Get Int64 #-}
{-# SPECIALIZE decode7 :: Get Int32 #-}
{-# SPECIALIZE decode7 :: Get Word64 #-}
{-# SPECIALIZE decode7 :: Get Word32 #-}
{-# SPECIALIZE decode7 :: Get Int #-}
{-# SPECIALIZE decode7 :: Get Integer #-}
decode7 :: forall s. (Integral s, Bits s) => Get s
-- NOINLINE decode7 removed to allow SPECIALIZE to work
decode7 = go 0 0
where
go !s1 !shift1 = trace ("decode7.go: "++show (toInteger s1, shift1)) $ do
let -- scanner's inner loop decodes only in current top strict bytestring, does not advance input state
scanner (S.PS fp off len) =
withForeignPtr fp $ \ptr0 -> do
if ptr0 == nullPtr || len < 1 then error "Get.decode7: ByteString invariant failed" else do
let start = ptr0 `plusPtr` off -- start is a pointer to the next valid byte
end = start `plusPtr` len -- end is a pointer one byte past the last valid byte
inner :: (Ptr Word8) -> s -> Int -> IO (T3 s)
inner !ptr !s !shift
| ptr < end = do
w <- peek ptr
trace ("w: " ++ show w) $ do
if (128>) w
then return $ T3 (succ (ptr `minusPtr` start) ) -- length of capture
(s .|. ((fromIntegral w) `shiftL` shift)) -- put the last bits into high position
(-1) -- negative shift indicates satisfied
else inner (ptr `plusPtr` 1) -- loop on next byte
(s .|. ((fromIntegral (w .&. 0x7F)) `shiftL` shift)) -- put the new bits into high position
(shift+7) -- increase high position for next loop
| otherwise = return $ T3 (ptr `minusPtr` start) -- length so far (ptr past end-of-string so no succ)
s -- value so far
shift -- next shift to use
inner start s1 shift1
(S ss bs n) <- getFull
trace ("getFull says: "++ show ((S.length ss,ss),(L.length bs),n)) $ do
if S.null ss
then do
continue <- suspend
if continue
then go s1 shift1
else fail "Get.decode7: Zero length input" -- XXX can be triggered!
else do
let (T3 i sOut shiftOut) = unsafePerformIO $ scanner ss
t = S.unsafeDrop i ss -- Warning: 't' may be mempty
n' = n + fromIntegral i
trace ("scanner says "++show ((i,toInteger sOut,shiftOut),(S.length t,n'))) $ do
if 0 <= shiftOut
then do
putFull_unsafe (make_state bs n')
if L.null bs
then do
continue <- suspend
if continue
then go sOut shiftOut
else return sOut
else do
go sOut shiftOut
else do
putFull_safe (S t bs n') -- bs from getFull is still valid
return sOut
data T2 = T2 !Int64 !Bool
decode7size :: Get Int64
decode7size = go 0
where
go !len1 = do
let scanner (S.PS fp off len) =
withForeignPtr fp $ \ptr0 -> do
if ptr0 == nullPtr || len < 1 then error "Get.decode7size: ByteString invariant failed" else do
let start = ptr0 `plusPtr` off
end = start `plusPtr` len
inner :: (Ptr Word8) -> IO T2
inner !ptr
| ptr < end = do
w <- peek ptr
if (128>) w
then return $ T2 (fromIntegral (ptr `minusPtr` start)) True
else inner (ptr `plusPtr` 1)
| otherwise = return $ T2 (fromIntegral (ptr `minusPtr` start)) False
inner start
(S ss bs n) <- getFull
if S.null ss
then do
continue <- suspend
if continue
then go len1
else fail "Get.decode7size: zero length input"
else do
let (T2 i ok) = unsafePerformIO $ scanner ss
t = S.unsafeDrop (fromIntegral i) ss
n' = n + i
len2 = len1 + i
if ok
then do
putFull_unsafe (S t bs n')
return len2
else do
putFull_unsafe (make_state bs n')
if L.null bs
then do
continue <- suspend
if continue
then go len2
else return len2
else
go len2
-- Private Internal error handling stack type
-- This must NOT be exposed by this module
--
-- The ErrorFrame is the top-level error handler setup when execution begins.
-- It starts with the Bool set to True: meaning suspend can ask for more input.
-- Once suspend gets 'Nothing' in reply, the Bool is set to False, which means
-- that 'suspend' should no longer ask for input -- the input is finished.
-- Why store the Bool there? It was handy when I needed to add it.
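--
-- A rough picture (illustrative): after runGet installs the ErrorFrame, a
-- catchError pushes a HandlerFrame carrying its handler, and a later
-- setCheckpoint pushes a handler-less frame on top of that:
--
--   HandlerFrame Nothing s2 future2           -- checkpoint for lookAhead*
--     (HandlerFrame (Just catcher) s1 future1 -- pushed by catchError
--       (ErrorFrame topHandler True))         -- installed by runGet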
data FrameStack b = ErrorFrame (String -> S -> Result b) -- top level handler
Bool -- True at start, False if Nothing passed to suspend continuation
| HandlerFrame (Maybe ( S -> FrameStack b -> String -> Result b )) -- encapsulated handler
S -- stored state to pass to handler
                                 (Seq L.ByteString) -- additional input to pass to handler
(FrameStack b) -- earlier/shallower/outer handlers
type Success b a = (a -> S -> FrameStack b -> Result b)
-- Internal monad type
newtype Get a = Get {
unGet :: forall b. -- the forall hides the CPS style (and prevents use of MonadCont)
Success b a -- main continuation
-> S -- parser state
-> FrameStack b -- error handler stack
-> Result b -- operation
}
-- These implement the checkpointing needed to store and revive the
-- state for lookAhead. They are fragile because the setCheckpoint
-- must precede either useCheckpoint or clearCheckpoint but not both.
-- The FutureFrame must be the most recent handler, so the commands
-- must be in the same scope depth. Because of these constraints, the reader
-- value 'r' does not need to be stored and can be taken from the Get
-- parameter.
--
-- IMPORTANT: Any FutureFrame at the top level(s) is discarded by throwError.
setCheckpoint,useCheckpoint,clearCheckpoint :: Get ()
setCheckpoint = Get $ \ sc s pc -> sc () s (HandlerFrame Nothing s mempty pc)
useCheckpoint = Get $ \ sc (S _ _ _) frame ->
case frame of
(HandlerFrame Nothing s future pc) -> sc () (collect s future) pc
_ -> error "Text.ProtocolBuffers.Get: Impossible useCheckpoint frame!"
clearCheckpoint = Get $ \ sc s frame ->
case frame of
(HandlerFrame Nothing _s _future pc) -> sc () s pc
_ -> error "Text.ProtocolBuffers.Get: Impossible clearCheckpoint frame!"
-- | 'lookAhead' runs the @todo@ action and then rewinds only the
-- BinaryParser state. Any new input from 'suspend' or changes from
-- 'putAvailable' are kept. Changes to the user state (MonadState)
-- are kept. The MonadWriter output is retained.
--
-- If an error is thrown then the entire monad state is reset to last
-- catchError as usual.
lookAhead :: Get a -> Get a
lookAhead todo = do
setCheckpoint
a <- todo
useCheckpoint
return a
-- | 'lookAheadM' runs the @todo@ action. If the action returns 'Nothing' then the
-- BinaryParser state is rewound (as in 'lookAhead'). If the action returns 'Just' then
-- the BinaryParser is not rewound, and lookAheadM acts as an identity.
--
-- If an error is thrown then the entire monad state is reset to last
-- catchError as usual.
lookAheadM :: Get (Maybe a) -> Get (Maybe a)
lookAheadM todo = do
setCheckpoint
a <- todo
maybe useCheckpoint (const clearCheckpoint) a
return a
-- | 'lookAheadE' runs the @todo@ action. If the action returns 'Left' then the
-- BinaryParser state is rewound (as in 'lookAhead'). If the action returns 'Right' then
-- the BinaryParser is not rewound, and lookAheadE acts as an identity.
--
-- If an error is thrown then the entire monad state is reset to last
-- catchError as usual.
lookAheadE :: Get (Either a b) -> Get (Either a b)
lookAheadE todo = do
setCheckpoint
a <- todo
either (const useCheckpoint) (const clearCheckpoint) a
return a
-- 'collect' is used by 'putCheckpoint' and 'throwError'
collect :: S -> Seq L.ByteString -> S
collect s@(S ss bs n) future | Data.Sequence.null future = make_safe $ s
| otherwise = make_safe $ S ss (mappend bs (F.foldr1 mappend future)) n
-- Put the Show instances here
instance (Show a) => Show (Result a) where
showsPrec _ (Failed n msg) = ("(Failed "++) . shows n . (' ':) . shows msg . (")"++)
showsPrec _ (Finished bs n a) =
("(CFinished ("++)
. shows bs . (") ("++)
. shows n . (") ("++)
. shows a . ("))"++)
showsPrec _ (Partial {}) = ("(Partial <Maybe Data.ByteString.Lazy.ByteString-> Result a)"++)
instance Show (FrameStack b) where
showsPrec _ (ErrorFrame _ p) =(++) "(ErrorFrame <e->s->m b> " . shows p . (")"++)
showsPrec _ (HandlerFrame _ s future pc) = ("(HandlerFrame <> ("++)
. shows s . (") ("++) . shows future . (") ("++)
. shows pc . (")"++)
-- | 'runGet' is the simple executor
runGet :: Get a -> L.ByteString -> Result a
runGet (Get f) bsIn = f scIn sIn (ErrorFrame ec True)
where scIn a (S ss bs n) _pc = Finished (L.chunk ss bs) n a
sIn = make_state bsIn 0
ec msg sOut = Failed (consumed sOut) msg
-- | 'runGetAll' is the simple executor, and will not ask for any continuation because this lazy bytestring is all the input
runGetAll :: Get a -> L.ByteString -> Result a
runGetAll (Get f) bsIn = f scIn sIn (ErrorFrame ec False)
where scIn a (S ss bs n) _pc = Finished (L.chunk ss bs) n a
sIn = make_state bsIn 0
ec msg sOut = Failed (consumed sOut) msg
-- | Get the input currently available to the parser.
getAvailable :: Get L.ByteString
getAvailable = Get $ \ sc s@(S ss bs _) pc -> sc (L.chunk ss bs) s pc
-- | 'putAvailable' replaces the bytestream past the current # of read
-- bytes. This will also affect pending MonadError handler and
-- MonadPlus branches. I think all pending branches have to have
-- fewer bytesRead than the current one. If this is wrong then an
-- error will be thrown.
--
-- WARNING : 'putAvailable' is still untested.
putAvailable :: L.ByteString -> Get ()
putAvailable !bsNew = Get $ \ sc (S _ss _bs n) pc ->
let !s' = make_state bsNew n
rebuild (HandlerFrame catcher (S ss1 bs1 n1) future pc') =
HandlerFrame catcher sNew mempty (rebuild pc')
where balance = n - n1
whole | balance < 0 = error "Impossible? Cannot rebuild HandlerFrame in MyGet.putAvailable: balance is negative!"
| otherwise = L.take balance $ L.chunk ss1 bs1 `mappend` F.foldr mappend mempty future
sNew | balance /= L.length whole = error "Impossible? MyGet.putAvailable.rebuild.sNew HandlerFrame assertion failed."
| otherwise = make_state (mappend whole bsNew) n1
rebuild x@(ErrorFrame {}) = x
in sc () s' (rebuild pc)
-- Internal access to full internal state, as helper functions
getFull :: Get S
getFull = Get $ \ sc s pc -> sc s s pc
{-# INLINE putFull_unsafe #-}
putFull_unsafe :: S -> Get ()
putFull_unsafe !s = Get $ \ sc _s pc -> sc () s pc
{-# INLINE make_safe #-}
make_safe :: S -> S
make_safe s@(S ss bs n) =
if S.null ss
then make_state bs n
else s
{-# INLINE make_state #-}
make_state :: L.ByteString -> Int64 -> S
make_state L.Empty n = S mempty mempty n
make_state (L.Chunk ss bs) n = S ss bs n
putFull_safe :: S -> Get ()
putFull_safe= putFull_unsafe . make_safe
-- | Keep calling 'suspend' until Nothing is passed to the 'Partial'
-- continuation. This ensures all the data has been loaded into the
-- state of the parser.
suspendUntilComplete :: Get ()
suspendUntilComplete = do
continue <- suspend
when continue suspendUntilComplete
-- | Call suspend and throw an error with the provided @msg@ if
-- Nothing has been passed to the 'Partial' continuation. Otherwise
-- return ().
suspendMsg :: String -> Get ()
suspendMsg msg = do continue <- suspend
if continue then return ()
else throwError msg
-- | check that there are at least @n@ bytes available in the input.
-- This will suspend if there is too little data.
ensureBytes :: Int64 -> Get ()
ensureBytes n = do
(S ss bs _read) <- getFull
if S.null ss
then suspendMsg "ensureBytes failed" >> ensureBytes n
else do
if n < fromIntegral (S.length ss)
then return ()
else do if n == L.length (L.take n (L.chunk ss bs))
then return ()
else suspendMsg "ensureBytes failed" >> ensureBytes n
{-# INLINE ensureBytes #-}
-- | Pull @n@ bytes from the input, as a lazy ByteString. This will
-- suspend if there is too little data.
getLazyByteString :: Int64 -> Get L.ByteString
getLazyByteString n | n<=0 = return mempty
| otherwise = do
(S ss bs offset) <- getFull
if S.null ss
then do
suspendMsg ("getLazyByteString S.null ss failed with "++show (n,(S.length ss,L.length bs,offset)))
getLazyByteString n
else do
case splitAtOrDie n (L.chunk ss bs) of -- safe use of L.chunk because of S.null ss check above
Just (consume,rest) -> do
putFull_unsafe (make_state rest (offset+n))
return $! consume
Nothing -> do
suspendMsg ("getLazyByteString (Nothing from splitAtOrDie) failed with "++show (n,(S.length ss,L.length bs,offset)))
getLazyByteString n
{-# INLINE getLazyByteString #-} -- important
-- | 'suspend' is supposed to allow the execution of the monad to be
-- halted, awaiting more input. The computation is supposed to
-- continue normally if this returns True, and is supposed to halt
-- without calling suspend again if this returns False. All future
-- calls to suspend will return False automatically and do nothing
-- else.
--
-- These semantics are too specialized to let this escape this module.
class MonadSuspend m where
suspend :: m Bool
-- The instance here is fairly specific to the stack manipulation done
-- by 'addFuture' to ('S' user) and to the packaging of the resumption
-- function in 'IResult'('IPartial').
instance MonadSuspend Get where
suspend = Get (
let checkBool (ErrorFrame _ b) = b
checkBool (HandlerFrame _ _ _ pc) = checkBool pc
-- addFuture puts the new data in 'future' where throwError's collect can find and use it
addFuture bs (HandlerFrame catcher s future pc) =
HandlerFrame catcher s (future |> bs) (addFuture bs pc)
addFuture _bs x@(ErrorFrame {}) = x
-- Once suspend is given Nothing, it remembers this and always returns False
rememberFalse (ErrorFrame ec _) = ErrorFrame ec False
rememberFalse (HandlerFrame catcher s future pc) =
HandlerFrame catcher s future (rememberFalse pc)
in \ sc sIn pcIn ->
if checkBool pcIn -- Has Nothing ever been given to a partial continuation?
then let f Nothing = let pcOut = rememberFalse pcIn
in sc False sIn pcOut
f (Just bs') = let sOut = appendBS sIn bs'
pcOut = addFuture bs' pcIn
in sc True sOut pcOut
in Partial f
else sc False sIn pcIn -- once Nothing has been given suspend is a no-op
)
where appendBS (S ss bs n) bs' = make_safe (S ss (mappend bs bs') n)
-- A unique sort of command...
-- | 'discardInnerHandler' causes the most recent catchError to be
-- discarded, i.e. this reduces the stack of error handlers by removing
-- the top one. These are the same handlers which Alternative((<|>)) and
-- MonadPlus(mplus) use. This is useful to commit to the current branch and let
-- the garbage collector release the suspended handler and its hold on
-- the earlier input.
discardInnerHandler :: Get ()
discardInnerHandler = Get $ \ sc s pcIn ->
let pcOut = case pcIn of ErrorFrame {} -> pcIn
HandlerFrame _ _ _ pc' -> pc'
in sc () s pcOut
{-# INLINE discardInnerHandler #-}
{- Currently unused, commented out to satisfy -Wall
-- | 'discardAllHandlers' causes all catchError handler to be
-- discarded, i.e. this reduces the stack of error handlers to the top
-- level handler. These are the same handlers which Alternative((<|>))
-- and MonadPlus(mplus) use. This is useful to commit to the current
-- branch and let the garbage collector release the suspended handlers
-- and their hold on the earlier input.
discardAllHandlers :: Get ()
discardAllHandlers = Get $ \ sc s pcIn ->
let base pc@(ErrorFrame {}) = pc
base (HandlerFrame _ _ _ pc) = base pc
in sc () s (base pcIn)
{-# INLINE discardAllHandlers #-}
-}
-- The BinaryParser instance:
-- | Discard the next @m@ bytes
skip :: Int64 -> Get ()
skip m | m <=0 = return ()
| otherwise = do
ensureBytes m
(S ss bs n) <- getFull
-- Could ignore impossible S.null ss due to (ensureBytes m) and (0 < m) but be paranoid
let lbs = L.chunk ss bs -- L.chunk is safe
putFull_unsafe (make_state (L.drop m lbs) (n+m)) -- drop will not perform less than 'm' bytes due to ensureBytes above
-- | Return the number of 'bytesRead' so far. Initially 0, never negative.
bytesRead :: Get Int64
bytesRead = fmap consumed getFull
-- | Return the number of bytes 'remaining' before the current input
-- runs out and 'suspend' might be called.
remaining :: Get Int64
remaining = do (S ss bs _) <- getFull
return $ fromIntegral (S.length ss) + (L.length bs)
-- | Return True if the number of bytes 'remaining' is 0. Any further
-- attempts to read an empty parser will call 'suspend' which might
-- result in more input to consume.
--
-- Compare with 'isReallyEmpty'
isEmpty :: Get Bool
isEmpty = do (S ss _bs _n) <- getFull
return (S.null ss) -- && (L.null bs)
-- | Return True if the input is exhausted and will never be added to.
-- Returns False if there is input left to consume.
--
-- Compare with 'isEmpty'
isReallyEmpty :: Get Bool
isReallyEmpty = isEmpty >>= loop
where loop False = return False
loop True = do
continue <- suspend
if continue
then isReallyEmpty
else return True
-- | Get the length of the longest prefix of the input whose bytes all have the high bit set, plus the one following byte.
-- This made getVarInt slower.
highBitRun :: Get Int64
{-# INLINE highBitRun #-}
highBitRun = loop where
loop :: Get Int64
{-# INLINE loop #-}
loop = do
(S ss bs _n) <- getFull
-- S.null ss is okay, will lead to Nothing, Nothing, suspend below
let mi = S.findIndex (128>) ss
case mi of
Just i -> return (succ $ fromIntegral i)
Nothing -> do
let mj = L.findIndex (128>) bs
case mj of
Just j -> return (fromIntegral (S.length ss) + succ j)
Nothing -> do
continue <- suspend
if continue then loop
else fail "highBitRun has failed"
-- | get the longest prefix of the input where all the bytes satisfy the predicate.
spanOf :: (Word8 -> Bool) -> Get (L.ByteString)
spanOf f = do let loop = do (S ss bs n) <- getFull
let (pre,post) = L.span f (L.chunk ss bs) -- L.chunk is safe
putFull_unsafe (make_state post (n + L.length pre))
if L.null post
then do continue <- suspend
if continue then fmap ((L.toChunks pre)++) loop
else return (L.toChunks pre)
else return (L.toChunks pre)
fmap L.fromChunks loop
{-# INLINE spanOf #-}
-- | Pull @n@ bytes from the input, as a strict ByteString. This will
-- suspend if there is too little data. If the result spans multiple
-- lazy chunks then the result occupies a freshly allocated strict
-- bytestring, otherwise it fits in a single chunk and refers to the
-- same immutable memory block as the whole chunk.
getByteString :: Int -> Get S.ByteString
getByteString nIn | nIn <= 0 = return mempty
| otherwise = do
(S ss bs n) <- getFull
if nIn < S.length ss -- Leave at least one character of 'ss' in 'post' allowing putFull_unsafe below
then do let (pre,post) = S.splitAt nIn ss
putFull_unsafe (S post bs (n+fromIntegral nIn))
return $! pre
-- Expect nIn to be less than S.length ss the vast majority of times
-- so do not worry about doing anything fancy here.
else do now <- fmap (S.concat . L.toChunks) (getLazyByteString (fromIntegral nIn))
return $! now
{-# INLINE getByteString #-} -- important
getWordhost :: Get Word
getWordhost = getStorable
{-# INLINE getWordhost #-}
getWord8 :: Get Word8
getWord8 = getPtr 1
{-# INLINE getWord8 #-}
getWord16be,getWord16le,getWord16host :: Get Word16
getWord16be = do
s <- getByteString 2
return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w16` 8) .|.
(fromIntegral (s `S.unsafeIndex` 1))
{-# INLINE getWord16be #-}
getWord16le = do
s <- getByteString 2
return $! (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w16` 8) .|.
(fromIntegral (s `S.unsafeIndex` 0) )
{-# INLINE getWord16le #-}
getWord16host = getStorable
{-# INLINE getWord16host #-}
getWord32be,getWord32le,getWord32host :: Get Word32
getWord32be = do
s <- getByteString 4
return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w32` 24) .|.
(fromIntegral (s `S.unsafeIndex` 1) `shiftl_w32` 16) .|.
(fromIntegral (s `S.unsafeIndex` 2) `shiftl_w32` 8) .|.
(fromIntegral (s `S.unsafeIndex` 3) )
{-# INLINE getWord32be #-}
getWord32le = do
s <- getByteString 4
return $! (fromIntegral (s `S.unsafeIndex` 3) `shiftl_w32` 24) .|.
(fromIntegral (s `S.unsafeIndex` 2) `shiftl_w32` 16) .|.
(fromIntegral (s `S.unsafeIndex` 1) `shiftl_w32` 8) .|.
(fromIntegral (s `S.unsafeIndex` 0) )
{-# INLINE getWord32le #-}
getWord32host = getStorable
{-# INLINE getWord32host #-}
getWord64be,getWord64le,getWord64host :: Get Word64
getWord64be = do
s <- getByteString 8
return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w64` 56) .|.
(fromIntegral (s `S.unsafeIndex` 1) `shiftl_w64` 48) .|.
(fromIntegral (s `S.unsafeIndex` 2) `shiftl_w64` 40) .|.
(fromIntegral (s `S.unsafeIndex` 3) `shiftl_w64` 32) .|.
(fromIntegral (s `S.unsafeIndex` 4) `shiftl_w64` 24) .|.
(fromIntegral (s `S.unsafeIndex` 5) `shiftl_w64` 16) .|.
(fromIntegral (s `S.unsafeIndex` 6) `shiftl_w64` 8) .|.
(fromIntegral (s `S.unsafeIndex` 7) )
{-# INLINE getWord64be #-}
getWord64le = do
s <- getByteString 8
return $! (fromIntegral (s `S.unsafeIndex` 7) `shiftl_w64` 56) .|.
(fromIntegral (s `S.unsafeIndex` 6) `shiftl_w64` 48) .|.
(fromIntegral (s `S.unsafeIndex` 5) `shiftl_w64` 40) .|.
(fromIntegral (s `S.unsafeIndex` 4) `shiftl_w64` 32) .|.
(fromIntegral (s `S.unsafeIndex` 3) `shiftl_w64` 24) .|.
(fromIntegral (s `S.unsafeIndex` 2) `shiftl_w64` 16) .|.
(fromIntegral (s `S.unsafeIndex` 1) `shiftl_w64` 8) .|.
(fromIntegral (s `S.unsafeIndex` 0) )
{-# INLINE getWord64le #-}
getWord64host = getStorable
{-# INLINE getWord64host #-}
-- Below here are the class instances
instance Functor Get where
fmap f m = Get (\sc -> unGet m (sc . f))
{-# INLINE fmap #-}
instance Monad Get where
return a = seq a $ Get (\sc -> sc a)
{-# INLINE return #-}
m >>= k = Get (\sc -> unGet m (\ a -> seq a $ unGet (k a) sc))
{-# INLINE (>>=) #-}
fail = throwError . strMsg
instance MonadError String Get where
throwError msg = Get $ \_sc s pcIn ->
let go (ErrorFrame ec _) = ec msg s
go (HandlerFrame (Just catcher) s1 future pc1) = catcher (collect s1 future) pc1 msg
go (HandlerFrame Nothing _s1 _future pc1) = go pc1
in go pcIn
catchError mayFail handler = Get $ \sc s pc ->
let pcWithHandler = let catcher s1 pc1 e1 = unGet (handler e1) sc s1 pc1
in HandlerFrame (Just catcher) s mempty pc
actionWithCleanup = mayFail >>= \a -> discardInnerHandler >> return a
in unGet actionWithCleanup sc s pcWithHandler
instance MonadPlus Get where
mzero = throwError (strMsg "[mzero:no message]")
mplus m1 m2 = catchError m1 (const m2)
instance Applicative Get where
pure = return
(<*>) = ap
instance Alternative Get where
empty = mzero
(<|>) = mplus
-- | I use "splitAt" without tolerating too few bytes, so write a Maybe version.
-- This is the only place I invoke L.Chunk as constructor instead of pattern matching.
-- I claim that the first argument cannot be empty.
splitAtOrDie :: Int64 -> L.ByteString -> Maybe (L.ByteString, L.ByteString)
splitAtOrDie i ps | i <= 0 = Just (mempty, ps)
splitAtOrDie _i L.Empty = Nothing
splitAtOrDie i (L.Chunk x xs) | i < len = let (pre,post) = S.splitAt (fromIntegral i) x
in Just (L.chunk pre mempty, L.chunk post xs)
| otherwise = case splitAtOrDie (i-len) xs of
Nothing -> Nothing
Just (y1,y2) -> Just (L.chunk x y1,y2)
where len = fromIntegral (S.length x)
{-# INLINE splitAtOrDie #-}
------------------------------------------------------------------------
-- getPtr copied from binary's Get.hs
-- helper, get a raw Ptr onto a strict ByteString copied out of the
-- underlying lazy byteString. So many indirections from the raw parser
-- state that my head hurts...
-- Assume n>0
getPtr :: (Storable a) => Int -> Get a
getPtr n = do
(fp,o,_) <- fmap S.toForeignPtr (getByteString n)
return . S.inlinePerformIO $ withForeignPtr fp $ \p -> peek (castPtr $ p `plusPtr` o)
{-# INLINE getPtr #-}
-- I pushed the sizeOf into here (uses ScopedTypeVariables)
-- Assume sizeOf (undefined :: a)) > 0
getStorable :: forall a. (Storable a) => Get a
getStorable = do
(fp,o,_) <- fmap S.toForeignPtr (getByteString (sizeOf (undefined :: a)))
return . S.inlinePerformIO $ withForeignPtr fp $ \p -> peek (castPtr $ p `plusPtr` o)
{-# INLINE getStorable #-}
------------------------------------------------------------------------
------------------------------------------------------------------------
-- Unchecked shifts copied from binary's Get.hs
shiftl_w16 :: Word16 -> Int -> Word16
shiftl_w32 :: Word32 -> Int -> Word32
shiftl_w64 :: Word64 -> Int -> Word64
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
shiftl_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftL#` i)
shiftl_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftL#` i)
#if WORD_SIZE_IN_BITS < 64
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL64#` i)
#if __GLASGOW_HASKELL__ <= 606
-- Exported by GHC.Word in GHC 6.8 and higher
foreign import ccall unsafe "stg_uncheckedShiftL64"
uncheckedShiftL64# :: Word64# -> Int# -> Word64#
#endif
#else
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL#` i)
#endif
#else
shiftl_w16 = shiftL
shiftl_w32 = shiftL
shiftl_w64 = shiftL
#endif
| timjb/protocol-buffers | Text/ProtocolBuffers/Get.hs | apache-2.0 | 38,206 | 0 | 93 | 10,811 | 9,097 | 4,834 | 4,263 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.Widget.CPUMonitor
-- Copyright : (c) José A. Romero L.
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : José A. Romero L. <escherdragon@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Simple CPU monitor that uses a PollingGraph to visualize variations in the
-- user and system CPU times in one selected core, or in all cores available.
--
--------------------------------------------------------------------------------
module System.Taffybar.Widget.CPUMonitor where
import Control.Monad.IO.Class
import Data.IORef
import qualified GI.Gtk
import System.Taffybar.Information.CPU2 (getCPUInfo)
import System.Taffybar.Information.StreamInfo (getAccLoad)
import System.Taffybar.Widget.Generic.PollingGraph
-- | Creates a new CPU monitor. This is a PollingGraph fed by regular calls to
-- getCPUInfo, associated with an IORef used to remember the values yielded by the
-- last call to this function.
cpuMonitorNew
:: MonadIO m
=> GraphConfig -- ^ Configuration data for the Graph.
-> Double -- ^ Polling period (in seconds).
-> String -- ^ Name of the core to watch (e.g. \"cpu\", \"cpu0\").
-> m GI.Gtk.Widget
cpuMonitorNew cfg interval cpu = liftIO $ do
info <- getCPUInfo cpu
sample <- newIORef info
pollingGraphNew cfg interval $ probe sample cpu
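-- A minimal usage sketch (illustrative; @cfg@ stands for whatever
-- 'GraphConfig' you build elsewhere):
--
-- > cpuWidget :: MonadIO m => GraphConfig -> m GI.Gtk.Widget
-- > cpuWidget cfg = cpuMonitorNew cfg 0.5 "cpu"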
probe :: IORef [Int] -> String -> IO [Double]
probe sample cpuName = do
load <- getAccLoad sample $ getCPUInfo cpuName
case load of
l0:l1:l2:_ -> return [ l0 + l1, l2 ] -- user, system
_ -> return []
| teleshoes/taffybar | src/System/Taffybar/Widget/CPUMonitor.hs | bsd-3-clause | 1,649 | 0 | 12 | 282 | 263 | 148 | 115 | 23 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module SearchRepos where
import qualified Github.Search as Github
import qualified Github.Data as Github
import Control.Monad (forM,forM_)
import Data.Maybe (fromMaybe)
import Data.List (intercalate)
import System.Environment (getArgs)
import Text.Printf (printf)
import Data.Time.Clock (getCurrentTime, UTCTime(..))
import Data.Time.LocalTime (utc,utcToLocalTime,localDay,localTimeOfDay,TimeOfDay(..))
import Data.Time.Calendar (toGregorian)
main = do
args <- getArgs
date <- case args of
            (x:_) -> return x
            _     -> today
let query = "q=language%3Ahaskell created%3A>" ++ date ++ "&per_page=100"
let auth = Nothing
result <- Github.searchRepos' auth query
case result of
Left e -> putStrLn $ "Error: " ++ show e
Right r -> do forM_ (Github.searchReposRepos r) (\r -> do
putStrLn $ formatRepo r
putStrLn ""
)
putStrLn $ "Count: " ++ show n ++ " Haskell repos created since " ++ date
where n = Github.searchReposTotalCount r
-- | return today (in UTC) formatted as YYYY-MM-DD
today :: IO String
today = do
now <- getCurrentTime
let day = localDay $ utcToLocalTime utc now
(y,m,d) = toGregorian day
in return $ printf "%d-%02d-%02d" y m d
formatRepo :: Github.Repo -> String
formatRepo r =
let fields = [ ("Name", Github.repoName)
,("URL", Github.repoHtmlUrl)
,("Description", orEmpty . Github.repoDescription)
,("Created-At", formatMaybeDate . Github.repoCreatedAt)
,("Pushed-At", formatMaybeDate . Github.repoPushedAt)
,("Stars", show . Github.repoStargazersCount)
]
in intercalate "\n" $ map fmt fields
where fmt (s,f) = fill 12 (s ++ ":") ++ " " ++ f r
orEmpty = fromMaybe ""
fill n s = s ++ replicate n' ' '
where n' = max 0 (n - length s)
formatMaybeDate = maybe "???" formatDate
formatDate = show . Github.fromGithubDate
| adarqui/github | samples/Search/SearchRepos.hs | bsd-3-clause | 2,058 | 0 | 18 | 549 | 633 | 337 | 296 | 48 | 3 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Code generation for foreign calls.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmForeign (
cgForeignCall, loadThreadState, saveThreadState,
emitPrimCall, emitCCall,
emitForeignCall, -- For CmmParse
emitSaveThreadState, -- will be needed by the Cmm parser
emitLoadThreadState, -- ditto
emitCloseNursery, emitOpenNursery
) where
#include "HsVersions.h"
import StgSyn
import StgCmmProf (storeCurCCS, ccsType, curCCS)
import StgCmmEnv
import StgCmmMonad
import StgCmmUtils
import StgCmmClosure
import StgCmmLayout
import Cmm
import CmmUtils
import MkGraph
import Type
import TysPrim
import CLabel
import SMRep
import ForeignCall
import DynFlags
import Maybes
import Outputable
import BasicTypes
import Control.Monad
import Prelude hiding( succ )
-----------------------------------------------------------------------------
-- Code generation for Foreign Calls
-----------------------------------------------------------------------------
-- | emit code for a foreign call, and return the results to the sequel.
--
cgForeignCall :: ForeignCall -- the op
-> [StgArg] -- x,y arguments
-> Type -- result type
-> FCode ReturnKind
cgForeignCall (CCall (CCallSpec target cconv safety)) stg_args res_ty
= do { dflags <- getDynFlags
; let -- in the stdcall calling convention, the symbol needs @size appended
-- to it, where size is the total number of bytes of arguments. We
-- attach this info to the CLabel here, and the CLabel pretty printer
-- will generate the suffix when the label is printed.
call_size args
| StdCallConv <- cconv = Just (sum (map arg_size args))
| otherwise = Nothing
-- ToDo: this might not be correct for 64-bit API
arg_size (arg, _) = max (widthInBytes $ typeWidth $ cmmExprType dflags arg)
(wORD_SIZE dflags)
; cmm_args <- getFCallArgs stg_args
; (res_regs, res_hints) <- newUnboxedTupleRegs res_ty
; let ((call_args, arg_hints), cmm_target)
= case target of
StaticTarget _ _ False ->
panic "cgForeignCall: unexpected FFI value import"
StaticTarget lbl mPkgId True
-> let labelSource
= case mPkgId of
Nothing -> ForeignLabelInThisPackage
Just pkgId -> ForeignLabelInPackage pkgId
size = call_size cmm_args
in ( unzip cmm_args
, CmmLit (CmmLabel
(mkForeignLabel lbl size labelSource IsFunction)))
DynamicTarget -> case cmm_args of
(fn,_):rest -> (unzip rest, fn)
[] -> panic "cgForeignCall []"
fc = ForeignConvention cconv arg_hints res_hints CmmMayReturn
call_target = ForeignTarget cmm_target fc
-- we want to emit code for the call, and then emitReturn.
-- However, if the sequel is AssignTo, we shortcut a little
-- and generate a foreign call that assigns the results
-- directly. Otherwise we end up generating a bunch of
-- useless "r = r" assignments, which are not merely annoying:
-- they prevent the common block elimination from working correctly
-- in the case of a safe foreign call.
-- See Note [safe foreign call convention]
--
; sequel <- getSequel
; case sequel of
AssignTo assign_to_these _ ->
emitForeignCall safety assign_to_these call_target call_args
_something_else ->
do { _ <- emitForeignCall safety res_regs call_target call_args
; emitReturn (map (CmmReg . CmmLocal) res_regs)
}
}
{- Note [safe foreign call convention]
The simple thing to do for a safe foreign call would be the same as an
unsafe one: just
emitForeignCall ...
emitReturn ...
but consider what happens in this case
case foo x y z of
(# s, r #) -> ...
The sequel is AssignTo [r]. The call to newUnboxedTupleRegs picks [r]
as the result reg, and we generate
r = foo(x,y,z) returns to L1 -- emitForeignCall
L1:
r = r -- emitReturn
goto L2
L2:
...
Now L1 is a proc point (by definition, it is the continuation of the
safe foreign call). If L2 does a heap check, then L2 will also be a
proc point.
Furthermore, the stack layout algorithm has to arrange to save r
somewhere between the call and the jump to L1, which is annoying: we
would have to treat r differently from the other live variables, which
have to be saved *before* the call.
So we adopt a special convention for safe foreign calls: the results
are copied out according to the NativeReturn convention by the call,
and the continuation of the call should copyIn the results. (The
copyOut code is actually inserted when the safe foreign call is
lowered later). The result regs attached to the safe foreign call are
only used temporarily to hold the results before they are copied out.
We will now generate this:
r = foo(x,y,z) returns to L1
L1:
r = R1 -- copyIn, inserted by mkSafeCall
goto L2
L2:
... r ...
And when the safe foreign call is lowered later (see Note [lower safe
foreign calls]) we get this:
suspendThread()
r = foo(x,y,z)
resumeThread()
R1 = r -- copyOut, inserted by lowerSafeForeignCall
jump L1
L1:
r = R1 -- copyIn, inserted by mkSafeCall
goto L2
L2:
... r ...
Now consider what happens if L2 does a heap check: the Adams
optimisation kicks in and commons up L1 with the heap-check
continuation, resulting in just one proc point instead of two. Yay!
-}
emitCCall :: [(CmmFormal,ForeignHint)]
-> CmmExpr
-> [(CmmActual,ForeignHint)]
-> FCode ()
emitCCall hinted_results fn hinted_args
= void $ emitForeignCall PlayRisky results target args
where
(args, arg_hints) = unzip hinted_args
(results, result_hints) = unzip hinted_results
target = ForeignTarget fn fc
fc = ForeignConvention CCallConv arg_hints result_hints CmmMayReturn
emitPrimCall :: [CmmFormal] -> CallishMachOp -> [CmmActual] -> FCode ()
emitPrimCall res op args
= void $ emitForeignCall PlayRisky res (PrimTarget op) args
-- alternative entry point, used by CmmParse
emitForeignCall
:: Safety
-> [CmmFormal] -- where to put the results
-> ForeignTarget -- the op
-> [CmmActual] -- arguments
-> FCode ReturnKind
emitForeignCall safety results target args
| not (playSafe safety) = do
dflags <- getDynFlags
let (caller_save, caller_load) = callerSaveVolatileRegs dflags
emit caller_save
target' <- load_target_into_temp target
args' <- mapM maybe_assign_temp args
emit $ mkUnsafeCall target' results args'
emit caller_load
return AssignedDirectly
| otherwise = do
dflags <- getDynFlags
updfr_off <- getUpdFrameOff
target' <- load_target_into_temp target
args' <- mapM maybe_assign_temp args
k <- newLabelC
let (off, _, copyout) = copyInOflow dflags NativeReturn (Young k) results []
-- see Note [safe foreign call convention]
emit $
( mkStore (CmmStackSlot (Young k) (widthInBytes (wordWidth dflags)))
(CmmLit (CmmBlock k))
<*> mkLast (CmmForeignCall { tgt = target'
, res = results
, args = args'
, succ = k
, ret_args = off
, ret_off = updfr_off
, intrbl = playInterruptible safety })
<*> mkLabel k
<*> copyout
)
return (ReturnedTo k off)
load_target_into_temp :: ForeignTarget -> FCode ForeignTarget
load_target_into_temp (ForeignTarget expr conv) = do
tmp <- maybe_assign_temp expr
return (ForeignTarget tmp conv)
load_target_into_temp other_target@(PrimTarget _) =
return other_target
-- What we want to do here is create a new temporary for the foreign
-- call argument if it is not safe to use the expression directly,
-- because the expression mentions caller-saves GlobalRegs (see
-- Note [Register Parameter Passing]).
--
-- However, we can't pattern-match on the expression here, because
-- this is used in a loop by CmmParse, and testing the expression
-- results in a black hole. So we always create a temporary, and rely
-- on CmmSink to clean it up later. (Yuck, ToDo). The generated code
-- ends up being the same, at least for the RTS .cmm code.
--
maybe_assign_temp :: CmmExpr -> FCode CmmExpr
maybe_assign_temp e = do
dflags <- getDynFlags
reg <- newTemp (cmmExprType dflags e)
emitAssign (CmmLocal reg) e
return (CmmReg (CmmLocal reg))
-- -----------------------------------------------------------------------------
-- Save/restore the thread state in the TSO
-- This stuff can't be done in suspendThread/resumeThread, because it
-- refers to global registers which aren't available in the C world.
saveThreadState :: DynFlags -> CmmAGraph
saveThreadState dflags =
-- CurrentTSO->stackobj->sp = Sp;
mkStore (cmmOffset dflags (CmmLoad (cmmOffset dflags stgCurrentTSO (tso_stackobj dflags)) (bWord dflags)) (stack_SP dflags)) stgSp
<*> closeNursery dflags
-- and save the current cost centre stack in the TSO when profiling:
<*> if gopt Opt_SccProfilingOn dflags then
mkStore (cmmOffset dflags stgCurrentTSO (tso_CCCS dflags)) curCCS
else mkNop
emitSaveThreadState :: FCode ()
emitSaveThreadState = do
dflags <- getDynFlags
emit (saveThreadState dflags)
emitCloseNursery :: FCode ()
emitCloseNursery = do
df <- getDynFlags
emit (closeNursery df)
-- CurrentNursery->free = Hp+1;
closeNursery :: DynFlags -> CmmAGraph
closeNursery dflags = mkStore (nursery_bdescr_free dflags) (cmmOffsetW dflags stgHp 1)
loadThreadState :: DynFlags -> LocalReg -> LocalReg -> CmmAGraph
loadThreadState dflags tso stack = do
catAGraphs [
-- tso = CurrentTSO;
mkAssign (CmmLocal tso) stgCurrentTSO,
-- stack = tso->stackobj;
mkAssign (CmmLocal stack) (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_stackobj dflags)) (bWord dflags)),
-- Sp = stack->sp;
mkAssign sp (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_SP dflags)) (bWord dflags)),
-- SpLim = stack->stack + RESERVED_STACK_WORDS;
mkAssign spLim (cmmOffsetW dflags (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_STACK dflags))
(rESERVED_STACK_WORDS dflags)),
-- HpAlloc = 0;
        -- HpAlloc is assumed to be set to non-zero only by a failed
        -- heap check, see HeapStackCheck.cmm:GC_GENERIC
mkAssign hpAlloc (zeroExpr dflags),
openNursery dflags,
-- and load the current cost centre stack from the TSO when profiling:
if gopt Opt_SccProfilingOn dflags then
storeCurCCS
(CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_CCCS dflags)) (ccsType dflags))
else mkNop]
emitLoadThreadState :: FCode ()
emitLoadThreadState = do
dflags <- getDynFlags
load_tso <- newTemp (gcWord dflags)
load_stack <- newTemp (gcWord dflags)
emit $ loadThreadState dflags load_tso load_stack
emitOpenNursery :: FCode ()
emitOpenNursery = do
df <- getDynFlags
emit (openNursery df)
openNursery :: DynFlags -> CmmAGraph
openNursery dflags = catAGraphs [
-- Hp = CurrentNursery->free - 1;
mkAssign hp (cmmOffsetW dflags (CmmLoad (nursery_bdescr_free dflags) (bWord dflags)) (-1)),
-- HpLim = CurrentNursery->start +
-- CurrentNursery->blocks*BLOCK_SIZE_W - 1;
mkAssign hpLim
(cmmOffsetExpr dflags
(CmmLoad (nursery_bdescr_start dflags) (bWord dflags))
(cmmOffset dflags
(CmmMachOp (mo_wordMul dflags) [
CmmMachOp (MO_SS_Conv W32 (wordWidth dflags))
[CmmLoad (nursery_bdescr_blocks dflags) b32],
mkIntExpr dflags (bLOCK_SIZE dflags)
])
(-1)
)
)
]
nursery_bdescr_free, nursery_bdescr_start, nursery_bdescr_blocks :: DynFlags -> CmmExpr
nursery_bdescr_free dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_free dflags)
nursery_bdescr_start dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_start dflags)
nursery_bdescr_blocks dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_blocks dflags)
tso_stackobj, tso_CCCS, stack_STACK, stack_SP :: DynFlags -> ByteOff
tso_stackobj dflags = closureField dflags (oFFSET_StgTSO_stackobj dflags)
tso_CCCS dflags = closureField dflags (oFFSET_StgTSO_cccs dflags)
stack_STACK dflags = closureField dflags (oFFSET_StgStack_stack dflags)
stack_SP dflags = closureField dflags (oFFSET_StgStack_sp dflags)
closureField :: DynFlags -> ByteOff -> ByteOff
closureField dflags off = off + fixedHdrSize dflags
stgSp, stgHp, stgCurrentTSO, stgCurrentNursery :: CmmExpr
stgSp = CmmReg sp
stgHp = CmmReg hp
stgCurrentTSO = CmmReg currentTSO
stgCurrentNursery = CmmReg currentNursery
sp, spLim, hp, hpLim, currentTSO, currentNursery, hpAlloc :: CmmReg
sp = CmmGlobal Sp
spLim = CmmGlobal SpLim
hp = CmmGlobal Hp
hpLim = CmmGlobal HpLim
currentTSO = CmmGlobal CurrentTSO
currentNursery = CmmGlobal CurrentNursery
hpAlloc = CmmGlobal HpAlloc
-- -----------------------------------------------------------------------------
-- For certain types passed to foreign calls, we adjust the actual
-- value passed to the call. For ByteArray#/Array# we pass the
-- address of the actual array, not the address of the heap object.
getFCallArgs :: [StgArg] -> FCode [(CmmExpr, ForeignHint)]
-- (a) Drop void args
-- (b) Add foreign-call shim code
-- It's (b) that makes this differ from getNonVoidArgAmodes
getFCallArgs args
= do { mb_cmms <- mapM get args
; return (catMaybes mb_cmms) }
where
get arg | isVoidRep arg_rep
= return Nothing
| otherwise
= do { cmm <- getArgAmode (NonVoid arg)
; dflags <- getDynFlags
; return (Just (add_shim dflags arg_ty cmm, hint)) }
where
arg_ty = stgArgType arg
arg_rep = typePrimRep arg_ty
hint = typeForeignHint arg_ty
add_shim :: DynFlags -> Type -> CmmExpr -> CmmExpr
add_shim dflags arg_ty expr
| tycon == arrayPrimTyCon || tycon == mutableArrayPrimTyCon
= cmmOffsetB dflags expr (arrPtrsHdrSize dflags)
| tycon == smallArrayPrimTyCon || tycon == smallMutableArrayPrimTyCon
= cmmOffsetB dflags expr (smallArrPtrsHdrSize dflags)
| tycon == byteArrayPrimTyCon || tycon == mutableByteArrayPrimTyCon
= cmmOffsetB dflags expr (arrWordsHdrSize dflags)
| otherwise = expr
where
UnaryRep rep_ty = repType arg_ty
tycon = tyConAppTyCon rep_ty
-- should be a tycon app, since this is a foreign call
| frantisekfarka/ghc-dsi | compiler/codeGen/StgCmmForeign.hs | bsd-3-clause | 15,782 | 0 | 20 | 4,329 | 2,774 | 1,435 | 1,339 | 230 | 6 |
module Testsuite.Utils.Test (
Test, ($?), ($$?), TestS(..), summarise, TestM, execTestM, liftIO, runTest
) where
import Test.QuickCheck
import Test.QuickCheck.Batch
import System.IO ( hFlush, stdout )
data Test = Test String Property
| Group String [Test]
($?) :: Testable a => String -> a -> Test
name $? test = Test name (property test)
($$?) :: String -> [Test] -> Test
($$?) = Group
data TestS = TestS {
indent :: Int
, passCount :: !Int
, failCount :: !Int
, exhaustedCount :: !Int
, abortedCount :: !Int
}
passed :: TestS -> TestS
passed t@(TestS {}) = t { passCount = passCount t + 1 }
failed :: TestS -> TestS
failed t@(TestS {}) = t { failCount = failCount t + 1 }
exhausted :: TestS -> TestS
exhausted t@(TestS {}) = t { exhaustedCount = exhaustedCount t + 1 }
aborted :: TestS -> TestS
aborted t@(TestS {}) = t { abortedCount = abortedCount t + 1 }
summarise :: TestS -> [String]
summarise s = concat [ [shows_n (passCount s) "passed"]
, shows_nz (failCount s) "failed"
, shows_nz (exhaustedCount s) "exhausted"
, shows_nz (abortedCount s) "aborted"
]
where
shows_n n s = let t = show n
l = length t
in
replicate (10 - l) ' ' ++ t ++ " " ++ s
shows_nz 0 s = []
shows_nz n s = [shows_n n s]
newtype TestM a = TestM { runTestM :: TestS -> IO (a, TestS) }
instance Monad TestM where
return x = TestM $ \s -> return (x,s)
TestM f >>= g = TestM $ \s ->
do
(x,s') <- f s
runTestM (g x) s'
readTestM :: (TestS -> a) -> TestM a
readTestM f = TestM $ \s -> return (f s, s)
updTestM :: (TestS -> TestS) -> TestM ()
updTestM f = TestM $ \s -> return ((), f s)
execTestM :: TestM a -> IO (a, TestS)
execTestM (TestM f) = f $ TestS {
indent = 0
, passCount = 0
, failCount = 0
, exhaustedCount = 0
, abortedCount = 0
}
liftIO :: IO a -> TestM a
liftIO p = TestM $ \s -> do
x <- p
return (x,s)
runTest :: Test -> TestM ()
runTest (Group name tests)
= do
ind <- readTestM indent
liftIO . putStrLn $ replicate (ind * 2 + 2) '*' ++ " " ++ name
updTestM $ \s -> s { indent = ind + 1 }
mapM_ runTest tests
updTestM $ \s -> s { indent = ind }
runTest (Test name prop)
= do
liftIO $ do putStr $ name ++ replicate (60 - length name) ' ' ++ "... "
hFlush stdout
res <- liftIO $ run prop defOpt
let (s, ss, upd) = result res
liftIO $ do putStrLn s
hFlush stdout
mapM_ (putStrLn . (" " ++)) ss
hFlush stdout
updTestM upd
{-
case res of
TestOk _ n _ -> putStrLn $ "pass (" ++ show n ++ ")"
TestExausted _ n _ -> putStrLn $ "EXHAUSTED (" ++ show n ++ ")"
TestFailed s n ->
do
putStrLn $ "FAIL (" ++ show n ++ ")"
mapM_ putStrLn $ map (" " ++) s
TestAborted e ->
do
putStrLn $ "ABORTED"
putStrLn $ " " ++ show e
-}
result :: TestResult -> (String, [String], TestS -> TestS)
result (TestOk _ _ _) = ("pass", [], passed)
result (TestExausted _ n _) = ("EXHAUSTED", [], exhausted)
result (TestFailed s n) = ("FAIL", s, failed)
result (TestAborted e) = ("ABORTED", [show e], aborted)
| dolio/vector | old-testsuite/Testsuite/Utils/Test.hs | bsd-3-clause | 3,761 | 0 | 15 | 1,492 | 1,287 | 683 | 604 | 89 | 2 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Selenium add-on</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | msrader/zap-extensions | src/org/zaproxy/zap/extension/selenium/resources/help_fa_IR/helpset_fa_IR.hs | apache-2.0 | 961 | 79 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
import GHC.Exts
newtype Eval a = Eval {runEval :: State# RealWorld -> (# State# RealWorld, a #)}
-- inline sequence :: [Eval a] -> Eval [a]
well_sequenced :: [Eval a] -> Eval [a]
well_sequenced = foldr cons nil where
cons e es = Eval $ \s -> case runEval e s of
(# s', a #) -> case runEval es s' of
(# s'', as #) -> (# s'', a : as #)
nil = Eval $ \s -> (# s, [] #)
-- seemingly demonic use of spark#
ill_sequenced :: [Eval a] -> Eval [a]
ill_sequenced as = Eval $ spark# (case well_sequenced as of
Eval f -> case f realWorld# of (# _, a' #) -> a')
-- 'parallelized' version of (show >=> show >=> show >=> show >=> show)
main :: IO ()
main = putStrLn ((layer . layer . layer . layer . layer) (:[]) 'y')
where
layer :: (Char -> String) -> (Char -> String)
layer f = (\(Eval x) -> case x realWorld# of (# _, as #) -> concat as)
. well_sequenced -- [Eval String] -> Eval [String]
. map ill_sequenced -- [[Eval Char]] -> [Eval String];
-- 'map well_sequenced' is fine
. map (map (\x -> Eval $ \s -> (# s, x #))) -- wrap each Char in Eval
. chunk' -- String -> [String]
. concatMap f
. show -- add single quotes
chunk' :: String -> [String]
chunk' [] = []
chunk' xs = as : chunk' bs where (as,bs) = splitAt 3 xs
-- this doesn't work:
-- chunk (a:b:c:xs) = [a,b,c]:chunk xs
-- chunk xs = [xs]
| urbanslug/ghc | testsuite/tests/codeGen/should_run/T10414.hs | bsd-3-clause | 1,526 | 0 | 18 | 473 | 481 | 257 | 224 | 25 | 2 |
module Fast2haskell (
Complex_type, Array_type, Assoc_type, Descr_type,
abortstr, delay, fix, force, iff, iffrev, seQ,
pair, strcmp,
entier,
land_i, lnot_i, lor_i, lshift_i, rshift_i,
descr,
destr_update, indassoc, lowbound, tabulate, upbound, update, valassoc) where {
import Data.Bits;
-- import Word2;
import Data.Word;
import Data.Complex; -- 1.3
import Data.Array; -- 1.3
-- import Data.Int ( Num(fromInt) );
type Complex_type = Complex Double;
type Array_type b = Array Int b;
type Assoc_type a = (Int, a);
type Descr_type = (Int,Int);
abortstr str = error ("abort:"++str); -- abort (OtherError str);
delay x = abortstr "delay not implemented";
fix f = fix_f where {fix_f = f fix_f};
force x = x; -- error "force not implemented";
iff b x y = if b then x else y;
iffrev y x b = if b then x else y;
seQ x y = x `seq` y;
pair [] = False;
pair x = True;
strcmp :: [Char] -> [Char] -> Bool;
strcmp x y = x == y;
entier x = fromIntegral (floor x);
land_i :: Int -> Int -> Int;
land_i x y = x .&. y;
lnot_i :: Int -> Int;
lnot_i x = complement x;
lor_i :: Int -> Int -> Int;
lor_i x y = x .|. y;
lshift_i :: Int -> Int -> Int;
lshift_i x y = x `shiftL` y;
rshift_i :: Int -> Int -> Int;
rshift_i x y = x `shiftR` y;
write x = abortstr "write not implemented";
descr l u = (l,u);
destr_update ar i x = ar // [(i,x)];
indassoc (i,v) = i;
lowbound (l,u) = l;
tabulate f (l,u) = listArray (l,u) [f i | i <- [l..u]];
upbound (l,u) = u;
update ar i x = ar // [(i,x)];
valassoc (i,v) = v;
}
| ghc-android/ghc | testsuite/tests/programs/fast2haskell/Fast2haskell.hs | bsd-3-clause | 2,658 | 6 | 9 | 1,456 | 698 | 413 | 285 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Cmm.ActivityAnalysis
( ActivityStorage(..)
, activityAnalysis
) where
import Cmm.DirectedGraph
import Cmm.Backend
(MachineInstr(..), MachineFunction(..), MachinePrg(..))
import Cmm.LabelGenerator (Temp())
import Cmm.ControlFlowGraph (createControlFlowGraph, Unique(..))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import Text.Printf (printf)
import Data.List (foldl', find)
import Data.Maybe (fromJust)
-- | simple data type to store the
-- written temps ~ out
-- accessed temps ~ in
data ActivityStorage = ActivityStorage
{ out_a :: Set Temp
, in_a :: Set Temp
} deriving (Show, Eq, Ord)
emptyActivityStorage :: ActivityStorage
emptyActivityStorage =
ActivityStorage
{ out_a = Set.empty
, in_a = Set.empty
}
-- | analyses the liveness ("activity") of the temporaries
-- by reversing the control flow graph, doing an almost correct
-- depth-first search (the successors are Sets)
-- and iterating the usual backwards dataflow equations until a fixed point
-- is reached:
--
--   in[n]  = use(n) `union` (out[n] `difference` def(n))
--   out[n] = unions [ in(s) | s <- successors n ]
--
activityAnalysis
:: (MachineInstr i, Ord i, Show i)
=> DirectedGraph (Unique i) -> Map (Unique i) ActivityStorage
activityAnalysis graph =
-- revGraph :: DirectedGraph (Int, i)
let revGraph = reverseGraph graph
lastReturn = (Set.size (nodes revGraph) + 1, ret)
-- revNodes :: (Ord i) => [(Int, i)]
revNodes = toList revGraph lastReturn
-- livelinessMap :: Map (Int, i) ActivityStorage
livelinessMap =
Map.fromList $ zip revNodes (repeat emptyActivityStorage)
-- runUpate :: Map (Int, i) ActivityStorage -> Map (Int, i) ActivityStorage
runUpate lm = fst $ foldl' updateActivities (lm, graph) revNodes
-- newMap :: Map (Int, i) ActivityStorage
solvedMap = repeatUntilSame livelinessMap runUpate
in solvedMap
where
updateActivities (lm, g) i =
-- succs :: [(Int, i)]
let succs = Set.toList $ successors g i
-- activity_ins :: [Set i]
activitiy_ins = map (\i -> in_a $ fromJust $ Map.lookup i lm) succs
-- out_ :: Set Temp
outs = out_a $ fromJust $ Map.lookup i lm
-- in_ :: Set Temp
in_ = ((use . snd) i) `Set.union` (outs `Set.difference` ((def . snd) i))
out_ = (Set.unions $ activitiy_ins)
activitiy =
ActivityStorage
{ out_a = out_
, in_a = in_
}
in (Map.insert i activitiy lm, g)
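-- | iterate the transformation from the given start state until the result
-- no longer changes (naive fixed-point iteration)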
repeatUntilSame :: Eq s => s -> (s -> s) -> s
repeatUntilSame state transform = do
let newState = transform state
case state == newState of
True -> state
False -> repeatUntilSame newState transform
| cirquit/hjc | src/Cmm/ActivityAnalysis.hs | mit | 2,790 | 0 | 17 | 758 | 672 | 380 | 292 | 54 | 2 |
{-# OPTIONS_GHC -Wall -fno-warn-unused-top-binds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Okasaki.Stack (
StackF(..)
, Stack
, empty
, push
, pop
, fromList
, toList
, isEmpty
, cat
, update
, suffixes
) where
import Prelude hiding (head, tail)
import Data.Functor.Foldable as RS
import Text.Show.Deriving
data StackF a r =
NilF
| ConsF !a r
deriving (Eq, Functor, Foldable, Traversable, Show)
$(deriveShow1 ''StackF)
type Stack a = Fix (StackF a)
empty :: Stack a
empty = Fix NilF
push :: a -> Stack a -> Stack a
push h t = Fix (ConsF h t)
pop :: Stack a -> Maybe (a, Stack a)
pop s = case project s of
NilF -> Nothing
ConsF h t -> Just (h, t)
fromList :: [a] -> Stack a
fromList = ana coalg where
coalg = \case
[] -> NilF
(h : t) -> ConsF h t
toList :: Stack a -> [a]
toList = ana coalg where
coalg s = case project s of
NilF -> Nil
ConsF h t -> Cons h t
isEmpty :: Stack a -> Bool
isEmpty s = case project s of
NilF -> True
_ -> False
cat :: Stack a -> Stack a -> Stack a
cat l r = apo coalg (project l) where
coalg = \case
ConsF h t -> case project t of
NilF -> ConsF h (Left r)
rest -> ConsF h (Right rest)
NilF -> fmap Left (project r)
update :: Int -> a -> Stack a -> Stack a
update idx x s = apo coalg (idx, s) where
coalg (j, stack) = case project stack of
NilF -> NilF
ConsF h t ->
if j <= 0
then ConsF x (Left t)
else ConsF h (Right (pred j, t))
-- exercise 2.1
suffixes :: Stack a -> Stack (Stack a)
suffixes = ana coalg where
coalg stack = case project stack of
NilF -> NilF
ConsF _ t -> ConsF t t
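-- A small illustration (not part of the original module): 'suffixes' applied
-- to the stack [1,2,3] yields the proper suffixes [2,3], [3] and [], each
-- again represented as a Stack.
suffixesExample :: [[Int]]
suffixesExample = map toList (toList (suffixes (fromList [1, 2, 3])))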
-- test
test0 :: Stack Int
test0 = fromList [1..3]
test1 :: Stack Int
test1 = fromList [4..7]
test2 :: Stack Int
test2 = update 3 100 (cat test0 test1)
| jtobin/okasaki | lib/Okasaki/Stack.hs | mit | 2,028 | 0 | 15 | 552 | 817 | 422 | 395 | 79 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-|
Module: Capnp.Rpc.Server
Description: handlers for incoming method calls.
The term server in this context refers to a thread that handles method calls for
a particular capability (The capnproto rpc protocol itself has no concept of
clients and servers).
-}
module Capnp.Rpc.Server
( Server(..)
, ServerOps(..)
, CallInfo(..)
, runServer
-- * Handling methods
, MethodHandler
-- ** Working with untyped data
, untypedHandler
, toUntypedHandler
, fromUntypedHandler
) where
import Control.Concurrent.STM
import Data.Word
import Data.Typeable (Typeable)
import Capnp.Message (Mutability(..))
import Capnp.Rpc.Promise (Fulfiller)
import Capnp.Untyped (Ptr)
import qualified Internal.TCloseQ as TCloseQ
-- | a @'MethodHandler' m p r@ handles a method call with parameters @p@
-- and return type @r@, in monad @m@.
--
-- The library represents method handlers via an abstract type
-- 'MethodHandler', parametrized over parameter (@p@) and return (@r@)
-- types, and the monadic context in which it runs (@m@). This allows us
-- to provide different strategies for actually handling methods; there
-- are various helper functions which construct these handlers.
--
-- At some point we will likely additionally provide handlers affording:
--
-- * Working directly with the low-level data types.
-- * Replying to the method call asynchronously, allowing later method
-- calls to be serviced before the current one is finished.
newtype MethodHandler m p r = MethodHandler
{ handleMethod
:: Maybe (Ptr 'Const)
-> Fulfiller (Maybe (Ptr 'Const))
-> m ()
}
-- | Convert a 'MethodHandler' for any parameter and return types into
-- one that deals with untyped pointers.
toUntypedHandler
:: MethodHandler m p r
-> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const))
toUntypedHandler MethodHandler{..} = MethodHandler{..}
-- | Inverse of 'toUntypedHandler'
fromUntypedHandler
:: MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const))
-> MethodHandler m p r
fromUntypedHandler MethodHandler{..} = MethodHandler{..}
-- | Construct a method handler from a function accepting an untyped
-- pointer for the method's parameter, and a 'Fulfiller' which accepts
-- an untyped pointer for the method's return value.
untypedHandler
:: (Maybe (Ptr 'Const) -> Fulfiller (Maybe (Ptr 'Const)) -> m ())
-> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const))
untypedHandler = MethodHandler
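-- A minimal sketch (not part of the original module) of a handler written
-- directly against the untyped interface. It assumes 'fulfill' from
-- "Capnp.Rpc.Promise" (not imported by this module) to discharge the
-- 'Fulfiller':
--
--   echoHandler :: MethodHandler IO (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const))
--   echoHandler = untypedHandler $ \params result -> fulfill result params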
-- | Base class for things that can act as capnproto servers.
class Monad m => Server m a | a -> m where
-- | Called when the last live reference to a server is dropped.
shutdown :: a -> m ()
shutdown _ = pure ()
-- | Try to extract a value of a given type. The default implementation
-- always fails (returns 'Nothing'). If an instance chooses to implement
-- this, it will be possible to use "reflection" on clients that point
-- at local servers to dynamically unwrap the server value. A typical
-- implementation will just call Typeable's @cast@ method, but this
-- needn't be the case -- a server may wish to allow local peers to
-- unwrap some value that is not exactly the data the server has access
-- to.
unwrap :: Typeable b => a -> Maybe b
unwrap _ = Nothing
-- | The operations necessary to receive and handle method calls, i.e.
-- to implement an object. It is parametrized over the monadic context
-- in which methods are serviced.
data ServerOps m = ServerOps
{ handleCall
:: Word64
-> Word16
-> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const))
-- ^ Handle a method call; takes the interface and method id and returns
-- a handler for the specific method.
, handleStop :: m ()
-- ^ Handle shutting-down the receiver; this is called when the last
-- reference to the capability is dropped.
, handleCast :: forall a. Typeable a => Maybe a
-- ^ used to unwrap the server when reflecting on a local client.
}
-- | A 'CallInfo' contains information about a method call.
data CallInfo = CallInfo
{ interfaceId :: !Word64
-- ^ The id of the interface whose method is being called.
, methodId :: !Word16
-- ^ The method id of the method being called.
, arguments :: Maybe (Ptr 'Const)
-- ^ The arguments to the method call.
, response :: Fulfiller (Maybe (Ptr 'Const))
-- ^ A 'Fulfiller' which accepts the method's return value.
}
-- | Handle incoming messages for a given object.
--
-- Accepts a queue of messages to handle, and 'ServerOps' used to handle them.
-- returns when it receives a 'Stop' message.
runServer :: TCloseQ.Q CallInfo -> ServerOps IO -> IO ()
runServer q ops = go
where
go = atomically (TCloseQ.read q) >>= \case
Nothing ->
pure ()
Just CallInfo{interfaceId, methodId, arguments, response} -> do
handleMethod
(handleCall ops interfaceId methodId)
arguments
response
go
| zenhack/haskell-capnp | lib/Capnp/Rpc/Server.hs | mit | 5,431 | 0 | 16 | 1,266 | 808 | 455 | 353 | -1 | -1 |
{- | Values implemented named terms with explicit substitutions.
-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances, TypeSynonymInstances, MultiParamTypeClasses,
OverlappingInstances, IncoherentInstances, UndecidableInstances,
PatternGuards, TupleSections #-}
module NamedExplSubst where
import Prelude hiding (pi,abs,mapM)
import Control.Applicative
import Control.Monad.Reader hiding (mapM)
{-
import Data.Map (Map)
import qualified Data.Map as Map
-}
import Data.Traversable
import qualified Abstract as A
import qualified ListEnv as Env
-- import qualified MapEnv as Env
import Signature
import Util
import Value
import Fresh
-- * Values
type Var = A.UID
-- | Heads are identifiers excluding @A.Def@.
type Head = A.Ident
data Val
= Ne Head Val [Val] -- ^ @x^a vs^-1 | c^a vs^-1@
| Df A.Name Val Val [Val] -- ^ @d=v^a vs^-1@ a,v are closed!
| App Val [Val] -- ^ @v vs^-1@ non-canonical
-- last argument first in list!
| Sort Sort -- ^ @s@
| K Val -- ^ constant function
| Abs A.Name Val -- ^ @\xv@ abstraction
| Fun Val Val -- ^ @Pi a b@
| Clos Val Env -- ^ @v[rho]@
| DontCare
-- * Environments (Values for expression (=bound) variables)
type Env = Env.Env Var Val
-- * Application, Substitution
-- | @app f v@ computes the whnf of @f v@ without expanding definitions
app :: Val -> Val -> Val
app f v =
case f of
K w -> w
Ne h t vs -> Ne h t (v:vs)
Df h w t vs -> Df h w t (v:vs)
App w ws -> rapps w (v:ws) -- evaluate apps
Abs x w -> substFree v (A.uid x) w
Clos (Abs x w) sigma -> substs (Env.update sigma (A.uid x) v) w
Clos w sigma -> substs sigma w `app` v
rapps :: Val -> [Val] -> Val
rapps f vs = foldr (flip app) f vs
-- | @substFree v x w = [v/x]w@ single substitution
substFree :: Val -> Var -> Val -> Val
substFree w x = substs (Env.singleton x w)
-- | parallel substitution, computing whnf
substs :: Env -> Val -> Val
substs sigma = subst where
subst (Ne h@(A.Var y) a vs) = case Env.lookup (A.uid y) sigma of
Just w -> rapps w $ map subst vs
Nothing -> Ne h (subst a) $ map subst vs
subst (Ne h a vs) = Ne h a $ map subst vs -- a is closed
subst (Df h v a vs) = Df h v a $ map subst vs -- a,v are closed
subst (App v vs) = rapps (subst v) (map subst vs)
-- subst (rapps v vs) -- OR: first compute application ?
subst (Sort s) = Sort s
subst (K v) = K $ subst v
subst (Abs x v) = Clos (Abs x v) sigma
subst (Clos v tau) = flip substs v $ flip Env.union sigma $ Env.map subst tau
-- composing two substitutions (first tau, then sigma) :
-- apply sigma to tau
-- add all bindings from sigma that are not yet present
-- thus, we can take sigma and overwrite it with [sigma]tau
subst (Fun a b) = Fun (subst a) $ subst b
-- | computing the whnf of a term, pushing a delayed substitution in
whnf :: Val -> Val
whnf (Clos v rho) = substs rho v
whnf (App f vs) = rapps f vs
whnf v = v
-- * Smart Constructors for values.
var :: A.Name -> Val -> Val
var x t = Ne (A.Var x) t []
var_ :: A.Name -> Val
var_ x = var x DontCare
con :: A.Name -> Val -> Val
con x t = Ne (A.Con x) t []
def :: A.Name -> Val -> Val -> Val
def x v t = Df x v t []
-- non-computing application
application :: Val -> Val -> Val
application f v =
case f of
Ne h t vs -> Ne h t (v:vs)
Df x w t vs -> Df x w t (v:vs)
App w vs -> App w (v:vs)
K w -> w -- because K comes from non-dep fun types
_ -> App f [v]
-- * projections
boundName :: Val -> A.Name
boundName (Abs n _) = n
boundName _ = A.noName
-- * Translation
-- | @translate e rho = v@ where @rho@ is a renaming.
translate :: (Applicative m, Monad m, Signature Val sig, MonadReader sig m, MonadFresh m) =>
A.Expr -> Renaming -> m Val
translate e rho =
case e of
A.Ident (A.Con x) -> con x . symbType . sigLookup' (A.uid x) <$> ask
A.Ident (A.Def x) -> do
~(SigDef t v) <- sigLookup' (A.uid x) <$> ask
return $ def x v t
A.Ident (A.Var x) -> return $ var_ $ Env.lookupSafe (A.uid x) rho
A.App f e -> application <$> (evaluate f rho) <*> (evaluate e rho)
A.Lam x mt e -> do y <- fresh x
Abs y <$> translate e (Env.update rho (A.uid x) y)
A.Pi mx e e' -> Fun <$> (evaluate e rho) <*> case mx of
Just x -> do
y <- fresh x
Abs y <$> translate e (Env.update rho (A.uid x) y)
Nothing -> K <$> evaluate e' rho
A.Typ -> typ
-- * Evaluation monad
instance (Applicative m, Monad m, Signature Val sig, MonadReader sig m, MonadFresh m) => MonadEval Head Val Renaming m where
typ = return $ Sort Type
kind = return $ Sort Kind
freeVar h t = return $ Ne h t []
valView v = return $
case (whnf v) of
Fun a b -> VPi a b
Sort s -> VSort s
Ne h t vs -> VNe h t (reverse vs)
Df x v t vs -> VDef (A.Def x) t (reverse vs)
_ -> VAbs
apply f v = return $ app f v
evaluate e rho = error "NYI: NamedExplSubst.evaluate"
-- evaluate e rho = whnf <$> translate e rho
evaluate' e = whnf <$> (translate e =<< renaming)
unfold v =
case v of
Df x f t vs -> appsR f vs
_ -> return v
unfolds v =
case v of
Df x f t vs -> unfolds =<< appsR f vs -- unfolding application
_ -> return v
abstractPi a (n, Ne (A.Var x) _ []) b = return $ Fun a $ Abs x b
reify v = quote v
-- * Reification
-- quote :: Val -> A.SysNameCounter -> EvalM A.Expr
quote :: (Applicative m, Monad m, MonadFresh m, MonadEval Head Val Renaming m) =>
Val -> m A.Expr
quote v =
case v of
Ne h a vs -> foldr (flip A.App) (A.Ident h) <$> mapM quote vs
Df x f a vs -> foldr (flip A.App) (A.Ident (A.Def x)) <$> mapM quote vs
App f vs -> foldr (flip A.App) <$> quote f <*> mapM quote vs
Sort Type -> return A.Typ
Sort Kind -> error "cannot quote sort kind"
DontCare -> error "cannot quote the dontcare value"
Fun a (K b) -> A.Pi Nothing <$> quote a <*> quote b
Fun a f -> do
u <- quote a
(x,t) <- quoteFun f
return $ A.Pi (Just x) u t
f -> do
(x,e) <- quoteFun f
return $ A.Lam x Nothing e
-- | @quoteFun n v@ expects @v@ to be a function and returns and its
-- body as an expression.
-- quoteFun :: Val -> A.SysNameCounter -> EvalM (A.Name, A.Expr)
quoteFun :: (Applicative m, Monad m, MonadFresh m, MonadEval Head Val Renaming m) =>
Val -> m (A.Name, A.Expr)
quoteFun f = do
x <- fresh $ boundName f
v <- f `apply` (var_ x)
(x,) <$> quote v
| andreasabel/helf | src/NamedExplSubst.hs | mit | 7,012 | 0 | 20 | 2,285 | 2,561 | 1,277 | 1,284 | 143 | 10 |
import Control.Monad
import Control.Applicative
solve ev od = solve' ev od
where
solve' :: String -> String -> String
solve' [] [] = []
solve' (x:xs) [] = [x]
solve' (x:xs) (y:ys) = x : y : solve' xs ys
main :: IO ()
main = do
ev <- getLine
od <- getLine
putStrLn $ solve ev od
| pogin503/vbautil | atcoder/beg058/beg058b.hs | mit | 304 | 0 | 9 | 84 | 157 | 79 | 78 | 12 | 3 |
{-# LANGUAGE TypeOperators, FlexibleInstances, FlexibleContexts #-}
module Data.Functor.Classes.Show.Generic
( Show1(..)
, genericLiftShowsPrec
, genericLiftShowList
, gliftShowsPrec
, gliftShowList
) where
import Data.Functor.Classes
import GHC.Generics
import Text.Show
-- | Generically-derivable lifting of the 'Show' class to unary type constructors.
class GShow1 f where
-- | showsPrec function for an application of the type constructor based on showsPrec and showList functions for the argument type.
gliftShowsPrec :: (Int -> a -> ShowS) -> ([a] -> ShowS) -> Int -> f a -> ShowS
-- | showList function for an application of the type constructor based on showsPrec and showList functions for the argument type. The default implementation using standard list syntax is correct for most types.
gliftShowList :: GShow1 f => (Int -> a -> ShowS) -> ([a] -> ShowS) -> [f a] -> ShowS
gliftShowList sp sl = showListWith (gliftShowsPrec sp sl 0)
-- | A suitable implementation of Show1’s liftShowsPrec for Generic1 types.
genericLiftShowsPrec :: (Generic1 f, GShow1 (Rep1 f)) => (Int -> a -> ShowS) -> ([a] -> ShowS) -> Int -> f a -> ShowS
genericLiftShowsPrec sp sl d = gliftShowsPrec sp sl d . from1
-- | A suitable implementation of Show1’s liftShowList for Generic1 types.
genericLiftShowList :: (Generic1 f, GShow1 (Rep1 f)) => (Int -> a -> ShowS) -> ([a] -> ShowS) -> [f a] -> ShowS
genericLiftShowList sp sl = gliftShowList sp sl . map from1
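-- A minimal usage sketch (not part of the original module): deriving a Show1
-- instance for a hypothetical Generic1 type via the helpers above. The data
-- type and its DeriveGeneric/Generic1 deriving are assumptions, not part of
-- this file:
--
--   data Pair a = Pair a a deriving Generic1
--
--   instance Show1 Pair where
--     liftShowsPrec = genericLiftShowsPrec
--     liftShowList  = genericLiftShowList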
-- Show1 instances
instance GShow1 [] where gliftShowsPrec = liftShowsPrec
instance GShow1 Maybe where gliftShowsPrec = liftShowsPrec
instance Show a => GShow1 ((,) a) where gliftShowsPrec = liftShowsPrec
instance Show a => GShow1 (Either a) where gliftShowsPrec = liftShowsPrec
-- Generics
instance GShow1 U1 where
gliftShowsPrec _ _ _ _ = id
instance GShow1 Par1 where
gliftShowsPrec sp _ d (Par1 a) = sp d a
instance Show c => GShow1 (K1 i c) where
gliftShowsPrec _ _ d (K1 a) = showsPrec d a
instance Show1 f => GShow1 (Rec1 f) where
gliftShowsPrec sp sl d (Rec1 a) = liftShowsPrec sp sl d a
instance GShow1 f => GShow1 (M1 D c f) where
gliftShowsPrec sp sl d (M1 a) = gliftShowsPrec sp sl d a
instance (Constructor c, GShow1 f) => GShow1 (M1 C c f) where
gliftShowsPrec sp sl d m = showsUnaryWith (gliftShowsPrec sp sl) (conName m) d (unM1 m)
instance GShow1 f => GShow1 (M1 S c f) where
gliftShowsPrec sp sl d (M1 a) = gliftShowsPrec sp sl d a
instance (GShow1 f, GShow1 g) => GShow1 (f :+: g) where
gliftShowsPrec sp sl d (L1 l) = gliftShowsPrec sp sl d l
gliftShowsPrec sp sl d (R1 r) = gliftShowsPrec sp sl d r
instance (GShow1 f, GShow1 g) => GShow1 (f :*: g) where
gliftShowsPrec sp sl d (a :*: b) = gliftShowsPrec sp sl d a . showChar ' ' . gliftShowsPrec sp sl d b
instance (Show1 f, GShow1 g) => GShow1 (f :.: g) where
gliftShowsPrec sp sl d (Comp1 a) = liftShowsPrec (gliftShowsPrec sp sl) (gliftShowList sp sl) d a
| tclem/lilo | src/Data/Functor/Classes/Show/Generic.hs | mit | 2,930 | 0 | 11 | 549 | 1,021 | 525 | 496 | 43 | 1 |
module SimpleArgvParser (pairArguments) where
import Prelude hiding (map)
import qualified Data.Map.Strict as Map
strIsOption :: String -> Bool
strIsOption (a:b:_) = (a == '-') && (b == '-')
strIsOption _ = False
-- TODO: use either here
pairArguments :: [String] -> Maybe (Map.Map String String)
pairArguments args = collect args Map.empty
where
collect [] map = Just map
collect [_] _ = Nothing
collect (k:v:rst) map = if strIsOption k then collect rst (Map.insert key v map) else Nothing
where
(_,key) = splitAt 2 k
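-- A quick illustration (not part of the original module): arguments are
-- expected as alternating "--key value" pairs, e.g.
--
--   pairArguments ["--in", "a.txt", "--out", "b.txt"]
--     == Just (Map.fromList [("in","a.txt"), ("out","b.txt")])
exampleArgs :: Maybe (Map.Map String String)
exampleArgs = pairArguments ["--in", "a.txt", "--out", "b.txt"]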
| davidfontenot/haskell-hashtag-viewer | src/SimpleArgvParser.hs | mit | 550 | 0 | 11 | 116 | 220 | 119 | 101 | 12 | 4 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Client where
import Control.Applicative
import Control.Monad
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson
import Data.Aeson.Lens
-- applyBasicAuth expects a strict ByteString
import Data.ByteString.Lazy.Char8 hiding (filter, foldl)
import qualified Data.ByteString.Char8 as BC
import Data.Maybe
import Data.Text as T hiding (foldl)
import Network.HTTP.Conduit
--------------------------------------------------------------------------------
-- Response
--------------------------------------------------------------------------------
-- | Some hacking around....
| wayofthepie/tc-rest-client | src/Client.hs | mit | 839 | 0 | 5 | 83 | 121 | 84 | 37 | 17 | 0 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_list_ops (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,2] []
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/bin"
libdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2/list-ops-0.1.0.2-JKI50BvNEqOLJomxL6kjuW"
dynlibdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2"
datadir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/share/x86_64-osx-ghc-8.0.2/list-ops-0.1.0.2"
libexecdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/libexec"
sysconfdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/etc"
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "list_ops_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "list_ops_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "list_ops_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "list_ops_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "list_ops_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "list_ops_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| c19/Exercism-Haskell | list-ops/.stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/autogen/Paths_list_ops.hs | mit | 2,371 | 0 | 10 | 239 | 410 | 238 | 172 | 33 | 1 |
module Web.YahooPortfolioManager.App
( module Web.YahooPortfolioManager.Foundation
, module Web.YahooPortfolioManager.Dispatch
, module Web.YahooPortfolioManager.Handlers
) where
import Web.YahooPortfolioManager.Foundation
import Web.YahooPortfolioManager.Dispatch ()
import Web.YahooPortfolioManager.Handlers
| lhoghu/intranet | Web/YahooPortfolioManager/App.hs | mit | 327 | 0 | 5 | 37 | 51 | 35 | 16 | 7 | 0 |
--
-- If the numbers 1 to 5 are written out in words: one, two, three, four, five, then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
--
-- If all the numbers from 1 to 1000 (one thousand) inclusive were written out in words, how many letters would be used?
--
-- NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20 letters. The use of "and" when writing out numbers is in compliance with British usage.
--
english :: Int -> String
english n
| n < 0 = "minus " ++ english (negate n)
| n < 20 =
[ "zero", "one", "two", "three", "four"
, "five", "six", "seven", "eight", "nine"
, "ten", "eleven", "twelve", "thirteen", "fourteen"
, "fifteen", "sixteen", "seventeen", "eighteen", "nineteen"
] !! n
| n < 100 =
[ "?", "?", "twenty", "thirty", "forty"
, "fifty", "sixty", "seventy", "eighty", "ninety"
] !! (n `div` 10)
++ (if n `mod` 10 == 0 then "" else "-" ++ english (n `mod` 10))
| n < 1000 =
(english (n `div` 100)) ++ " hundred"
++ (if n `mod` 100 == 0 then "" else " and " ++ english (n `mod` 100))
| n < 1000000 =
(english (n `div` 1000)) ++ " thousand"
++ (if n `mod` 1000 == 0 then "" else " " ++ english (n `mod` 1000))
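-- Illustrative spot checks (not part of the original solution), mirroring the
-- examples in the problem statement above:
checkExamples :: Bool
checkExamples = english 342 == "three hundred and forty-two"
             && english 115 == "one hundred and fifteen"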
main = putStrLn $ show $ length $ filter (`elem` ['a'..'z']) $ foldr (++) [] $ map english [1..1000]
| stu-smith/project-euler-haskell | Euler-017.hs | mit | 1,563 | 0 | 12 | 495 | 426 | 246 | 180 | 21 | 4 |
module Properties where
main :: IO ()
main = return ()
| nickspinale/wmonad | tests/Properties.hs | mit | 56 | 0 | 6 | 12 | 24 | 13 | 11 | 3 | 1 |
{-# LANGUAGE DeriveDataTypeable, TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses #-}
module Graph.Cross where
import Graph.Util
import Autolib.Graph.Basic
import Autolib.Dot
import Autolib.Util.Splits
import Autolib.Hash
import Autolib.FiniteMap
import Autolib.Boxing.Position
import Inter.Types
import Autolib.Reporter
import qualified Challenger as C
import Data.Typeable
import Data.Maybe ( fromMaybe, isNothing )
import Data.List ( tails )
type Punkt = ( Integer, Integer )
type Strecke = ( Punkt, Punkt )
-- | straight line drawing
type Karte a = FiniteMap a Punkt
instance GraphC a
=> C.Measure Cross ( Int, Graph a ) ( Karte a ) where
measure p (c, g) f =
1000 * fromIntegral (length $ crossings g f) + extension f
extension :: Karte a -> Integer
extension f =
let range :: [ Integer ] -> Integer
range [] = 0
range xs = maximum xs - minimum xs
xys = eltsFM f
in max (range $ map fst xys)
(range $ map snd xys)
data Cross = Cross deriving ( Eq, Ord, Show, Read, Typeable )
instance OrderScore Cross where
scoringOrder _ = Increasing
instance ( Show a, GraphC a, Typeable a )
=> C.Partial Cross ( Int, Graph a ) ( Karte a ) where
report p (c, g) = do
inform $ vcat
[ text "Ordnen Sie den Knoten dieses Graphen"
, nest 4 $ toDoc g
]
peng g
inform $ vcat
[ text "ganzzahlige Koordinaten zu,"
, text "so daß sich eine Zeichnung mit höchstens"
<+> toDoc c <+> text "Kreuzungen"
, text "und mit geringer Ausdehnung ergibt."
, parens $ text "Bewertung: 1000 * Kreuzungszahl + größte Ausdehnung"
]
initial p (c, g) = listToFM $ do
(k, x) <- zip [ 0 .. ] $ lknoten g
return (x, (k, k^2))
partial p (c, g) b = do
alle_zugeordnet ( knoten g ) b
-- alle_verschieden b
keiner_auf_strecke b
total p (c, g) b = do
inform $ text "Ihre Zeichnung ist:"
peng $ Pin g b
let crs = crossings g b
inform $ vcat
[ text "Ihre Zeichnung ergibt diese Kreuzungen:"
, nest 4 $ toDoc crs
]
when ( length crs > c ) $ reject
$ text "Das sind zuviele."
make :: Make
make = direct Cross (1 :: Int, clique $ mkSet [1:: Int .. 5])
g :: Graph Int
g = clique $ mkSet [1 :: Int .. 5]
b :: Karte Int
b = C.initial Cross (1 :: Int, g)
--------------------------------------------------------------
data Pin a = Pin ( Graph a ) ( Karte a )
deriving ( Show , Eq, Ord
)
instance GraphC a => Hash ( Pin a ) where
hash (Pin g f) = hash (g, f)
instance ( GraphC a, Show a ) => ToDot ( Pin a ) where
toDot ( Pin g f ) = toDot $ pin g f
toDotProgram _ = Neato
toDotOptions _ = unwords [ "-s" ]
pin :: GraphC a
=> Graph a
-> Karte a
-> Graph a
pin = pinN 7
pinN :: GraphC a => Position -> Graph a -> Karte a -> Graph a
pinN sz g f =
let
h = mapFM ( \ k v -> toPos v ) f
( ul, or ) = minimax $ eltsFM h
scale = sz / abs ( or - ul )
in g { graph_layout = mapFM ( \ k p -> ( p - ul ) * scale ) h
, bounding = or
, show_labels = True
}
toPos ( x , y ) = Position { width = fromIntegral x
, height = fromIntegral y
}
---------------------------------------------------------------
alle_zugeordnet v b = do
inform $ text "Haben Sie jedem Knoten einen Punkt zugeordnet?"
let missing = do
k <- setToList v
guard $ isNothing $ lookupFM b k
return k
if null missing
then inform $ text "Ja."
else reject $ text "Nein, diesen nicht:" <+> toDoc missing
---------------------------------------------------------------
keiner_auf_strecke b = sequence_ $ do
( pre0 , ap @ ( a , p ) : post0 ) <- splits $ fmToList b
( pre1 , bq @ ( b , q ) : post1 ) <- splits $ pre0 ++ post0
cr @ ( c, r ) <- pre1 ++ post1
let present (a,p) = hsep [ text "Knoten", toDoc a, parens ( text "Position" <+> toDoc p ) ]
return $ when ( between p q r ) $ reject $ text "Fehler:" <+> vcat
[ present bq
, text "liegt auf der Strecke"
, text "zwischen" <+> present ap
, text "und " <+> present cr
]
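-- | @between p q r@ works with squared distances a = dist2 p q, b = dist2 q r,
-- c = dist2 p r: the test 4*a*b == (c - a - b)^2 holds exactly when
-- sqrt a + sqrt b == sqrt c or |sqrt a - sqrt b| == sqrt c, i.e. when the
-- three points are collinear; the intended case is q lying on the segment
-- from p to r.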
between p q r =
let a = dist2 p q
b = dist2 q r
c = dist2 p r
in 4 * a * b == ( c - a - b ) ^ 2
dist2 (px,py) (qx,qy) = (px-qx)^2 + (py-qy)^2
-----------------------------------------------------------
alle_verschieden b = do
inform $ text "Sind alle Punkte verschieden?"
let multis = do
( pos, ks ) <- fmToList $ addListToFM_C (++) emptyFM $ do
( k, pos ) <- fmToList b
return ( pos, [k] )
guard $ length ks > 1
return ( pos, ks )
when ( not $ null multis ) $ reject $ vcat
[ text "nein, diese Punkte gehören zu mehreren Knoten:"
, nest 4 $ vcat $ map toDoc multis
]
crossings :: GraphC a
=> Graph a
-> Karte a
-> [(Kante a, Kante a)]
crossings g f = do
let look x = fromMaybe (error "Graph.Cross.crossings.look") $ lookupFM f x
e : es <- tails $ lkanten g
let a = look $ von e
b = look $ nach e
e' <- es
guard $ isEmptySet $ intersect ( mkSet [ von e , nach e ] )
( mkSet [ von e', nach e' ] )
let a' = look $ von e'
b' = look $ nach e'
guard $ is_crossing (a, b) (a', b')
return (e, e')
-- | the two segments intersect each other
is_crossing :: Strecke -> Strecke -> Bool
is_crossing ab cd
| disjunct (bbox ab) (bbox cd) = False
is_crossing ab cd = trennt ab cd && trennt cd ab
-- | the endpoints of the first segment lie on different sides of the line through the second
trennt :: Strecke -> Strecke -> Bool
trennt (a, b) (c, d) = 0 < area2 a c d * area2 b d c
-- | twice the signed (oriented) area of the triangle
area2 :: Punkt -> Punkt -> Punkt -> Integer
area2 (x1,x2) (y1,y2) (z1,z2) =
det3 [[x1, x2, 1], [y1, y2, 1], [z1, z2, 1]]
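-- | determinant of a 3x3 matrix (given as a list of rows), by cofactor
-- expansion along the first row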
det3 :: Num a => [[a]] -> a
det3 [ [a1,a2,a3], [b1,b2,b3], [c1,c2,c3] ]
= a1 * ( b2 * c3 - b3 * c2 )
- a2 * ( b1 * c3 - b3 * c1 )
+ a3 * ( b1 * c2 - b2 * c1 )
-- | a rectangle, represented by its lower-left and upper-right corners
type Box = (Punkt,Punkt)
-- | two rectangles have nothing in common iff the larger of the two
-- lower-left corners is at least as large as the smaller of the two
-- upper-right corners
disjunct :: Box -> Box -> Bool
disjunct (alr,atr) (blr,btr) = max alr blr >= min atr btr
-- | bounding box, given by its lower-left and upper-right corners
bbox :: Strecke -> (Punkt,Punkt)
bbox ((x1,y1),(x2,y2)) = ((min x1 x2,min y1 y2),(max x1 x2,max y1 y2))
| Erdwolf/autotool-bonn | src/Graph/Cross.hs | gpl-2.0 | 6,797 | 42 | 18 | 2,130 | 2,408 | 1,280 | 1,128 | 162 | 2 |
module Main where
import Prelude hiding (readFile)
import Numerical
import Vec3 hiding (normalize)
import Scene
import Speaker
import Impulse
import Control.Applicative
import System.Environment
import Data.List (transpose)
import Pipes
import Pipes.Parse
import qualified Pipes.ByteString as P
import System.IO hiding (readFile)
import Pipes.Aeson hiding (decoded, decode)
import Pipes.Aeson.Unchecked
import Control.Monad.State.Strict
import Lens.Family (view)
import Lens.Family.State.Strict
import Data.Array.IO
import qualified Pipes.Prelude as PP
import Container (C3)
import qualified Sound.File.Sndfile as SF
import Data.Vector.Generic (fromList)
import Sound.File.Sndfile.Buffer.Vector (toBuffer)
spk :: [Speaker]
spk = [Speaker (Vec3 0 1 0) 0.5, Speaker (Vec3 1 0 0) 0.5]
lastSampleTimeForFile :: String -> IO Flt
lastSampleTimeForFile f =
withFile f ReadMode $ \hIn ->
evalStateT (zoom decoded (foldAll maxSample 0 id)) (P.fromHandle hIn)
maxSample :: Flt -> RayTrace -> Flt
maxSample = flip $ max . time . last . impulses
channelForFile :: FilePath -> Int -> Flt -> Speaker -> IO (IOArray Int (C3 Flt))
channelForFile f l sr speaker = withFile f ReadMode $ \hIn -> do
t <- newArray (0, l) (pure 0)
evalStateT (parseChannel t sr speaker) (P.fromHandle hIn)
produceRayTraces
:: Producer P.ByteString IO ()
-> Producer RayTrace IO (Either (DecodingError, Producer P.ByteString IO ()) ())
produceRayTraces = view decoded
produceRayTraces' :: Producer P.ByteString IO () -> Producer RayTrace IO ()
produceRayTraces' p = void $ produceRayTraces p
whatever :: FilePath -> Int -> Flt -> Speaker -> IO (IOArray Int (C3 Flt))
whatever f l sr speaker = withFile f ReadMode $ \hIn -> do
t <- newArray (0, l) (pure 0)
PP.foldM (\ _ rt -> channelForRayTrace sr rt speaker t)
(return ())
return
(produceRayTraces' $ P.fromHandle hIn)
return t
whatever' :: FilePath -> Int -> Flt -> Speaker -> IO (IOArray Int (C3 Flt))
whatever' f l sr speaker = withFile f ReadMode $ \hIn -> do
t <- newArray (0, l) (pure 0)
runEffect $ produceRayTraces' (P.fromHandle hIn)
>-> PP.mapM (\ rt -> channelForRayTrace sr rt speaker t)
>-> PP.drain
return t
parseChannel
:: IOArray Int (C3 Flt)
-> Flt
-> Speaker
-> StateT (Producer P.ByteString IO r) IO (IOArray Int (C3 Flt))
parseChannel t sr speaker =
zoom decoded $ foldAllM
(\ _ rt -> channelForRayTrace sr rt speaker t)
(return ())
(\_ -> return t)
createAndProcessChannelForFile
:: String -> Int -> Flt -> Speaker -> IO (IOUArray Int Flt)
createAndProcessChannelForFile f samples sr speaker = do
channel <- whatever' f samples sr speaker
bands <- splitBands channel
filterBands sr bands
out <- compileBands bands
hipass sr 20 out
return out
createAllChannelsForFile :: String -> Flt -> [Speaker] -> IO [[Flt]]
createAllChannelsForFile f sr speakers = do
lastTime <- lastSampleTimeForFile f
let finalSample = secondsToSamples sr lastTime
a <- mapM (createAndProcessChannelForFile f finalSample sr) speakers
normalize a
mapM getElems a
flatten :: String -> [Speaker] -> Flt -> String -> IO ()
flatten infile speakers sampleRate outFile = do
channels <- createAllChannelsForFile infile sampleRate speakers
let f = SF.Format SF.HeaderFormatAiff SF.SampleFormatFloat SF.EndianFile
let info = SF.Info
(length (head channels))
(round sampleRate)
(length channels)
f
1
True
let sampleData = toBuffer $ fromList $ concat $ transpose channels
_ <- SF.writeFile info outFile sampleData
return ()
main :: IO ()
main = do
args <- getArgs
case args of
[input, output] -> flatten input spk 44100 output
_ -> putStrLn "program takes two arguments"
| reuk/rayverb | flattener/Flattener.hs | gpl-2.0 | 3,961 | 0 | 15 | 939 | 1,413 | 717 | 696 | 104 | 2 |