code (string, 5 to 1.03M) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
import Types
import Genetic
import Runner
import Control.Monad.State
import Control.Lens
import System.Random
main = do
-- Create a prog with some haskell code
let prog = (def :: StateMachine) & code .~ (read "[+>++<].")
-- Execute the prog
let res = execState (exec 5000) prog
-- Now we have access to everything (strip state, code, output, etc.)
showVm res
-- Display the strip around the current position, the output, and
-- the illness state (tell if the vm "crashed").
showVm res = do
putStrLn . showStrip 5 $ (res ^. strip)
putStrLn $ (res ^. output)
putStrLn . show $ (res ^. illField)
main' str = do
-- Create a prog with some haskell code
let prog = (def :: StateMachine) & code .~ (read str)
-- Execute the prog
let res = execState (exec 5000) prog
-- Now we have access to everything (strip state, code, output, etc.)
showVm res
-- Generate a small random program of 100 instructions, and try to run it
randomTry = do
randLi <- sequence (take 100 $ repeat randomIO) :: IO [Sym]
let vmState = (def :: StateMachine) & code .~ (Code randLi)
let vm = execState (exec 800) vmState
showVm $ vm
return vm
| Zenol/brainf_gen | Main.hs | bsd-2-clause | 1,212 | 0 | 12 | 314 | 318 | 161 | 157 | 24 | 1 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module MySite
( MySite (..)
, MySiteRoute (..)
, resourcesMySite
, Handler
, Widget
, maybeAuth
, requireAuth
, module Yesod.Helpers.Static
, module Yesod.Handler
, module Yesod.Widget
, module Yesod.Dispatch
, module Yesod.Persist
, module Yesod.Content
, module Yesod.Core
, module Text.Blaze
, module Control.Monad.IO.Class
, module Control.Monad.Trans
, module Settings
, module Model
, StaticRoute (..)
, AuthRoute (..)
, Sex(..)
, Helgon(..)
, Farg(..)
, isNollkUser
) where
-- import Yesod
import Yesod.Helpers.Static
import Yesod.Handler
import Yesod.Widget
import Yesod.Dispatch
import Yesod.Persist
import Yesod.Core
import Yesod.Content
import Text.Blaze
import Control.Monad.Trans
import Control.Monad.IO.Class
import Yesod.Auth
import Yesod.Auth.OpenId
import Yesod.Auth.Email
import qualified Settings
import System.Directory
import qualified Data.ByteString.Lazy as L
import Database.Persist.GenericSql
import Settings (hamletFile, cassiusFile, luciusFile, juliusFile, widgetFile)
import Settings (isNollk)
import Model
import StaticFiles
import Data.Maybe (isJust)
import Control.Monad (join, unless)
import Network.Mail.Mime
import qualified Data.Text.Lazy
import qualified Data.Text.Lazy.Encoding
import Text.Jasmine (minifym)
import qualified Data.Text as T
import qualified Yesod.Form as F
import Yesod.Form.Jquery
import Yesod.Form.Nic
import Types
import Kerberos
isNollkUser :: User -> Bool
isNollkUser = isNollk . userIdent
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data MySite = MySite
{ getStatic :: Static -- ^ Settings for static file serving.
, connPool :: Settings.ConnectionPool -- ^ Database connection pool.
}
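-- A hedged sketch of how this foundation value might be assembled at startup
-- (the real wiring lives in the application's main/Controller module; the
-- helper name mkFoundation and how the Static value and connection pool are
-- obtained are illustrative assumptions):
--
-- > mkFoundation :: Static -> Settings.ConnectionPool -> MySite
-- > mkFoundation st pool = MySite { getStatic = st, connPool = pool }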
-- | A useful synonym; most of the handler functions in your application
-- will need to be of this type.
type Handler = GHandler MySite MySite
-- | A useful synonym; most of the widget functions in your application
-- will need to be of this type.
type Widget = GWidget MySite MySite
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://docs.yesodweb.com/book/web-routes-quasi/
--
-- This function does three things:
--
-- * Creates the route datatype MySiteRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route MySite = MySiteRoute
-- * Creates the value resourcesMySite which contains information on the
-- resources declared below. This is used in Controller.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- MySite. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the MySiteRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "MySite" $(parseRoutesFile "config/routes")
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod MySite where
approot _ = Settings.approot
defaultLayout widget = do
mmsg <- getMessage
pc <- widgetToPageContent $ do
widget
addCassius $(Settings.cassiusFile "default-layout")
hamletToRepHtml $(Settings.hamletFile "default-layout")
-- This is done to provide an optimization for serving static files from
-- a separate domain. Please see the staticroot setting in Settings.hs
urlRenderOverride a (StaticR s) =
Just $ uncurry (joinPath a Settings.staticroot) $ renderRoute s
urlRenderOverride _ _ = Nothing
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent ext' _ content = do
let fn = base64md5 content ++ '.' : T.unpack ext'
let content' =
if ext' == "js"
then case minifym content of
Left _ -> content
Right y -> y
else content
let statictmp = Settings.staticdir ++ "/tmp/"
liftIO $ createDirectoryIfMissing True statictmp
let fn' = statictmp ++ fn
exists <- liftIO $ doesFileExist fn'
unless exists $ liftIO $ L.writeFile fn' content'
return $ Just $ Right (StaticR $ StaticRoute ["tmp", T.pack fn] [], [])
-- How to run database actions.
instance YesodPersist MySite where
type YesodDB MySite = SqlPersist
runDB db = liftIOHandler
$ fmap connPool getYesod >>= Settings.runConnectionPool db
instance YesodAuth MySite where
type AuthId MySite = UserId
-- Where to send a user after successful login
loginDest _ = ViewR
-- Where to send a user after logout
logoutDest _ = RootR
getAuthId creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
case x of
Just (uid, _) -> return $ Just uid
Nothing -> do
fmap Just $ insert $ User (credsIdent creds)
authPlugins = [ authKerberos
]
instance YesodNic MySite
instance YesodJquery MySite
instance RenderMessage MySite F.FormMessage where
renderMessage _ _ F.MsgSelectNone = "(obesvarad)"
renderMessage _ _ other = F.defaultFormMessage other
| Tarrasch/Nollform | MySite.hs | bsd-2-clause | 6,094 | 0 | 17 | 1,394 | 1,058 | 601 | 457 | 116 | 1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module Language.Drasil.CodeExpr (new, newWithNamedArgs, message,
msgWithNamedArgs) where
import Language.Drasil
import Language.Drasil.Chunk.Code (CodeIdea)
import Control.Lens ((^.))
new :: (Callable f, HasUID f, CodeIdea f) => f -> [Expr] -> Expr
new c ps = New (c ^. uid) ps []
newWithNamedArgs :: (Callable f, HasUID f, CodeIdea f, HasUID a,
IsArgumentName a) => f -> [Expr] -> [(a, Expr)] -> Expr
newWithNamedArgs c ps ns = New (c ^. uid) ps (zip (map ((^. uid) . fst) ns)
(map snd ns))
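-- For illustration (hedged; ctor, p, argA, argB, e1 and e2 are hypothetical):
-- newWithNamedArgs ctor [p] [(argA, e1), (argB, e2)] yields
-- New (ctor ^. uid) [p] [(argA ^. uid, e1), (argB ^. uid, e2)],
-- i.e. each named argument is encoded as a (argument UID, expression) pair.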
message :: (Callable f, HasUID f, CodeIdea f, HasUID c, HasSpace c, CodeIdea c)
=> c -> f -> [Expr] -> Expr
message o m ps = checkObj (o ^. typ)
where checkObj (Actor _) = Message (o ^. uid) (m ^. uid) ps []
checkObj _ = error $ "Invalid actor message: Actor should have " ++
"Actor space"
msgWithNamedArgs :: (Callable f, HasUID f, CodeIdea f, HasUID c, HasSpace c,
CodeIdea c, HasUID a, IsArgumentName a) => c -> f -> [Expr] -> [(a, Expr)] ->
Expr
msgWithNamedArgs o m ps as = checkObj (o ^. typ)
where checkObj (Actor _) = Message (o ^. uid) (m ^. uid) ps
(zip (map ((^. uid) . fst) as) (map snd as))
checkObj _ = error $ "Invalid actor message: Actor should have " ++
"Actor space"
| JacquesCarette/literate-scientific-software | code/drasil-code/Language/Drasil/CodeExpr.hs | bsd-2-clause | 1,307 | 0 | 14 | 298 | 573 | 308 | 265 | 26 | 2 |
module Graphics.XHB.Connection.Types where
import Graphics.XHB.Shared
import Graphics.XHB.Connection.Open
import Control.Concurrent.STM
import Control.Concurrent
import System.IO
import Data.Word
import Data.Map(Map)
import Graphics.XHB.Gen.Xproto.Types
data Connection = Connection
{conn_error_queue :: TChan SomeError -- read only
,conn_event_queue :: TChan SomeEvent -- read only
,conn_read_loop_tid :: ThreadId
,conn_handle :: MVar Handle -- write only
,conn_reps :: TChan PendedReply -- insert only
,conn_conf :: ConnectionConfig
,conn_next_sequence :: TVar SequenceId
,conn_resource_ids :: TVar [Xid]
,conn_extensions :: TVar ExtensionMap
,conn_dispInfo :: DispName -- what we were told to connect to
}
type ExtensionMap = Map ExtensionId QueryExtensionReply
data ConnectionConfig = ConnectionConfig
{ conf_setup :: Setup
}
type SequenceId = Word16
data PendedReply = PendedReply
{pended_sequence :: SequenceId
,pended_reply :: RawReceipt
}
| aslatter/xhb | Graphics/XHB/Connection/Types.hs | bsd-3-clause | 1,021 | 0 | 10 | 185 | 211 | 131 | 80 | 27 | 0 |
-- | HTML elements
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
module VirtualHom.Internal.Element where
import Control.Applicative
import Control.Lens hiding (children)
import Control.Monad.Cont
import Data.Bifunctor
import Data.Foldable
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
-- Event data, see http://api.jquery.com/category/events/event-object/ for other data that we could get depending on event type
data GenericEventData = GenericEventData{
_timestamp :: !Int,
_pageX :: !Int,
_pageY :: !Int }
deriving (Eq, Ord, Show)
makeLenses ''GenericEventData
data ValueChangedData = ValueChangedData{
_valueGenericData :: !GenericEventData,
_value :: !Text
}
deriving (Eq, Ord, Show)
makeLenses ''ValueChangedData
data KeyboardEventData = KeyboardEventData {
_keyboardGenericData :: !GenericEventData,
_key :: !Text
} deriving (Eq, Ord, Show)
makeLenses ''KeyboardEventData
-- | Collection of callbacks of an element
data Callbacks cb = Callbacks{
_blur :: !(Maybe (GenericEventData -> cb)),
_click :: !(Maybe (GenericEventData -> cb)),
_change :: !(Maybe (ValueChangedData -> cb)),
_contextmenu :: !(Maybe (GenericEventData -> cb)),
_dblclick :: !(Maybe (GenericEventData -> cb)),
_error :: !(Maybe (GenericEventData -> cb)),
_focus :: !(Maybe (GenericEventData -> cb)),
_focusin :: !(Maybe (GenericEventData -> cb)),
_focusout :: !(Maybe (GenericEventData -> cb)),
_hover :: !(Maybe (GenericEventData -> cb)),
_keydown :: !(Maybe (KeyboardEventData -> cb)),
_keypress :: !(Maybe (KeyboardEventData -> cb)),
_keyup :: !(Maybe (KeyboardEventData -> cb)),
_load :: !(Maybe (GenericEventData -> cb)),
_mousedown :: !(Maybe (GenericEventData -> cb)),
_mouseenter :: !(Maybe (GenericEventData -> cb)),
_mouseleave :: !(Maybe (GenericEventData -> cb)),
_mousemove :: !(Maybe (GenericEventData -> cb)),
_mouseout :: !(Maybe (GenericEventData -> cb)),
_mouseover :: !(Maybe (GenericEventData -> cb)),
_mouseup :: !(Maybe (GenericEventData -> cb)),
_ready :: !(Maybe (GenericEventData -> cb)),
_resize :: !(Maybe (GenericEventData -> cb)),
_scroll :: !(Maybe (GenericEventData -> cb)),
_select :: !(Maybe (GenericEventData -> cb)),
_submit :: !(Maybe (GenericEventData -> cb)),
_elementCreated :: !(Maybe (T.Text -> IO ())) -- callback for when this element has been inserted into the DOM. The supplied text is the id of the element.
}
deriving (Functor)
makeLenses ''Callbacks
emptyCb :: Callbacks cb
emptyCb = Callbacks
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
data Elem cb a = Elem{
_elementType :: !Text,
_attributes :: !(Map Text Text),
_content :: !Text,
_children :: [Elem cb a],
_elemID :: !a,
_callbacks :: !(Callbacks cb),
_namespace :: !Text
}
deriving (Functor, Foldable, Traversable)
makeLenses ''Elem
instance Bifunctor Elem where
bimap f g = mapCallbacks f . fmap g
-- | Transform the callbacks in an Elem
mapCallbacks :: (cb -> cc) -> Elem cb a -> Elem cc a
mapCallbacks f elm = elm{
_children = fmap (mapCallbacks f) $ elm^.children,
_callbacks = fmap f $ elm^.callbacks
}
type ElementID = Text
type ElementType = Text
type VirtualElem = Elem () ElementID -- an element whose callbacks are of type ()
-- | Create an element with the specified type and HTML namespace
elm :: Text -> Elem cb ()
elm t = Elem t mempty mempty [] () emptyCb htmlNamespace where
htmlNamespace = "http://www.w3.org/1999/xhtml"
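-- A hedged usage sketch: since makeLenses generates lenses for Elem and
-- Callbacks, an element can be assembled by chaining lens updates
-- (ClickedMsg is a hypothetical callback value):
--
-- > button :: Elem ClickedMsg ()
-- > button = elm "button"
-- >     & content .~ "Click me"
-- >     & attributes . at "class" ?~ "btn"
-- >     & callbacks . click ?~ (\_ -> ClickedMsg)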
-- | Where to insert an element - before another elem, or as (last) child of an
-- elem
data InsertWhere = InsertBefore Text | InsertAsChildOf Text | InsertAfter Text
deriving (Eq, Show)
data RenderingAction c =
DeleteElement{ _elementId :: !ElementID }
| NewElement{ _insertWhere :: !InsertWhere, _elemType :: !Text, _elementId :: !ElementID, _elemNamespace :: !Text }
| SetTextContent{ _elementId :: !ElementID, _text :: !Text }
| RemoveAttribute{ _elementId :: !ElementID, _attribute :: !Text }
| SetAttribute{ _elementId :: !ElementID, _attribute :: !Text, _attrValue :: !Text }
| SetGenericEventCallback{ _elementId :: !ElementID, _callbackName :: !Text, _genericEventCallback :: !(GenericEventData -> c) }
| SetValueCallback{ _elementId :: !ElementID, _callbackName :: !Text, _valueChangedCallback :: !(ValueChangedData -> c) }
| SetKeyEventCallback { _elementId :: !ElementID, _callbackName :: !Text, _keyEventCallback :: !(KeyboardEventData -> c) }
| RemoveCallback{ _elementId :: !ElementID, _callbackName :: !Text }
| GenericIOAction{ _action :: !(IO ()) }
| NoAction
deriving (Functor)
makeLenses ''RenderingAction | j-mueller/virtual-hom | src/VirtualHom/Internal/Element.hs | bsd-3-clause | 5,477 | 0 | 15 | 1,253 | 1,472 | 816 | 656 | 259 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RecordWildCards #-}
module FreeAgent.Plugins.Nagios
( Command(..)
, CheckTCP(..)
, CommandResult(..)
, NagiosConfig(..)
, CheckProcs(..)
, pluginDef
) where
import FreeAgent.Core.Action
import FreeAgent.Core.Action.ShellCommand
import FreeAgent.Core
import FreeAgent.Core.Internal.Lenses
import FreeAgent.AgentPrelude
import Data.Default (Default (..))
import Data.Bifoldable (biList)
-- | Plugin-specific configuration
data NagiosConfig = NagiosConfig {nagiosPluginsPath :: FilePath}
deriving (Show, Eq, Typeable, Generic)
instance Default NagiosConfig where
def = NagiosConfig "/usr/lib/nagios/plugins/"
makeFields ''NagiosConfig
data Command = Command { cmdKey :: Key
, cmdBin :: FilePath
, cmdArgs :: [(Text, Text)]
} deriving (Show, Eq, Typeable, Generic)
deriveSerializers ''Command
instance Stashable Command where
key = cmdKey
data CommandResult = OK | Warning | Critical | Unknown
deriving (Show, Eq, Typeable, Generic)
deriveSerializers ''CommandResult
-- | Provides the PluginDef for the Nagios plugin. Provide this to
-- 'addPlugin' in the 'registerPlugins' block in your app config/main.
-- Provide a NagiosConfig record - use 'def' for default values
--
-- > addPlugin $ Nagios.pluginDef def { nagiosPluginsPath = ... }
data CheckTCP = CheckTCP { _checktcpHost :: Text
, _checktcpPort :: Int
} deriving (Show, Eq, Typeable, Generic)
makeFields ''CheckTCP
deriveSerializers ''CheckTCP
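-- A hedged usage sketch: a TCP check is just a host/port pair, and its
-- Stashable key is derived from them (see the instance further below):
--
-- > sshCheck :: CheckTCP
-- > sshCheck = CheckTCP "localhost" 22 -- key sshCheck == "localhost:22"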
pluginDef :: NagiosConfig -> PluginDef
pluginDef conf = definePlugin "Nagios" conf (return []) [] $
do registerAction (actionType :: Proxy Command)
registerAction (actionType :: Proxy CheckTCP)
registerAction (actionType :: Proxy CheckProcs)
extractConfig' :: (ContextReader m) => m NagiosConfig
extractConfig' = extractConfig $ pluginDef def ^. name
instance Runnable Command where
type RunnableResult Command = CommandResult
exec cmd@Command{..} =
do cmdPath <- commandPath
let shell = (defaultShellCommand cmdKey) {
shellCommand = cmdPath
, shellArgs = foldl' (\xs x -> biList x ++ xs ) [] cmdArgs
, shellSuccessCodes = [0,1,2]
}
runExceptT $
do result' <- tryExecET shell
let Just raw = extractResult result'
let nagresult cmdres = result' {resultWrapped = wrap cmdres, resultResultOf = toAction cmd}
return $ case shellExitCode raw of
0 -> nagresult OK
1 -> nagresult Warning
2 -> nagresult Critical
_ -> error "ShellCommand should have failed ExitCode match."
where
commandPath = do
nagconf <- extractConfig'
return $ nagiosPluginsPath nagconf </> cmdBin
instance Stashable CheckTCP where
key c = c ^. host ++ ":" ++ tshow (c ^. port)
instance Runnable CheckTCP where
type RunnableResult CheckTCP = CommandResult
exec cmd =
runExceptT $
do result' <- tryExecET (Command (key cmd) "check_tcp" makeArgs)
return $ result' {resultResultOf = toAction cmd}
where
makeArgs = [("-H", cmd ^. host), ("-p", tshow $ cmd ^. port)]
data CheckProcs = CheckProcs
{ procKey :: Key
, procName :: Text
, procMin :: Int
, procMax :: Int
, procAllMin :: Int
, procAllMax :: Int
} deriving (Show, Eq, Typeable, Generic)
deriveSerializers ''CheckProcs
instance Stashable CheckProcs where
key = procKey
instance Runnable CheckProcs where
type RunnableResult CheckProcs = CommandResult
exec cmd@CheckProcs{..} =
runExceptT $ do
result' <- tryExecET (Command procKey "check_procs" makeArgs)
return $ result' {resultResultOf = toAction cmd}
where makeArgs = [("-w", nameMinMax), ("-c", allMinMax), ("-C", procName) ]
allMinMax = tshow procAllMin ++ ":" ++ tshow procAllMax
nameMinMax = tshow procMin ++ ":" ++ tshow procMax
| jeremyjh/free-agent | examples/famon/FreeAgent/Plugins/Nagios.hs | bsd-3-clause | 4,621 | 0 | 17 | 1,323 | 1,103 | 595 | 508 | 100 | 1 |
module BitD.Util.State ( modify'
) where
import qualified Control.Monad.State as State
modify' :: State.MonadState s m => (s -> s) -> m ()
modify' f = do v <- State.get
State.put $! f v
| benma/bitd | src/BitD/Util/State.hs | bsd-3-clause | 226 | 0 | 8 | 74 | 82 | 44 | 38 | 5 | 1 |
module Common.Utils (
if'
, (?)
, isqrt
, modifyArray
, initArray
, submasks
, combmasks
) where
import Control.Monad (forM_)
import Data.Array.MArray (MArray, newArray, readArray, writeArray)
import Data.Bits (shiftL, shiftR, complement, (.&.), (.|.))
import Data.Ix (Ix)
if' :: Bool -> t -> t -> t
{-# INLINE if' #-}
if' p a b = if p
then a
else b
infixl 2 ?
{-# INLINE (?) #-}
p ? t = if' p (const t) id
isqrt :: (Integral a) => a -> a
{-# INLINABLE isqrt #-}
isqrt = floor . sqrt . fromIntegral
modifyArray :: (MArray a e m, Ix i) => a i e -> (e -> e) -> i -> m ()
{-# INLINABLE modifyArray #-}
modifyArray a f i = readArray a i >>= writeArray a i . f
initArray :: (MArray a e m, Ix i) => (i, i) -> e -> [(i, e)] -> m (a i e)
initArray (l, u) initValue setValues = do
a <- newArray (l, u) initValue
forM_ setValues $ \(i, e) -> writeArray a i e
return a
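-- | Enumerate every submask of the given bitmask, i.e. all values sub with
-- sub .&. mask == sub, including the mask itself and 0.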
submasks :: Int -> [Int]
{-# INLINE submasks #-}
submasks mask = 0 : takeWhile (/= 0) (iterate (\sub -> (sub - 1) .&. mask) mask)
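-- | Enumerate all n-bit masks with exactly k bits set, in increasing order
-- (the step function is Gosper's hack).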
combmasks :: Int -> Int -> [Int]
{-# INLINE combmasks #-}
combmasks n k = takeWhile (< limit) $ iterate iter $ (1 `shiftL` k) - 1
where
limit = 1 `shiftL` n
iter comb = (((comb .&. complement y) `div` x) `shiftR` 1) .|. y
where
x = comb .&. (-comb)
y = comb + x
| foreverbell/project-euler-solutions | lib/Common/Utils.hs | bsd-3-clause | 1,307 | 0 | 14 | 321 | 610 | 340 | 270 | 39 | 2 |
{-# LANGUAGE PolyKinds #-}
module Data.Vinyl.Class.Implication where
import Data.Constraint
import Data.List.TypeLevel.Cmp (eqTProxy)
import Data.List.TypeLevel.Constraint (ListAll)
import Data.Proxy (Proxy (Proxy))
import Data.Tagged.Functor (TaggedFunctor (..))
import Data.Tuple.TypeLevel (ConstrainSnd, Snd)
import Data.Type.Equality ((:~:) (Refl))
import Data.Typeable
import Data.Vinyl.Core (Rec (..))
import Data.Vinyl.TypeLevel (RecAll)
recAllEq' :: Rec f rs -> (RecAll f rs Eq :- Eq (Rec f rs))
recAllEq' RNil = Sub Dict
recAllEq' (_ :& rs) = Sub $ case recAllEq' rs of
Sub Dict -> Dict
recAllEq :: Proxy f -> Rec proxy rs -> (RecAll f rs Eq :- Eq (Rec f rs))
recAllEq _ RNil = Sub Dict
recAllEq p (_ :& rs) = Sub $ case recAllEq p rs of
Sub Dict -> Dict
recAllOrd' :: Rec f rs -> (RecAll f rs Ord :- Ord (Rec f rs))
recAllOrd' RNil = Sub Dict
recAllOrd' (_ :& rs) = Sub $ case recAllOrd' rs of
Sub Dict -> Dict
recAllOrd :: Proxy f -> Rec proxy rs -> (RecAll f rs Ord :- Ord (Rec f rs))
recAllOrd _ RNil = Sub Dict
recAllOrd p (_ :& rs) = Sub $ case recAllOrd p rs of
Sub Dict -> Dict
listAllOrd :: forall f rs proxy.
Proxy f -> Rec proxy rs -> (forall a. Ord a :- Ord (f a)) -> (ListAll rs Ord :- Ord (Rec f rs))
listAllOrd f pRec cEntail =
Sub $ case listAllToRecAll (Proxy :: Proxy Ord) f pRec cEntail of
Sub Dict -> case recAllOrd f pRec of
Sub Dict -> Dict
listAllToRecAll :: forall c f rs proxy.
Proxy c -> Proxy f -> Rec proxy rs -> (forall a. c a :- c (f a)) -> (ListAll rs c :- RecAll f rs c)
listAllToRecAll _ _ RNil _ = Sub Dict
listAllToRecAll c f ((_ :: proxy r) :& rs) cEntail =
Sub $ case listAllToRecAll c f rs cEntail of
Sub Dict -> case (cEntail :: (c r :- c (f r))) of
Sub Dict -> Dict
listAllToTaggedRecAll :: forall (c :: * -> Constraint) f (rs :: [(k,*)]) proxy.
Proxy c -> Proxy f -> Rec proxy rs
-> (forall (a :: (k,*)). c (Snd a) :- c (TaggedFunctor f a))
-> (ListAll rs (ConstrainSnd c) :- RecAll (TaggedFunctor f) rs c)
listAllToTaggedRecAll _ _ RNil _ = Sub Dict
listAllToTaggedRecAll c f ((_ :: proxy r) :& rs) cEntail =
Sub $ case listAllToTaggedRecAll c f rs cEntail of
Sub Dict -> case (cEntail :: (c (Snd r) :- c (TaggedFunctor f r))) of
Sub Dict -> Dict
eqTRec :: (ListAll rs Typeable, ListAll ss Typeable)
=> Rec proxy rs -> Rec proxy ss -> Maybe (rs :~: ss)
eqTRec RNil RNil = Just Refl
eqTRec (r :& rs) (s :& ss) = case eqTProxy r s of
Nothing -> Nothing
Just Refl -> case eqTRec rs ss of
Nothing -> Nothing
Just Refl -> Just Refl
| andrewthad/vinyl-vectors | src/Data/Vinyl/Class/Implication.hs | bsd-3-clause | 2,741 | 0 | 15 | 745 | 1,234 | 633 | 601 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Spock.FrameworkSpecHelper where
import Test.Hspec
import Test.Hspec.Wai
import Data.Monoid
import Data.Word
import qualified Data.ByteString.Lazy.Char8 as BSLC
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.Wai as Wai
sizeLimitSpec :: (Word64 -> IO Wai.Application) -> Spec
sizeLimitSpec app =
with (app maxSize) $
describe "Request size limit" $
do it "allows small enough requests the way" $
do post "/size" okBs `shouldRespondWith` matcher 200 okBs
post "/size" okBs2 `shouldRespondWith` matcher 200 okBs2
it "denys large requests the way" $
post "/size" tooLongBs `shouldRespondWith` 413
where
matcher s b =
ResponseMatcher
{ matchStatus = s
, matchBody = Just b
, matchHeaders = []
}
maxSize = 1024
okBs = BSLC.replicate (fromIntegral maxSize - 50) 'i'
okBs2 = BSLC.replicate (fromIntegral maxSize) 'j'
tooLongBs = BSLC.replicate (fromIntegral maxSize + 100) 'k'
frameworkSpec :: IO Wai.Application -> Spec
frameworkSpec app =
with app $
do routingSpec
actionSpec
headerTest
cookieTest
routingSpec :: SpecWith Wai.Application
routingSpec =
describe "Routing Framework" $
do it "allows root actions" $
get "/" `shouldRespondWith` "root" { matchStatus = 200 }
it "routes different HTTP-verbs to different actions" $
do verbTest get "GET"
verbTest (`post` "") "POST"
verbTest (`put` "") "PUT"
verbTest delete "DELETE"
verbTest (`patch` "") "PATCH"
verbTestGp get "GETPOST"
verbTestGp (`post` "") "GETPOST"
it "can extract params from routes" $
get "/param-test/42" `shouldRespondWith` "int42" { matchStatus = 200 }
it "can handle multiple matching routes" $
get "/param-test/static" `shouldRespondWith` "static" { matchStatus = 200 }
it "ignores trailing slashes" $
get "/param-test/static/" `shouldRespondWith` "static" { matchStatus = 200 }
it "works with subcomponents" $
do get "/subcomponent/foo" `shouldRespondWith` "foo" { matchStatus = 200 }
get "/subcomponent/subcomponent2/bar" `shouldRespondWith` "bar" { matchStatus = 200 }
it "allows the definition of a fallback handler" $
get "/askldjas/aklsdj" `shouldRespondWith` "askldjas/aklsdj" { matchStatus = 200 }
it "detected the preferred format" $
request "GET" "/preferred-format" [("Accept", "text/html,application/xml;q=0.9,image/webp,*/*;q=0.8")] "" `shouldRespondWith` "html" { matchStatus = 200 }
it "/test-slash and test-noslash are the same thing" $
do get "/test-slash" `shouldRespondWith` "ok" { matchStatus = 200 }
get "test-slash" `shouldRespondWith` "ok" { matchStatus = 200 }
get "/test-noslash" `shouldRespondWith` "ok" { matchStatus = 200 }
get "test-noslash" `shouldRespondWith` "ok" { matchStatus = 200 }
where
verbTestGp verb verbVerbose =
verb "/verb-test-gp" `shouldRespondWith` (verbVerbose { matchStatus = 200 })
verbTest verb verbVerbose =
verb "/verb-test" `shouldRespondWith` (verbVerbose { matchStatus = 200 })
actionSpec :: SpecWith Wai.Application
actionSpec =
describe "Action Framework" $ return ()
cookieTest :: SpecWith Wai.Application
cookieTest =
describe "Cookies" $
do it "sets single cookies correctly" $
get "/cookie/single" `shouldRespondWith`
"set"
{ matchStatus = 200
, matchHeaders =
[ matchCookie "single" "test"
]
}
it "sets multiple cookies correctly" $
get "/cookie/multiple" `shouldRespondWith`
"set"
{ matchStatus = 200
, matchHeaders =
[ matchCookie "multiple1" "test1"
, matchCookie "multiple2" "test2"
]
}
headerTest :: SpecWith Wai.Application
headerTest =
describe "Headers" $
do it "supports custom headers" $
get "/set-header" `shouldRespondWith`
"ok"
{ matchStatus = 200
, matchHeaders =
[ "X-FooBar" <:> "Baz"
]
}
it "supports multi headers" $
get "/set-multi-header" `shouldRespondWith`
"ok"
{ matchStatus = 200
, matchHeaders =
[ "Content-Language" <:> "de"
, "Content-Language" <:> "en"
]
}
matchCookie :: T.Text -> T.Text -> MatchHeader
matchCookie name val =
MatchHeader $ \headers ->
let relevantHeaders = filter (\h -> fst h == "Set-Cookie") headers
loop [] =
Just ("No cookie named " ++ T.unpack name ++ " with value "
++ T.unpack val ++ " found")
loop (x:xs) =
let (cname, cval) = T.breakOn "=" $ fst $ T.breakOn ";" $ T.decodeUtf8 $ snd x
in if cname == name && cval == "=" <> val
then Nothing
else loop xs
in loop relevantHeaders
| nmk/Spock | test/Web/Spock/FrameworkSpecHelper.hs | bsd-3-clause | 5,541 | 0 | 20 | 1,892 | 1,269 | 664 | 605 | 118 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Data.IHO.S57.DSID where
import Control.Lens
import Data.Text (Text)
import Data.Data (Data)
import Data.Typeable (Typeable)
import Data.Tree
import Data.IHO.S57.Types
data DataStructure =
CartographicSpaghetti |
ChainNode |
PlanarGraph |
FullTopo |
TopoIrrelevant
deriving (Show, Eq, Data, Typeable)
instance Enum DataStructure where
toEnum 1 = CartographicSpaghetti
toEnum 2 = ChainNode
toEnum 3 = PlanarGraph
toEnum 4 = FullTopo
toEnum 255 = TopoIrrelevant
toEnum t = error $ "toEnum: " ++ show t ++ " is not a DataStructure"
fromEnum CartographicSpaghetti = 1
fromEnum ChainNode = 2
fromEnum PlanarGraph = 3
fromEnum FullTopo = 4
fromEnum TopoIrrelevant = 255
instance FromS57Value DataStructure where
fromS57Value (S57CharData "CS") = CartographicSpaghetti
fromS57Value (S57CharData "CN") = ChainNode
fromS57Value (S57CharData "PG") = PlanarGraph
fromS57Value (S57CharData "FT") = FullTopo
fromS57Value (S57CharData "NO") = TopoIrrelevant
fromS57Value (S57Int i) = toEnum i
fromS57Value v = error $ "fromS57Value DataStructure undefined for " ++ show v
data DSSI =
DSSI { _dssiDataStructure :: ! DataStructure
, _dssiATTFLexicalLevel :: ! Int
, _dssiNATFLexicalLevel :: ! Int
, _dssiMetaRecords :: ! Int
, _dssiCartographicRecords :: ! Int
, _dssiGeoRecords :: ! Int
, _dssiCollectionRecords :: ! Int
, _dssiIsolatedNodeRecords :: ! Int
, _dssiConnectedNodeRecords :: ! Int
, _dssiEdgeRecords :: ! Int
, _dssiFaceRecords :: ! Int
} deriving (Show, Eq, Data, Typeable)
makeClassy ''DSSI
readDSSI :: Tree S57Structure -> DSSI
readDSSI r
| ((structureFieldName . rootLabel $ r) /= "DSSI") =
error $ "not an DSSI record: " ++ show r
| otherwise =
DSSI { _dssiDataStructure = lookupField r "DSTR"
, _dssiATTFLexicalLevel = lookupField r "AALL"
, _dssiNATFLexicalLevel = lookupField r "NALL"
, _dssiMetaRecords = lookupField r "NOMR"
, _dssiCartographicRecords = lookupField r "NOCR"
, _dssiGeoRecords = lookupField r "NOGR"
, _dssiCollectionRecords = lookupField r "NOLR"
, _dssiIsolatedNodeRecords = lookupField r "NOIN"
, _dssiConnectedNodeRecords = lookupField r "NOCN"
, _dssiEdgeRecords = lookupField r "NOED"
, _dssiFaceRecords = lookupField r "NOFA"
}
data ExchangePurpose = New | Revision
deriving (Show, Eq, Data, Typeable)
instance Enum ExchangePurpose where
toEnum 1 = New
toEnum 2 = Revision
toEnum t = error $ "toEnum: " ++ show t ++ " is not a ExchangePurpose"
fromEnum New = 1
fromEnum Revision = 2
instance FromS57Value ExchangePurpose where
fromS57Value (S57CharData "N") = New
fromS57Value (S57CharData "R") = Revision
fromS57Value (S57Int i) = toEnum i
fromS57Value v = error $ "fromS57Value ExchangePurpose undefined for " ++ show v
data ProductSpecification = ENC | ODD
deriving (Show, Eq, Data, Typeable)
instance Enum ProductSpecification where
toEnum 1 = ENC
toEnum 2 = ODD
toEnum t = error $ "toEnum: " ++ show t ++ " is not a ProductSpecification"
fromEnum ENC = 1
fromEnum ODD = 2
instance FromS57Value ProductSpecification where
fromS57Value (S57CharData "ENC") = ENC
fromS57Value (S57CharData "ODD") = ODD
fromS57Value (S57Int i) = toEnum i
fromS57Value v = error $ "fromS57Value ProducatSpecification undefined for " ++ show v
data ApplicationProfile = ENCNew | ENCRevision | IHODataDictionary
deriving (Show, Eq, Data, Typeable)
instance Enum ApplicationProfile where
toEnum 1 = ENCNew
toEnum 2 = ENCRevision
toEnum 3 = IHODataDictionary
toEnum t = error $ "toEnum: " ++ show t ++ " is not a ApplicationProfile"
fromEnum ENCNew = 1
fromEnum ENCRevision = 2
fromEnum IHODataDictionary = 3
instance FromS57Value ApplicationProfile where
fromS57Value (S57CharData "EN") = ENCNew
fromS57Value (S57CharData "ER") = ENCRevision
fromS57Value (S57CharData "DD") = IHODataDictionary
fromS57Value (S57Int i) = toEnum i
fromS57Value v = error $ "fromS57Value ProducatSpecification undefined for " ++ show v
data DSID =
DSID { _dsidExchangePurpose :: ! ExchangePurpose
, _dsidIntendedUsage :: ! Int
, _dsidDataSetName :: ! Text
, _dsidEdition :: ! Text
, _dsidUpdate :: ! Text
, _dsidUpdateApplicationDate :: ! Text
, _dsidIssueDate :: ! Text
, _dsidS57Edition :: ! Double
, _dsidProductSpecification :: ! ProductSpecification
, _dsidProductSpecificationDescrption :: ! Text
, _dsidProductSpecificationEdition :: ! Text
, _dsidApplicationProfile :: ! ApplicationProfile
, _dsidProducingAgency :: ! Int
, _dsidDSSI :: ! DSSI
} deriving (Show, Eq, Data, Typeable)
makeLenses ''DSID
instance HasDSSI DSID where
dSSI = dsidDSSI
instance FromS57FileRecord DSID where
fromS57FileDataRecord r
| ((structureFieldName . rootLabel $ r) /= "DSID") =
error $ "not an DSID record: " ++ show r
| otherwise =
DSID { _dsidExchangePurpose = lookupField r "EXPP"
, _dsidIntendedUsage = lookupField r "INTU"
, _dsidDataSetName = lookupField r "DSNM"
, _dsidEdition = lookupField r "EDTN"
, _dsidUpdate = lookupField r "UPDN"
, _dsidUpdateApplicationDate = lookupField r "UADT"
, _dsidIssueDate = lookupField r "ISDT"
, _dsidS57Edition = lookupField r "STED"
, _dsidProductSpecification = lookupField r "PRSP"
, _dsidProductSpecificationDescrption = lookupField r "PSDN"
, _dsidProductSpecificationEdition = lookupField r "PRED"
, _dsidApplicationProfile = lookupField r "PROF"
, _dsidProducingAgency = lookupField r "AGEN"
, _dsidDSSI = readDSSI $ lookupChildField "DSID" r "DSSI"
}
| alios/iho-s57 | library/Data/IHO/S57/DSID.hs | bsd-3-clause | 6,252 | 50 | 13 | 1,640 | 1,540 | 817 | 723 | 197 | 1 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS -fno-warn-orphans #-}
module Database.Sqroll.Internal
( NamedTable (..)
, HasTable (..)
, aliasTable
, Key (..)
, Entity (..)
, Stmt (..)
, Sqroll (sqrollSql)
, sqrollOpen
, sqrollOpenReadOnly
, sqrollOpenWith
, sqrollClose
, withSqroll
, withSqrollReadOnly
, withSqrollWith
, sqrollCheckpoint
, sqrollTransaction
, sqrollAppend
, sqrollAppend_
, makeSelectStatement
, makeSelectByKeyStatement
, sqrollSelectEntity
, sqrollSelectFromRowId
, sqrollRebindKey
, sqrollGetList
, sqrollGetLazyList
, sqrollFold
, sqrollFoldAll
, sqrollGetOne
, sqrollGetMaybe
, prepareTable
, mkSelectPeek
, sqrollInitializeTable
) where
import Control.Applicative (pure, (<$>), (<*>), (<$))
import Control.Concurrent.MVar (MVar, newMVar, putMVar, takeMVar)
import Control.Exception.Lifted (bracket)
import Control.Monad (unless)
import Control.Monad.Base (liftBase)
import Control.Monad.Trans (MonadIO, liftIO)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.HashMap.Lazy (HashMap)
import qualified Data.HashMap.Lazy as HM
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import Data.Int (Int64)
import Foreign.ForeignPtr
import GHC.Generics (Generic, Rep, from, to)
import Unsafe.Coerce (unsafeCoerce)
import System.IO.Unsafe (unsafeInterleaveIO)
import Database.Sqroll.Sqlite3
import Database.Sqroll.Table
import Database.Sqroll.Table.Field
import Database.Sqroll.Table.Generic
class HasTable t where
table :: NamedTable t
default table :: (Generic t, GNamedTable (Rep t)) => NamedTable t
table = gNamedTable to from
instance (HasTable a, HasTable b) => HasTable (Key a, b) where
table = let (NamedTable name_a _) = table :: NamedTable a
(NamedTable name_b tbl) = table :: NamedTable b
name = name_b ++ "__of__" ++ name_a
key = Primitive $ FieldInfo (name_a ++ "__id") fst
in NamedTable name ((,) <$> key <*> rebuild tbl)
where
rebuild :: Table b x -> Table (Key a, b) x
rebuild (Primitive (FieldInfo n a)) = Primitive (FieldInfo n (a . snd))
rebuild (Map f t) = Map f (rebuild t)
rebuild (Pure a) = Pure a
rebuild (App f t) = App (rebuild f) (rebuild t)
instance HasTable a => Field [a] where
fieldTypes = const []
fieldDefault = []
fieldPoke _ _ _ = return ()
fieldPeek _ _ = return []
-- | Useful for creating tables for newtypes
aliasTable :: HasTable t => String -> (t -> u) -> (u -> t) -> NamedTable u
aliasTable name mk unmk =
let NamedTable _ table' = table
in NamedTable name $ mapTable mk unmk table'
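-- A hedged example (User and AdminUser are hypothetical types; User is
-- assumed to have a HasTable instance): reuse an existing table layout for a
-- newtype under a different table name.
--
-- > newtype AdminUser = AdminUser { unAdminUser :: User }
-- > instance HasTable AdminUser where
-- >     table = aliasTable "admin_user" AdminUser unAdminUser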
-- Foreign key
newtype Key a = Key {unKey :: SqlRowId}
deriving (Eq, Show, Enum, Ord)
-- | Sql statement with insertion support
newtype IStmt a = IStmt (SqlFStmt, SqlStmt -> a -> IO ())
-- | Sql statement with peek support, first argument shows result type
-- which you can get from this Stmt, second one shows if this statement operates with
-- specific foreign key
newtype Stmt a b = Stmt { unStmt :: (SqlFStmt, SqlStmt -> IO (Maybe a)) }
instance Show (Stmt a b) where
show _ = "Sqroll Stmt"
data Entity a
= Entity
{ entityKey :: (Key a)
, entityVal :: a
} deriving (Eq, Show, Ord)
instance forall a. HasTable a => Field (Key a) where
fieldTypes = const [SqlInteger]
fieldIndexes = const [IndexFK $ tableName (table :: NamedTable a)]
fieldDefault = Key (-1)
fieldPoke stmt n (Key x) = sqlBindInt64 stmt n x
{-# INLINE fieldPoke #-}
fieldPeek stmt = fmap Key . sqlColumnInt64 stmt
{-# INLINE fieldPeek #-}
data SqrollCache a = SqrollCache
{ sqrollCacheInsert :: MVar (IStmt a)
}
-- | Create tables and indexes (if not exist...), ensure we have the correct
-- defaults
prepareTable :: HasTable a
=> Sqroll -> Maybe a -> IO (NamedTable a)
prepareTable sqroll defaultRecord = do
unless (sqrollReadOnly sqroll) $ do
sqlExecute sql $ tableCreate table'
mapM_ (sqlExecute sql) $ tableIndexes table'
tableMakeDefaults sql defaultRecord table'
where
sql = sqrollSql sqroll
table' = table
makeSqrollCacheFor :: HasTable a => Sqroll -> Maybe a -> IO (SqrollCache a)
makeSqrollCacheFor sqroll defaultRecord = do
table' <- prepareTable sqroll defaultRecord
stmt <- sqlPrepare (sqrollSql sqroll) (tableInsert table')
cache <- newMVar $ IStmt (stmt, tablePoke table')
return SqrollCache
{ sqrollCacheInsert = cache
}
mkSelectPeek :: NamedTable a -> SqlStmt -> IO (Maybe a)
mkSelectPeek table' stmt = do
hasData <- sqlStep stmt
if hasData
then Just <$> tablePeek table' stmt
else do sqlReset stmt
return Nothing
-- | Make a statement to select every item of the given type
--
-- You can pass a default value - its fields will be used to replace those
-- missing from the database. If Nothing is passed instead missing fields will
-- be derived automatically - most likely empty strings, 0 values and so on.
makeSelectStatement :: HasTable a => Sqroll -> Maybe a -> IO (Stmt a a)
makeSelectStatement sqroll defaultRecord = do
table' <- prepareTable sqroll defaultRecord
stmt <- sqlPrepare (sqrollSql sqroll) (tableSelect table'
++ " WHERE rowid >= ? ORDER BY rowid")
withForeignPtr stmt $ \raw -> sqlBindInt64 raw 1 0
return $ Stmt (stmt, mkSelectPeek table')
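-- A hedged usage sketch (LogLine is a hypothetical record with a HasTable
-- instance):
--
-- > stmt <- makeSelectStatement sqroll (Nothing :: Maybe LogLine)
-- > logLines <- sqrollGetList stmt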
-- | Make a statement to select every item of the given type taking
-- only those where foreign key value matches to a given one.
makeSelectByKeyStatement :: forall a b. (HasTable a, HasTable b)
=> Sqroll -> Maybe a -> Key b -> IO (Stmt a (Key b))
makeSelectByKeyStatement sqroll defaultRecord key = do
table' <- prepareTable sqroll defaultRecord
case tableRefers foreignTable table' of
[c] -> do
stmt <- sqlPrepare (sqrollSql sqroll) (tableSelect table'
++ " WHERE rowid >= ? AND " ++ c ++ " = ? ORDER BY rowid")
withForeignPtr stmt $ \raw -> do
sqlBindInt64 raw 1 0
sqlBindInt64 raw 2 (unKey key)
return $ Stmt (stmt, mkSelectPeek table')
[] -> error' $ "Table " ++ tableName table' ++
" does not refer to Table " ++ tableName foreignTable
_ -> error' $ "There is more than one reference from " ++ tableName table' ++
" to " ++ tableName foreignTable ++ " so I don't know which one to use."
where
error' = error . ("Database.Sqroll.Internal.makeSelectByKeyStatement: " ++)
foreignTable = table :: NamedTable b
-- | By default select statements return raw values.
-- Use this to get 'Entity' values instead.
sqrollSelectEntity :: HasTable a => Stmt a b -> Stmt (Entity a) b
sqrollSelectEntity (Stmt (stmt, peek)) = -- {{{
let peek' s = do mVal <- peek s
case mVal of
Just entityVal -> do
entityKey <- Key <$> sqlGetRowId s
return $ Just Entity {..}
Nothing -> return Nothing
in (Stmt (stmt, peek'))-- }}}
-- | Start from given rowid other than the first one
sqrollSelectFromRowId :: Stmt a b -> Int64 -> IO ()
sqrollSelectFromRowId (Stmt (stmt, _)) i = withForeignPtr stmt $ \raw -> sqlBindInt64 raw 1 i
-- | Bind a new value for the foreign key specified in this statement
sqrollRebindKey :: HasTable b => Stmt a (Key b) -> Int64 -> IO ()
sqrollRebindKey (Stmt (stmt, _)) i = withForeignPtr stmt $ \raw -> sqlBindInt64 raw 2 i
-- | Get all available results from given statement as a one strict list
sqrollGetList :: Stmt a b -> IO [a]
sqrollGetList (Stmt (stmt, peek)) = go-- {{{
where
go = withForeignPtr stmt $ \raw -> do
mPeekResult <- peek raw
case mPeekResult of
Just v -> do
rest <- go
return $ v : rest
Nothing -> sqlReset raw >> return []-- }}}
-- | Get all available results from given statement as a one lazy list
sqrollGetLazyList :: Stmt a b -> IO [a]
sqrollGetLazyList (Stmt (stmt, peek)) = go-- {{{
where
go = withForeignPtr stmt $ \raw -> do
mPeekResult <- peek raw
case mPeekResult of
Just v -> do
rest <- unsafeInterleaveIO go
return $ v : rest
Nothing -> sqlReset raw >> return []-- }}}
-- | Fold over all available results in given statement
sqrollFoldAll :: MonadIO m => (b -> a -> m b) -> b -> Stmt a c -> m b
sqrollFoldAll f initialValue (Stmt (stmt, peek)) = go initialValue-- {{{
where
go b = do
mPeekResult <- liftIO $ withForeignPtr stmt peek
case mPeekResult of
Just a -> do
b' <- f b a
go b'
Nothing -> liftIO (withForeignPtr stmt sqlReset) >> return b-- }}}
-- | Fold over all available results in given statement with option to interrupt computation
-- (return False as second element of the pair to interrupt), after interruption statement
-- will point to the next available row
sqrollFold :: MonadIO m => (b -> a -> m (b, Bool)) -> b -> Stmt a c -> m b
sqrollFold f initialValue fstmt@(Stmt (stmt, peek)) = go initialValue-- {{{
where
go b = do
mPeekResult <- liftIO $ withForeignPtr stmt peek
case mPeekResult of
Just a -> do
(b', continueFolding) <- f b a
if continueFolding
then go b'
else liftIO $ withForeignPtr stmt $ \raw -> do
rowId <- sqlGetRowId raw
sqlReset raw
sqrollSelectFromRowId fstmt rowId
return b'
Nothing -> liftIO (withForeignPtr stmt sqlReset) >> return b-- }}}
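-- A hedged usage sketch: count rows but stop folding after the first 100;
-- the statement is then positioned at the next unread row:
--
-- > n <- sqrollFold (\acc _ -> return (acc + 1, acc + 1 < 100)) (0 :: Int) stmt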
-- | Get one value from the statement, will die with error in case of failure
sqrollGetOne :: Stmt a b -> IO a
sqrollGetOne (Stmt (stmt, peek)) = withForeignPtr stmt $ \raw -> do-- {{{
mPeekResult <- peek raw
case mPeekResult of
Just a -> sqlReset raw >> return a
Nothing -> error "Expected to get at least one value in sqrollGetOne, but got none"-- }}}
-- | Get one value if it's available
sqrollGetMaybe :: Stmt a b -> IO (Maybe a)
sqrollGetMaybe (Stmt (stmt, peek)) = withForeignPtr stmt $ \raw -> do
result <- peek raw
sqlReset raw
return result
data Sqroll = Sqroll
{ sqrollSql :: Sql
, sqrollOpenFlags :: [SqlOpenFlag]
, sqrollLock :: MVar ()
, sqrollCache :: IORef (HashMap String (SqrollCache ()))
}
-- | Open sqroll log with default settings.
--
-- For most cases, it is recommended to use 'withSqroll' instead,
-- which guarantees properly closing the database, even in the
-- presence of exceptions.
--
-- If you do not need to write to the database, prefer 'sqrollOpenReadOnly'
-- / 'withSqrollReadOnly' to prevent SQLite from creating @-shm@ and @-wal@
-- files.
sqrollOpen :: FilePath -> IO Sqroll
sqrollOpen filePath = sqrollOpenWith filePath sqlDefaultOpenFlags
-- | Same as @'sqrollOpen' filePath ['SqlOpenReadOnly']@. See 'sqrollOpen' for benefits.
sqrollOpenReadOnly :: FilePath -> IO Sqroll
sqrollOpenReadOnly filePath = sqrollOpenWith filePath [SqlOpenReadOnly]
-- | @withSqroll path act@ opens a sqroll database using 'sqrollOpen' and passes
-- the resulting handle to the computation @act@. The handle will be
-- closed on exit from 'withSqroll', whether by normal termination or by
-- raising an exception. If closing the handle raises an exception, then
-- this exception will be raised by 'withSqroll' rather than any exception
-- raised by 'act'.
withSqroll :: (MonadBaseControl IO m) => FilePath -> (Sqroll -> m a) -> m a
withSqroll path = withSqrollWith path sqlDefaultOpenFlags
-- | Same as @'withSqrollWith' filePath ['SqlOpenReadOnly'] action@. See 'sqrollOpen' for benefits.
withSqrollReadOnly :: (MonadBaseControl IO m) => FilePath -> (Sqroll -> m a) -> m a
withSqrollReadOnly path = withSqrollWith path [SqlOpenReadOnly]
-- | Same as withSqroll with custom settings.
withSqrollWith :: (MonadBaseControl IO m) => FilePath -> [SqlOpenFlag] -> (Sqroll -> m a) -> m a
withSqrollWith path flags = bracket (liftBase $ sqrollOpenWith path flags) (liftBase . sqrollClose)
-- | Open sqroll log with custom settings
sqrollOpenWith :: FilePath -> [SqlOpenFlag] -> IO Sqroll
sqrollOpenWith filePath flags = do
s <- Sqroll <$> sqlOpen filePath flags <*> pure flags <*> newMVar ()
<*> newIORef HM.empty
return s
-- | Close sqroll log. All running statements will be finalized automatically
sqrollClose :: Sqroll -> IO ()
sqrollClose = sqlClose . sqrollSql
-- | Move all the data from the WAL file to the main db file. A checkpoint
-- is performed automatically when the database is closed.
-- Doing checkpoints manually might make logging performance more predictable.
sqrollCheckpoint :: Sqroll -> IO ()
sqrollCheckpoint = sqlCheckpoint . sqrollSql
sqrollReadOnly :: Sqroll -> Bool
sqrollReadOnly = (SqlOpenReadOnly `elem`) . sqrollOpenFlags
sqrollGetCache :: forall a. HasTable a => Sqroll -> IO (SqrollCache a)
sqrollGetCache sqroll = do
cache <- readIORef (sqrollCache sqroll)
case HM.lookup name cache of
Just sq -> return $ unsafeCoerce sq
Nothing -> do
sq <- makeSqrollCacheFor sqroll Nothing
writeIORef (sqrollCache sqroll) $
HM.insert name (unsafeCoerce sq) cache
return sq
where
table' = table :: NamedTable a
name = tableName table'
sqrollInitializeTable :: forall a. HasTable a => Sqroll -> a -> IO ()
sqrollInitializeTable sqroll _ = () <$ (sqrollGetCache sqroll :: IO (SqrollCache a))
-- | Perform a set of logging actions inside an sqlite transaction
sqrollTransaction :: MonadIO m => Sqroll -> m a -> m a
sqrollTransaction sqroll f = do
() <- liftIO $ takeMVar (sqrollLock sqroll)
liftIO $ sqlExecute (sqrollSql sqroll) "BEGIN"
x <- f
liftIO $ sqlExecute (sqrollSql sqroll) "COMMIT"
liftIO $ putMVar (sqrollLock sqroll) ()
return x
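-- A hedged usage sketch: batch many appends into a single transaction
-- (records is a hypothetical list of values with a HasTable instance):
--
-- > sqrollTransaction sqroll $ mapM_ (sqrollAppend_ sqroll) records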
-- | Append a value to the sqroll db and retrieve a key for that value. In most cases
-- you want to use 'sqrollAppend_'
sqrollAppend :: HasTable a => Sqroll -> a -> IO (Key a)
sqrollAppend sqroll x = do
sqlExecute (sqrollSql sqroll) "SAVEPOINT getrowid"
sqrollAppend_ sqroll x
stmt <- sqlPrepare (sqrollSql sqroll) "SELECT last_insert_rowid();"
rowId <- withForeignPtr stmt $ \raw -> do
sqlStep_ raw
sqlColumnInt64 raw 0
sqlExecute (sqrollSql sqroll) "RELEASE SAVEPOINT getrowid"
return (Key rowId)
{-# INLINE sqrollAppend #-}
-- | Appends a new value to sqroll db, cheaper than 'sqrollAppend'
sqrollAppend_ :: HasTable a => Sqroll -> a -> IO ()
sqrollAppend_ sqroll x = do
cache <- sqrollGetCache sqroll
cc@(IStmt (stmt, poker)) <- takeMVar (sqrollCacheInsert cache)
withForeignPtr stmt $ \raw -> do
poker raw x
sqlStep_ raw
sqlReset raw
putMVar (sqrollCacheInsert cache) cc
{-# INLINE sqrollAppend_ #-}
| pacak/sqroll | src/Database/Sqroll/Internal.hs | bsd-3-clause | 15,751 | 0 | 20 | 4,188 | 4,090 | 2,085 | 2,005 | 291 | 3 |
-- | Calculate 2D distance fields via the Grevera improved 8SED (Danielsson)
-- using small kernel sweeps across the data stored as a 2D array.
module Grevera8SEDFast1
(
grevera8SED
)
where
import Prelude as P
import Data.Vector.Unboxed as U
dX, dY, dXY :: Double
dX = 1
dY = 1
dXY = sqrt (dX * dX + dY * dY)
minDist :: Double -> Double -> Double -> Double
minDist offset y1 y = min (y1 + offset) y
-- | check(data, x, y, x, y[+|-]1, dy )
-- y1 is y +|- 1 and y is the current y
pass1 :: U.Vector Double -> U.Vector Double -> U.Vector Double
pass1 y1 y = U.zipWith (minDist dY) y1 y
-- | check(data, x, y, x-1, y, dx ), check(data, x, y, x+1, y, dx );
pass2s1 :: U.Vector Double -> U.Vector Double
pass2s1 y = U.scanl (minDist dX) (U.head y) (U.tail y)
-- | check(data, x, y, x-1, y[+|-]1, dxy ), check(data, x, y, x+1, y[+|-]1, dxy );
-- y1 is y +|- 1 and y is the current y
pass2s2 :: U.Vector Double -> U.Vector Double -> U.Vector Double
pass2s2 y1 y = U.head y `U.cons` U.zipWith (minDist dXY) y1 (U.tail y)
-- | pass y-1 y, pass y+1 y
-- y1 is y +|- 1 and y is the rest
grevera8SEDpass :: U.Vector Double -> U.Vector Double -> U.Vector Double
grevera8SEDpass y1 = U.reverse . pass2s2 (U.reverse y1) . pass2s1 . U.reverse .
pass2s2 y1 . pass2s1 . pass1 y1
-- | Return the distance field, same array size as the incoming array
grevera8SED :: [U.Vector Double] -> [U.Vector Double]
grevera8SED x =
let y = P.reverse $
P.scanl grevera8SEDpass (P.head x) (P.tail x) -- | pass y-1 y
in P.scanl grevera8SEDpass (P.head y) (P.tail y) -- | pass y+1 y
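-- A hedged usage note: with the usual SED initialisation (0 at feature
-- pixels and a large value, e.g. 1e10, everywhere else), grevera8SED yields,
-- for every pixel, an approximation of the Euclidean distance to the nearest
-- feature pixel.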
| tau-tao/DistField-2D | src/Grevera8SEDFast1.hs | bsd-3-clause | 1,633 | 0 | 14 | 389 | 483 | 250 | 233 | 25 | 1 |
module System.Keychain (getLogin, setLogin, askAndSetLogin) where
import Control.Monad
import Control.Monad.IO.Class
import System.Console.Haskeline
import System.Exit (ExitCode (..))
import System.Process (readProcess,
readProcessWithExitCode)
import Text.Regex.TDFA
getLogin :: String -> IO (Maybe (String, String))
getLogin root = do
(exitCode, out, err) <- readProcessWithExitCode "security" ["find-generic-password", "-g", "-s", root] ""
let loginRegex = "\"acct\"<blob>=\"(.*)\""
passwordRegex = "password: \"(.*)\""
-- TODO this is very ugly
[[_, login]] = out =~ loginRegex
[[_, password]] = err =~ passwordRegex
if exitCode == ExitSuccess
then return (Just (login, password))
else return Nothing
setLogin :: String -> String -> String -> IO ()
setLogin root login password = do
void $ readProcessWithExitCode "security" ["delete-generic-password", "-s", root] ""
void $ readProcess "security" ["add-generic-password", "-s", root, "-a", login, "-w", password] ""
askAndSetLogin :: String -> IO (String, String)
askAndSetLogin hue = runInputT defaultSettings $ do
Just login <- getInputLine "login: "
Just password <- getPassword (Just '*') "pass: "
liftIO (setLogin hue login password)
return (login, password)
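-- A hedged usage sketch: fetch stored credentials for a service, prompting
-- for and storing them on first use ("my-service" is illustrative):
--
-- > (login, password) <- getLogin "my-service"
-- >     >>= maybe (askAndSetLogin "my-service") return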
| madjar/fhue | src/System/Keychain.hs | bsd-3-clause | 1,406 | 0 | 11 | 342 | 408 | 221 | 187 | 28 | 2 |
{-# LANGUAGE CPP, OverloadedStrings #-}
-- | DNS Resolver and generic (lower-level) lookup functions.
module CacheDNS.DNS.Resolver (
-- * Documentation
-- ** Configuration for resolver
FileOrNumericHost(..), ResolvConf(..), defaultResolvConf
-- ** Intermediate data type for resolver
, ResolvSeed, makeResolvSeed
-- ** Type and function for resolver
, Resolver(..), withResolver, withResolvers
-- ** Looking up functions
, lookup
, lookupAuth
-- ** Raw looking up function
, lookupRaw
, lookupRawAD
, fromDNSMessage
, fromDNSFormat
) where
import Control.Exception (bracket)
import Data.Char (isSpace)
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import qualified Data.ByteString.Char8 as BS
import Network.BSD (getProtocolNumber)
import Network.Socket (HostName, Socket, SocketType(Stream, Datagram))
import Network.Socket (AddrInfoFlag(..), AddrInfo(..), SockAddr(..))
import Network.Socket (Family(AF_INET, AF_INET6), PortNumber(..))
import Network.Socket (close, socket, connect, getPeerName, getAddrInfo)
import Network.Socket (defaultHints, defaultProtocol)
import Prelude hiding (lookup)
import System.Random (getStdRandom, randomR)
import System.Timeout (timeout)
import CacheDNS.DNS.Decode
import CacheDNS.DNS.Encode
import CacheDNS.DNS.Internal
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>), (<*>), pure)
#endif
#if mingw32_HOST_OS == 1
import Network.Socket (send)
import qualified Data.ByteString.Lazy.Char8 as LB
import Control.Monad (when)
#else
import Network.Socket.ByteString.Lazy (sendAll)
#endif
----------------------------------------------------------------
-- | Union type for 'FilePath' and 'HostName'. Specify 'FilePath' to
-- \"resolv.conf\" or numeric IP address in 'String' form.
--
-- /Warning/: Only numeric IP addresses are valid @RCHostName@s.
--
-- Example (using Google's public DNS cache):
--
-- >>> let cache = RCHostName "8.8.8.8"
--
data FileOrNumericHost = RCFilePath FilePath -- ^ A path for \"resolv.conf\"
| RCHostName HostName -- ^ A numeric IP address
| RCHostPort HostName PortNumber -- ^ A numeric IP address and port number
-- | Type for resolver configuration. The easiest way to construct a
-- @ResolvConf@ object is to modify the 'defaultResolvConf'.
data ResolvConf = ResolvConf {
resolvInfo :: FileOrNumericHost
-- | Timeout in micro seconds.
, resolvTimeout :: Int
-- | The number of retries including the first try.
, resolvRetry :: Int
-- | This field was obsoleted.
, resolvBufsize :: Integer
}
-- | Return a default 'ResolvConf':
--
-- * 'resolvInfo' is 'RCFilePath' \"\/etc\/resolv.conf\".
--
-- * 'resolvTimeout' is 3,000,000 micro seconds.
--
-- * 'resolvRetry' is 3.
--
-- * 'resolvBufsize' is 512. (obsoleted)
--
-- Example (use Google's public DNS cache instead of resolv.conf):
--
-- >>> let cache = RCHostName "8.8.8.8"
-- >>> let rc = defaultResolvConf { resolvInfo = cache }
--
-- Use the default DNS resolver configuration.
defaultResolvConf :: ResolvConf
defaultResolvConf = ResolvConf {
resolvInfo = RCFilePath "/etc/resolv.conf"
, resolvTimeout = 3 * 1000 * 1000
, resolvRetry = 3
, resolvBufsize = 512
}
----------------------------------------------------------------
-- | Abstract data type of DNS Resolver seed.
-- When implementing a DNS cache, this should be re-used.
data ResolvSeed = ResolvSeed {
addrInfo :: AddrInfo
, rsTimeout :: Int
, rsRetry :: Int
, rsBufsize :: Integer
}
-- | Abstract data type of DNS Resolver
-- When implementing a DNS cache, this MUST NOT be re-used.
data Resolver = Resolver {
genId :: IO Int
, dnsSock :: Socket
, dnsTimeout :: Int
, dnsRetry :: Int
, dnsBufsize :: Integer
}
----------------------------------------------------------------
-- | Make a 'ResolvSeed' from a 'ResolvConf'.
--
-- Examples:
--
-- >>> rs <- makeResolvSeed defaultResolvConf
--
-- Build a ResolvSeed from the given resolver configuration.
makeResolvSeed :: ResolvConf -> IO ResolvSeed
makeResolvSeed conf = ResolvSeed <$> addr
<*> pure (resolvTimeout conf)
<*> pure (resolvRetry conf)
<*> pure (resolvBufsize conf)
where
addr = case resolvInfo conf of
RCHostName numhost -> makeAddrInfo numhost Nothing
RCHostPort numhost mport -> makeAddrInfo numhost $ Just mport
RCFilePath file -> toAddr <$> readFile file >>= \i -> makeAddrInfo i Nothing
toAddr cs = let l:_ = filter ("nameserver" `isPrefixOf`) $ lines cs
in extract l
extract = reverse . dropWhile isSpace . reverse . dropWhile isSpace . drop 11
makeAddrInfo :: HostName -> Maybe PortNumber -> IO AddrInfo
makeAddrInfo addr mport = do
proto <- getProtocolNumber "udp"
let hints = defaultHints {
addrFlags = [AI_ADDRCONFIG, AI_NUMERICHOST, AI_PASSIVE]
, addrSocketType = Datagram
, addrProtocol = proto
}
a:_ <- getAddrInfo (Just hints) (Just addr) (Just "domain")
let connectPort = case addrAddress a of
SockAddrInet pn ha -> SockAddrInet (fromMaybe pn mport) ha
SockAddrInet6 pn fi ha sid -> SockAddrInet6 (fromMaybe pn mport) fi ha sid
unixAddr -> unixAddr
return $ a { addrAddress = connectPort }
----------------------------------------------------------------
-- | Giving a thread-safe 'Resolver' to the function of the second
-- argument. A socket for UDP is opened inside and is surely closed.
-- Multiple 'withResolver's can be used concurrently.
-- Multiple lookups must be done sequentially with a given
-- 'Resolver'. If multiple 'Resolver's are necessary for
-- concurrent purpose, use 'withResolvers'.
withResolver :: ResolvSeed -> (Resolver -> IO a) -> IO a
withResolver seed func = bracket (openSocket seed) close $ \sock -> do
connectSocket sock seed
-- Invoke the callback function;
-- the socket is closed as soon as the callback returns.
func $ makeResolver seed sock
-- | Giving thread-safe 'Resolver's to the function of the second
-- argument. Sockets for UDP are opened inside and are surely closed.
-- For each 'Resolver', multiple lookups must be done sequentially.
-- 'Resolver's can be used concurrently.
-- Queries can be balanced across multiple DNS servers here.
withResolvers :: [ResolvSeed] -> ([Resolver] -> IO a) -> IO a
withResolvers seeds func = bracket openSockets closeSockets $ \socks -> do
mapM_ (uncurry connectSocket) $ zip socks seeds
let resolvs = zipWith makeResolver seeds socks
func resolvs
where
openSockets = mapM openSocket seeds
closeSockets = mapM close
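-- A hedged usage sketch: query two independent caches with two resolvers
-- ("8.8.8.8" and "8.8.4.4" are just example servers; error handling and
-- concurrency are omitted):
--
-- > rs1 <- makeResolvSeed defaultResolvConf { resolvInfo = RCHostName "8.8.8.8" }
-- > rs2 <- makeResolvSeed defaultResolvConf { resolvInfo = RCHostName "8.8.4.4" }
-- > withResolvers [rs1, rs2] $ \[r1, r2] -> do
-- >     a <- lookup r1 "www.example.com" A
-- >     b <- lookup r2 "www.example.org" A
-- >     return (a, b)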
openSocket :: ResolvSeed -> IO Socket
openSocket seed = socket (addrFamily ai) (addrSocketType ai) (addrProtocol ai)
where
ai = addrInfo seed
connectSocket :: Socket -> ResolvSeed -> IO ()
connectSocket sock seed = connect sock (addrAddress ai)
where
ai = addrInfo seed
makeResolver :: ResolvSeed -> Socket -> Resolver
makeResolver seed sock = Resolver {
genId = getRandom
, dnsSock = sock
, dnsTimeout = rsTimeout seed
, dnsRetry = rsRetry seed
, dnsBufsize = rsBufsize seed
}
getRandom :: IO Int
getRandom = getStdRandom (randomR (0,65535))
----------------------------------------------------------------
-- | Looking up resource records of a domain. The first parameter is one of
-- the field accessors of the 'DNSMessage' type -- this allows you to
-- choose which section (answer, authority, or additional) you would like
-- to inspect for the result.
lookupSection :: (DNSMessage -> [ResourceRecord])
-> Resolver
-> Domain
-> TYPE
-> IO (Either DNSError [RData])
lookupSection section rlv dom typ = do
eans <- lookupRaw rlv dom typ
case eans of
Left err -> return $ Left err
Right ans -> return $ fromDNSMessage ans toRData
where
{- CNAME hack
dom' = if "." `isSuffixOf` dom then dom else dom ++ "."
correct r = rrname r == dom' && rrtype r == typ
-}
correct r = rrtype r == typ
toRData = map rdata . filter correct . section
-- | Extract necessary information from 'DNSMessage'
fromDNSMessage :: DNSMessage -> (DNSMessage -> a) -> Either DNSError a
fromDNSMessage ans conv = case errcode ans of
NoErr -> Right $ conv ans
FormatErr -> Left FormatError
ServFail -> Left ServerFailure
NameErr -> Left NameError
NotImpl -> Left NotImplemented
Refused -> Left OperationRefused
BadOpt -> Left BadOptRecord
where
    -- Extract the response code from the message header.
errcode = rcode . flags . header
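-- As an illustration (a sketch; @ans@ is an assumed, already-parsed
-- 'DNSMessage'), selecting the answer section only when the response code
-- signals success looks like:
--
-- > case fromDNSMessage ans answer of
-- >   Right rrs -> mapM_ print rrs
-- >   Left err  -> print err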
-- | For backward compatibility.
fromDNSFormat :: DNSMessage -> (DNSMessage -> a) -> Either DNSError a
fromDNSFormat = fromDNSMessage
-- | Look up resource records for a domain, collecting the results
-- from the ANSWER section of the response.
--
-- We repeat an example from "Network.DNS.Lookup":
--
-- >>> let hostname = Data.ByteString.Char8.pack "www.example.com"
-- >>> rs <- makeResolvSeed defaultResolvConf
-- >>> withResolver rs $ \resolver -> lookup resolver hostname A
-- Right [93.184.216.34]
--
lookup :: Resolver -> Domain -> TYPE -> IO (Either DNSError [RData])
lookup = lookupSection answer
-- | Look up resource records for a domain, collecting the results
-- from the AUTHORITY section of the response.
lookupAuth :: Resolver -> Domain -> TYPE -> IO (Either DNSError [RData])
lookupAuth = lookupSection authority
-- | Look up a name and return the entire DNS Response. If the
-- initial UDP query elicits a truncated answer, the query is
-- retried over TCP. The TCP retry may extend the total time
-- taken by one more timeout beyond timeout * tries.
--
-- Sample output is included below; however, it is /not/ tested, as
-- the sequence number is unpredictable (it has to be!).
--
-- The example code:
--
-- @
-- let hostname = Data.ByteString.Char8.pack \"www.example.com\"
-- rs <- makeResolvSeed defaultResolvConf
-- withResolver rs $ \resolver -> lookupRaw resolver hostname A
-- @
--
-- And the (formatted) expected output:
--
-- @
-- Right (DNSMessage
-- { header = DNSHeader
-- { identifier = 1,
-- flags = DNSFlags
-- { qOrR = QR_Response,
-- opcode = OP_STD,
-- authAnswer = False,
-- trunCation = False,
-- recDesired = True,
-- recAvailable = True,
-- rcode = NoErr,
-- authenData = False
-- },
-- },
-- question = [Question { qname = \"www.example.com.\",
-- qtype = A}],
-- answer = [ResourceRecord {rrname = \"www.example.com.\",
-- rrtype = A,
-- rrttl = 800,
-- rdlen = 4,
-- rdata = 93.184.216.119}],
-- authority = [],
-- additional = []})
-- @
--
lookupRaw :: Resolver -> Domain -> TYPE -> IO (Either DNSError DNSMessage)
lookupRaw = lookupRawInternal receive False
-- | Same as lookupRaw, but the query sets the AD bit, which solicits
-- the authentication status in the server reply.  In most applications
-- (other than diagnostic tools) that want authenticated data it is
-- unwise to trust the AD bit in the responses of non-local servers, so
-- this interface should in most cases only be used with a loopback
-- resolver.
--
lookupRawAD :: Resolver -> Domain -> TYPE -> IO (Either DNSError DNSMessage)
lookupRawAD = lookupRawInternal receive True
-- Lookup loop, we try UDP until we get a response. If the response
-- is truncated, we try TCP once, with no further UDP retries.
-- EDNS0 support would significantly reduce the need for TCP retries.
--
-- For now, we optimize for low latency high-availability caches
-- (e.g. running on a loopback interface), where TCP is cheap
-- enough. We could attempt to complete the TCP lookup within the
-- original time budget of the truncated UDP query, by wrapping both
-- within a single 'timeout', thereby staying within the original
-- time budget, but it seems saner to give TCP a full opportunity to
-- return results. TCP latency after a truncated UDP reply will be
-- atypical.
--
-- Future improvements might also include support for TCP on the
-- initial query, and of course support for multiple nameservers.
lookupRawInternal ::
(Socket -> IO DNSMessage)
-> Bool
-> Resolver
-> Domain
-> TYPE
-> IO (Either DNSError DNSMessage)
lookupRawInternal _ _ _ dom _
| isIllegal dom = return $ Left IllegalDomain
lookupRawInternal rcv ad rlv dom typ = do
    -- Generate an identifier for this query.
seqno <- genId rlv
    -- Build the query packet.
let query = (if ad then composeQueryAD else composeQuery) seqno [q]
checkSeqno = check seqno
loop query checkSeqno 0 False
where
loop query checkSeqno cnt mismatch
      -- The retry limit has been reached.
| cnt == retry = do
let ret | mismatch = SequenceNumberMismatch
| otherwise = TimeoutExpired
return $ Left ret
| otherwise = do
          -- Send the query.
sendAll sock query
          -- Wait for the response, subject to the timeout.
response <- timeout tm (rcv sock)
case response of
              -- The receive timed out (Nothing was returned),
              -- so retry the query.
Nothing -> loop query checkSeqno (cnt + 1) False
Just res -> do
                  -- Check whether the sequence number matches;
                  -- if it does, the lookup succeeded.
let valid = checkSeqno res
case valid of
False -> loop query checkSeqno (cnt + 1) False
True | not $ trunCation $ flags $ header res
-> return $ Right res
_ -> tcpRetry query sock tm
sock = dnsSock rlv
tm = dnsTimeout rlv
retry = dnsRetry rlv
    -- Build the question section of the query.
q = makeQuestion dom typ
    -- Check that the identifier in the response matches the query's seqno.
check seqno res = identifier (header res) == seqno
-- Create a TCP socket `just like` our UDP socket and retry the same
-- query over TCP. Since TCP is a reliable transport, and we just
-- got a (truncated) reply from the server over UDP (so it has the
-- answer, but it is just too large for UDP), we expect to succeed
-- quickly on the first try. There will be no further retries.
tcpRetry ::
Query
-> Socket
-> Int
-> IO (Either DNSError DNSMessage)
tcpRetry query sock tm = do
peer <- getPeerName sock
bracket (tcpOpen peer)
(maybe (return ()) close)
(tcpLookup query peer tm)
-- Create a TCP socket with the given socket address (taken from a
-- corresponding UDP socket). This might throw an I/O Exception
-- if we run out of file descriptors. Should this use tryIOError,
-- and return "Nothing" also in that case? If so, perhaps similar
-- code is needed in openSocket, but that has to wait until we
-- refactor `withResolver` to not do "early" socket allocation, and
-- instead allocate a fresh UDP socket for each `lookupRawInternal`
-- invocation. It would be bad to fail an entire `withResolver`
-- action, if the socket shortage is transient, and the user intends
-- to make many DNS queries with the same resolver handle.
tcpOpen :: SockAddr -> IO (Maybe Socket)
tcpOpen peer = do
    case peer of
        SockAddrInet _ _ ->
            Just <$> socket AF_INET Stream defaultProtocol
        SockAddrInet6 _ _ _ _ ->
            Just <$> socket AF_INET6 Stream defaultProtocol
        _ -> return Nothing -- Only IPv4 and IPv6 are possible
-- Perform a DNS query over TCP, if we were successful in creating
-- the TCP socket. The socket creation can only fail if we run out
-- of file descriptors; we're not making connections here.  Failure
-- is reported as "server" failure, though it is really our stub
-- resolver that's failing. This is likely good enough.
tcpLookup ::
Query
-> SockAddr
-> Int
-> Maybe Socket
-> IO (Either DNSError DNSMessage)
tcpLookup _ _ _ Nothing = return $ Left ServerFailure
tcpLookup query peer tm (Just vc) = do
response <- timeout tm $ do
connect vc $ peer
sendAll vc $ encodeVC query
receiveVC vc
case response of
Nothing -> return $ Left TimeoutExpired
Just res -> return $ Right res
#if mingw32_HOST_OS == 1
-- Win32 does not have a sendAll function.
-- Windows does not support sendAll in Network.ByteString.Lazy.
-- This implements sendAll with Haskell Strings.
sendAll sock bs = do
sent <- send sock (LB.unpack bs)
when (sent < fromIntegral (LB.length bs)) $ sendAll sock (LB.drop (fromIntegral sent) bs)
#endif
isIllegal :: Domain -> Bool
isIllegal "" = True
isIllegal dom
| '.' `BS.notElem` dom = True
| ':' `BS.elem` dom = True
| '/' `BS.elem` dom = True
| BS.length dom > 253 = True
| any (\x -> BS.length x > 63)
(BS.split '.' dom) = True
isIllegal _ = False
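-- Hypothetical examples of the rules above (assuming OverloadedStrings for
-- the 'Domain' literals; these are not part of the original module):
--
-- > isIllegal ""            == True    -- the empty domain is rejected
-- > isIllegal "localhost"   == True    -- no '.' present
-- > isIllegal "example.com" == False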
| DavidAlphaFox/CacheDNS | src/CacheDNS/DNS/Resolver.hs | bsd-3-clause | 17,592 | 1 | 23 | 4,633 | 2,972 | 1,615 | 1,357 | 217 | 7 |
module AlphabetSoup where
import Rumpus
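-- Spawns 100 letter entities at random positions inside a small cube; each
-- child's 'myUpdate' makes it drift around its spawn point over time.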
start :: Start
start = do
let n = 0.3
forM_ (take 100 $ cycle ['!'..'~']) $ \letter -> do
pos <- V3 <$> randomRange (-n,n)
<*> randomRange (-n,n)
<*> randomRange (-n,n)
let (V3 x y z) = pos
spawnChild_ $ do
myPose ==> position pos
mySize ==> 0.01
myText ==> [letter]
myTextPose ==> position (V3 0 1 0)
myUpdate ==> do
now <- getNow
let n = (now + pos ^. _x + pos ^. _y) * 0.5
setPositionRotationSize
(pos & _x +~ sin n & _y +~ cos n & (*0.5))
(axisAngle pos n)
(realToFrac (sin n*0.1)) | lukexi/rumpus | pristine/Room/AlphabetSoup.hs | bsd-3-clause | 820 | 0 | 25 | 398 | 298 | 146 | 152 | 22 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
-------------------------------------------------------------------------------
--
-- | Dynamic flags
--
-- Most flags are dynamic flags, which means they can change from compilation
-- to compilation using @OPTIONS_GHC@ pragmas, and in a multi-session GHC each
-- session can be using different dynamic flags. Dynamic flags can also be set
-- at the prompt in GHCi.
--
-- (c) The University of Glasgow 2005
--
-------------------------------------------------------------------------------
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
module DynFlags (
-- * Dynamic flags and associated configuration types
DumpFlag(..),
GeneralFlag(..),
WarningFlag(..), WarnReason(..),
Language(..),
PlatformConstants(..),
FatalMessager, LogAction, FlushOut(..), FlushErr(..),
ProfAuto(..),
glasgowExtsFlags,
dopt, dopt_set, dopt_unset,
gopt, gopt_set, gopt_unset, setGeneralFlag', unSetGeneralFlag',
wopt, wopt_set, wopt_unset,
wopt_fatal,
xopt, xopt_set, xopt_unset,
lang_set,
useUnicodeSyntax,
whenGeneratingDynamicToo, ifGeneratingDynamicToo,
whenCannotGenerateDynamicToo,
dynamicTooMkDynamicDynFlags,
DynFlags(..),
FlagSpec(..),
HasDynFlags(..), ContainsDynFlags(..),
OverridingBool(..), overrideWith,
RtsOptsEnabled(..),
HscTarget(..), isObjectTarget, defaultObjectTarget,
targetRetainsAllBindings,
GhcMode(..), isOneShot,
GhcLink(..), isNoLink,
PackageFlag(..), PackageArg(..), ModRenaming(..),
IgnorePackageFlag(..), TrustFlag(..),
PkgConfRef(..),
Option(..), showOpt,
DynLibLoader(..),
fFlags, fLangFlags, xFlags,
wWarningFlags,
dynFlagDependencies,
tablesNextToCode, mkTablesNextToCode,
makeDynFlagsConsistent,
Way(..), mkBuildTag, wayRTSOnly, addWay', updateWays,
wayGeneralFlags, wayUnsetGeneralFlags,
thisPackage, thisComponentId, thisUnitIdInsts,
-- ** Safe Haskell
SafeHaskellMode(..),
safeHaskellOn, safeImportsOn, safeLanguageOn, safeInferOn,
packageTrustOn,
safeDirectImpsReq, safeImplicitImpsReq,
unsafeFlags, unsafeFlagsForInfer,
-- ** System tool settings and locations
Settings(..),
targetPlatform, programName, projectVersion,
ghcUsagePath, ghciUsagePath, topDir, tmpDir, rawSettings,
versionedAppDir,
extraGccViaCFlags, systemPackageConfig,
pgm_L, pgm_P, pgm_F, pgm_c, pgm_s, pgm_a, pgm_l, pgm_dll, pgm_T,
pgm_windres, pgm_libtool, pgm_lo, pgm_lc, pgm_i,
opt_L, opt_P, opt_F, opt_c, opt_a, opt_l, opt_i,
opt_windres, opt_lo, opt_lc,
-- ** Manipulating DynFlags
defaultDynFlags, -- Settings -> DynFlags
defaultWays,
interpWays,
interpreterProfiled, interpreterDynamic,
initDynFlags, -- DynFlags -> IO DynFlags
defaultFatalMessager,
defaultLogAction,
defaultLogActionHPrintDoc,
defaultLogActionHPutStrDoc,
defaultFlushOut,
defaultFlushErr,
getOpts, -- DynFlags -> (DynFlags -> [a]) -> [a]
getVerbFlags,
updOptLevel,
setTmpDir,
setUnitId,
interpretPackageEnv,
canonicalizeHomeModule,
-- ** Parsing DynFlags
parseDynamicFlagsCmdLine,
parseDynamicFilePragma,
parseDynamicFlagsFull,
-- ** Available DynFlags
allNonDeprecatedFlags,
flagsAll,
flagsDynamic,
flagsPackage,
flagsForCompletion,
supportedLanguagesAndExtensions,
languageExtensions,
-- ** DynFlags C compiler options
picCCOpts, picPOpts,
-- * Compiler configuration suitable for display to the user
compilerInfo,
#ifdef GHCI
rtsIsProfiled,
#endif
dynamicGhc,
#include "GHCConstantsHaskellExports.hs"
bLOCK_SIZE_W,
wORD_SIZE_IN_BITS,
tAG_MASK,
mAX_PTR_TAG,
tARGET_MIN_INT, tARGET_MAX_INT, tARGET_MAX_WORD,
unsafeGlobalDynFlags, setUnsafeGlobalDynFlags,
-- * SSE and AVX
isSseEnabled,
isSse2Enabled,
isSse4_2Enabled,
isAvxEnabled,
isAvx2Enabled,
isAvx512cdEnabled,
isAvx512erEnabled,
isAvx512fEnabled,
isAvx512pfEnabled,
-- * Linker/compiler information
LinkerInfo(..),
CompilerInfo(..),
) where
#include "HsVersions.h"
#if defined mingw32_HOST_OS && !defined WINAPI
# if defined i386_HOST_ARCH
# define WINAPI stdcall
# elif defined x86_64_HOST_ARCH
# define WINAPI ccall
# else
# error unknown architecture
# endif
#endif
import Platform
import PlatformConstants
import Module
import PackageConfig
import {-# SOURCE #-} Hooks
import {-# SOURCE #-} PrelNames ( mAIN )
import {-# SOURCE #-} Packages (PackageState, emptyPackageState)
import DriverPhases ( Phase(..), phaseInputExt )
import Config
import CmdLineParser
import Constants
import Panic
import Util
import Maybes
import MonadUtils
import qualified Pretty
import SrcLoc
import BasicTypes ( IntWithInf, treatZeroAsInf )
import FastString
import Outputable
import Foreign.C ( CInt(..) )
import System.IO.Unsafe ( unsafeDupablePerformIO )
import {-# SOURCE #-} ErrUtils ( Severity(..), MsgDoc, mkLocMessageAnn )
import System.IO.Unsafe ( unsafePerformIO )
import Data.IORef
import Control.Arrow ((&&&))
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Writer
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Except
import Control.Exception (catch, throwIO)
import Data.Ord
import Data.Bits
import Data.Char
import Data.Int
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Word
import System.FilePath
import System.Directory
import System.Environment (getEnv)
import System.IO
import System.IO.Error
#if defined MIN_VERSION_terminfo
import System.Console.Terminfo (SetupTermError, Terminal, getCapability,
setupTermFromEnv, termColors)
import System.Posix (queryTerminal, stdError)
#elif defined mingw32_HOST_OS
import Foreign (Ptr, with, peek)
import System.Environment (lookupEnv)
import qualified Graphics.Win32 as Win32
#endif
import Text.ParserCombinators.ReadP hiding (char)
import Text.ParserCombinators.ReadP as R
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import GHC.Foreign (withCString, peekCString)
import qualified GHC.LanguageExtensions as LangExt
-- Note [Updating flag description in the User's Guide]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- If you modify anything in this file please make sure that your changes are
-- described in the User's Guide. Usually at least two sections need to be
-- updated:
--
-- * Flag Reference section generated from the modules in
-- utils/mkUserGuidePart/Options
--
-- * Flag description in docs/users_guide/using.rst provides a detailed
-- explanation of flags' usage.
-- Note [Supporting CLI completion]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- The command line interface completion (in for example bash) is an easy way
-- for the developer to learn what flags are available from GHC.
-- GHC helps by separating which flags are available when compiling with GHC,
-- and which flags are available when using GHCi.
-- A flag is assumed to either work in both these modes, or only in one of them.
-- When adding or changing a flag, please consider for which mode the flag will
-- have effect, and annotate it accordingly. For Flags use defFlag, defGhcFlag,
-- defGhciFlag, and for FlagSpec use flagSpec or flagGhciSpec.
-- Note [Adding a language extension]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- There are a few steps to adding (or removing) a language extension,
--
-- * Adding the extension to GHC.LanguageExtensions
--
-- The Extension type in libraries/ghc-boot-th/GHC/LanguageExtensions/Type.hs
-- is the canonical list of language extensions known by GHC.
--
-- * Adding a flag to DynFlags.xFlags
--
-- This is fairly self-explanatory. The name should be concise, memorable,
-- and consistent with any previous implementations of the similar idea in
-- other Haskell compilers.
--
-- * Adding the flag to the documentation
--
-- This is the same as any other flag. See
-- Note [Updating flag description in the User's Guide]
--
-- * Adding the flag to Cabal
--
-- The Cabal library has its own list of all language extensions supported
-- by all major compilers. This is the list that user code being uploaded
-- to Hackage is checked against to ensure language extension validity.
-- Consequently, it is very important that this list remains up-to-date.
--
-- To this end, there is a testsuite test (testsuite/tests/driver/T4437.hs)
--   whose job it is to ensure that GHC's extensions are consistent with
-- Cabal.
--
-- The recommended workflow is,
--
-- 1. Temporarily add your new language extension to the
-- expectedGhcOnlyExtensions list in T4437 to ensure the test doesn't
-- break while Cabal is updated.
--
-- 2. After your GHC change is accepted, submit a Cabal pull request adding
-- your new extension to Cabal's list (found in
-- Cabal/Language/Haskell/Extension.hs).
--
-- 3. After your Cabal change is accepted, let the GHC developers know so
-- they can update the Cabal submodule and remove the extensions from
-- expectedGhcOnlyExtensions.
--
-- * Adding the flag to the GHC Wiki
--
-- There is a change log tracking language extension additions and removals
-- on the GHC wiki: https://ghc.haskell.org/trac/ghc/wiki/LanguagePragmaHistory
--
-- See Trac #4437 and #8176.
-- -----------------------------------------------------------------------------
-- DynFlags
data DumpFlag
-- See Note [Updating flag description in the User's Guide]
-- debugging flags
= Opt_D_dump_cmm
| Opt_D_dump_cmm_from_stg
| Opt_D_dump_cmm_raw
| Opt_D_dump_cmm_verbose
   -- All of the cmm subflags (there are a lot!) are automatically
   -- enabled if you run -ddump-cmm-verbose.
   -- Each flag corresponds to an exact stage of the Cmm pipeline.
| Opt_D_dump_cmm_cfg
| Opt_D_dump_cmm_cbe
| Opt_D_dump_cmm_switch
| Opt_D_dump_cmm_proc
| Opt_D_dump_cmm_sp
| Opt_D_dump_cmm_sink
| Opt_D_dump_cmm_caf
| Opt_D_dump_cmm_procmap
| Opt_D_dump_cmm_split
| Opt_D_dump_cmm_info
| Opt_D_dump_cmm_cps
-- end cmm subflags
| Opt_D_dump_asm
| Opt_D_dump_asm_native
| Opt_D_dump_asm_liveness
| Opt_D_dump_asm_regalloc
| Opt_D_dump_asm_regalloc_stages
| Opt_D_dump_asm_conflicts
| Opt_D_dump_asm_stats
| Opt_D_dump_asm_expanded
| Opt_D_dump_llvm
| Opt_D_dump_core_stats
| Opt_D_dump_deriv
| Opt_D_dump_ds
| Opt_D_dump_foreign
| Opt_D_dump_inlinings
| Opt_D_dump_rule_firings
| Opt_D_dump_rule_rewrites
| Opt_D_dump_simpl_trace
| Opt_D_dump_occur_anal
| Opt_D_dump_parsed
| Opt_D_dump_rn
| Opt_D_dump_shape
| Opt_D_dump_simpl
| Opt_D_dump_simpl_iterations
| Opt_D_dump_spec
| Opt_D_dump_prep
| Opt_D_dump_stg
| Opt_D_dump_call_arity
| Opt_D_dump_stranal
| Opt_D_dump_str_signatures
| Opt_D_dump_tc
| Opt_D_dump_types
| Opt_D_dump_rules
| Opt_D_dump_cse
| Opt_D_dump_worker_wrapper
| Opt_D_dump_rn_trace
| Opt_D_dump_rn_stats
| Opt_D_dump_opt_cmm
| Opt_D_dump_simpl_stats
| Opt_D_dump_cs_trace -- Constraint solver in type checker
| Opt_D_dump_tc_trace
| Opt_D_dump_ec_trace -- Pattern match exhaustiveness checker
| Opt_D_dump_if_trace
| Opt_D_dump_vt_trace
| Opt_D_dump_splices
| Opt_D_th_dec_file
| Opt_D_dump_BCOs
| Opt_D_dump_vect
| Opt_D_dump_ticked
| Opt_D_dump_rtti
| Opt_D_source_stats
| Opt_D_verbose_stg2stg
| Opt_D_dump_hi
| Opt_D_dump_hi_diffs
| Opt_D_dump_mod_cycles
| Opt_D_dump_mod_map
| Opt_D_dump_view_pattern_commoning
| Opt_D_verbose_core2core
| Opt_D_dump_debug
deriving (Eq, Show, Enum)
-- | Enumerates the simple on-or-off dynamic flags
data GeneralFlag
-- See Note [Updating flag description in the User's Guide]
= Opt_DumpToFile -- ^ Append dump output to files instead of stdout.
| Opt_D_faststring_stats
| Opt_D_dump_minimal_imports
| Opt_DoCoreLinting
| Opt_DoStgLinting
| Opt_DoCmmLinting
| Opt_DoAsmLinting
| Opt_DoAnnotationLinting
| Opt_NoLlvmMangler -- hidden flag
| Opt_WarnIsError -- -Werror; makes warnings fatal
| Opt_ShowWarnGroups -- Show the group a warning belongs to
| Opt_HideSourcePaths -- Hide module source/object paths
| Opt_PrintExplicitForalls
| Opt_PrintExplicitKinds
| Opt_PrintExplicitCoercions
| Opt_PrintExplicitRuntimeReps
| Opt_PrintEqualityRelations
| Opt_PrintUnicodeSyntax
| Opt_PrintExpandedSynonyms
| Opt_PrintPotentialInstances
| Opt_PrintTypecheckerElaboration
-- optimisation opts
| Opt_CallArity
| Opt_Strictness
| Opt_LateDmdAnal
| Opt_KillAbsence
| Opt_KillOneShot
| Opt_FullLaziness
| Opt_FloatIn
| Opt_Specialise
| Opt_SpecialiseAggressively
| Opt_CrossModuleSpecialise
| Opt_StaticArgumentTransformation
| Opt_CSE
| Opt_LiberateCase
| Opt_SpecConstr
| Opt_DoLambdaEtaExpansion
| Opt_IgnoreAsserts
| Opt_DoEtaReduction
| Opt_CaseMerge
| Opt_UnboxStrictFields
| Opt_UnboxSmallStrictFields
| Opt_DictsCheap
| Opt_EnableRewriteRules -- Apply rewrite rules during simplification
| Opt_Vectorise
| Opt_VectorisationAvoidance
| Opt_RegsGraph -- do graph coloring register allocation
| Opt_RegsIterative -- do iterative coalescing graph coloring register allocation
| Opt_PedanticBottoms -- Be picky about how we treat bottom
   | Opt_LlvmTBAA -- Use LLVM TBAA infrastructure for improving AA (hidden flag)
| Opt_LlvmPassVectorsInRegisters -- Pass SIMD vectors in registers (requires a patched LLVM) (hidden flag)
| Opt_LlvmFillUndefWithGarbage -- Testing for undef bugs (hidden flag)
| Opt_IrrefutableTuples
| Opt_CmmSink
| Opt_CmmElimCommonBlocks
| Opt_OmitYields
| Opt_FunToThunk -- allow WwLib.mkWorkerArgs to remove all value lambdas
| Opt_DictsStrict -- be strict in argument dictionaries
| Opt_DmdTxDictSel -- use a special demand transformer for dictionary selectors
| Opt_Loopification -- See Note [Self-recursive tail calls]
| Opt_CprAnal
| Opt_WorkerWrapper
-- Interface files
| Opt_IgnoreInterfacePragmas
| Opt_OmitInterfacePragmas
| Opt_ExposeAllUnfoldings
| Opt_WriteInterface -- forces .hi files to be written even with -fno-code
-- profiling opts
| Opt_AutoSccsOnIndividualCafs
| Opt_ProfCountEntries
-- misc opts
| Opt_Pp
| Opt_ForceRecomp
| Opt_ExcessPrecision
| Opt_EagerBlackHoling
| Opt_NoHsMain
| Opt_SplitObjs
| Opt_SplitSections
| Opt_StgStats
| Opt_HideAllPackages
| Opt_HideAllPluginPackages
| Opt_PrintBindResult
| Opt_Haddock
| Opt_HaddockOptions
| Opt_BreakOnException
| Opt_BreakOnError
| Opt_PrintEvldWithShow
| Opt_PrintBindContents
| Opt_GenManifest
| Opt_EmbedManifest
| Opt_SharedImplib
| Opt_BuildingCabalPackage
| Opt_IgnoreDotGhci
| Opt_GhciSandbox
| Opt_GhciHistory
| Opt_LocalGhciHistory
| Opt_HelpfulErrors
| Opt_DeferTypeErrors
| Opt_DeferTypedHoles
| Opt_DeferOutOfScopeVariables
| Opt_PIC
| Opt_SccProfilingOn
| Opt_Ticky
| Opt_Ticky_Allocd
| Opt_Ticky_LNE
| Opt_Ticky_Dyn_Thunk
| Opt_RPath
| Opt_RelativeDynlibPaths
| Opt_Hpc
| Opt_FlatCache
| Opt_ExternalInterpreter
| Opt_OptimalApplicativeDo
| Opt_VersionMacros
-- PreInlining is on by default. The option is there just to see how
-- bad things get if you turn it off!
| Opt_SimplPreInlining
-- output style opts
| Opt_ErrorSpans -- Include full span info in error messages,
-- instead of just the start position.
| Opt_PprCaseAsLet
| Opt_PprShowTicks
   -- Suppress all coercions, replacing them with '...'
| Opt_SuppressCoercions
| Opt_SuppressVarKinds
-- Suppress module id prefixes on variables.
| Opt_SuppressModulePrefixes
-- Suppress type applications.
| Opt_SuppressTypeApplications
-- Suppress info such as arity and unfoldings on identifiers.
| Opt_SuppressIdInfo
-- Suppress separate type signatures in core, but leave types on
-- lambda bound vars
| Opt_SuppressUnfoldings
-- Suppress the details of even stable unfoldings
| Opt_SuppressTypeSignatures
-- Suppress unique ids on variables.
-- Except for uniques, as some simplifier phases introduce new
-- variables that have otherwise identical names.
| Opt_SuppressUniques
-- temporary flags
| Opt_AutoLinkPackages
| Opt_ImplicitImportQualified
-- keeping stuff
| Opt_KeepHiDiffs
| Opt_KeepHcFiles
| Opt_KeepSFiles
| Opt_KeepTmpFiles
| Opt_KeepRawTokenStream
| Opt_KeepLlvmFiles
| Opt_KeepHiFiles
| Opt_KeepOFiles
| Opt_BuildDynamicToo
-- safe haskell flags
| Opt_DistrustAllPackages
| Opt_PackageTrust
deriving (Eq, Show, Enum)
-- | Used when outputting warnings: if a reason is given, it is
-- displayed. If a warning isn't controlled by a flag, this is made
-- explicit at the point of use.
data WarnReason = NoReason | Reason !WarningFlag
data WarningFlag =
-- See Note [Updating flag description in the User's Guide]
Opt_WarnDuplicateExports
| Opt_WarnDuplicateConstraints
| Opt_WarnRedundantConstraints
| Opt_WarnHiShadows
| Opt_WarnImplicitPrelude
| Opt_WarnIncompletePatterns
| Opt_WarnIncompleteUniPatterns
| Opt_WarnIncompletePatternsRecUpd
| Opt_WarnOverflowedLiterals
| Opt_WarnEmptyEnumerations
| Opt_WarnMissingFields
| Opt_WarnMissingImportList
| Opt_WarnMissingMethods
| Opt_WarnMissingSignatures
| Opt_WarnMissingLocalSignatures
| Opt_WarnNameShadowing
| Opt_WarnOverlappingPatterns
| Opt_WarnTypeDefaults
| Opt_WarnMonomorphism
| Opt_WarnUnusedTopBinds
| Opt_WarnUnusedLocalBinds
| Opt_WarnUnusedPatternBinds
| Opt_WarnUnusedImports
| Opt_WarnUnusedMatches
| Opt_WarnUnusedTypePatterns
| Opt_WarnUnusedForalls
| Opt_WarnContextQuantification -- remove in 8.2
| Opt_WarnWarningsDeprecations
| Opt_WarnDeprecatedFlags
| Opt_WarnAMP -- Introduced in GHC 7.8, obsolete since 7.10
| Opt_WarnMissingMonadFailInstances -- since 8.0
| Opt_WarnSemigroup -- since 8.0
| Opt_WarnDodgyExports
| Opt_WarnDodgyImports
| Opt_WarnOrphans
| Opt_WarnAutoOrphans
| Opt_WarnIdentities
| Opt_WarnTabs
| Opt_WarnUnrecognisedPragmas
| Opt_WarnDodgyForeignImports
| Opt_WarnUnusedDoBind
| Opt_WarnWrongDoBind
| Opt_WarnAlternativeLayoutRuleTransitional
| Opt_WarnUnsafe
| Opt_WarnSafe
| Opt_WarnTrustworthySafe
| Opt_WarnMissedSpecs
| Opt_WarnAllMissedSpecs
| Opt_WarnUnsupportedCallingConventions
| Opt_WarnUnsupportedLlvmVersion
| Opt_WarnInlineRuleShadowing
| Opt_WarnTypedHoles
| Opt_WarnPartialTypeSignatures
| Opt_WarnMissingExportedSignatures
| Opt_WarnUntickedPromotedConstructors
| Opt_WarnDerivingTypeable
| Opt_WarnDeferredTypeErrors
| Opt_WarnDeferredOutOfScopeVariables
| Opt_WarnNonCanonicalMonadInstances -- since 8.0
| Opt_WarnNonCanonicalMonadFailInstances -- since 8.0
| Opt_WarnNonCanonicalMonoidInstances -- since 8.0
| Opt_WarnMissingPatternSynonymSignatures -- since 8.0
| Opt_WarnUnrecognisedWarningFlags -- since 8.0
| Opt_WarnSimplifiableClassConstraints -- Since 8.2
| Opt_WarnCPPUndef -- Since 8.2
deriving (Eq, Show, Enum)
data Language = Haskell98 | Haskell2010
deriving (Eq, Enum, Show)
instance Outputable Language where
ppr = text . show
-- | The various Safe Haskell modes
data SafeHaskellMode
= Sf_None
| Sf_Unsafe
| Sf_Trustworthy
| Sf_Safe
deriving (Eq)
instance Show SafeHaskellMode where
show Sf_None = "None"
show Sf_Unsafe = "Unsafe"
show Sf_Trustworthy = "Trustworthy"
show Sf_Safe = "Safe"
instance Outputable SafeHaskellMode where
ppr = text . show
-- | Contains not only a collection of 'GeneralFlag's but also a plethora of
-- information relating to the compilation of a single file or GHC session
data DynFlags = DynFlags {
ghcMode :: GhcMode,
ghcLink :: GhcLink,
hscTarget :: HscTarget,
settings :: Settings,
verbosity :: Int, -- ^ Verbosity level: see Note [Verbosity levels]
optLevel :: Int, -- ^ Optimisation level
debugLevel :: Int, -- ^ How much debug information to produce
simplPhases :: Int, -- ^ Number of simplifier phases
maxSimplIterations :: Int, -- ^ Max simplifier iterations
maxPmCheckIterations :: Int, -- ^ Max no iterations for pm checking
ruleCheck :: Maybe String,
strictnessBefore :: [Int], -- ^ Additional demand analysis
parMakeCount :: Maybe Int, -- ^ The number of modules to compile in parallel
-- in --make mode, where Nothing ==> compile as
-- many in parallel as there are CPUs.
enableTimeStats :: Bool, -- ^ Enable RTS timing statistics?
ghcHeapSize :: Maybe Int, -- ^ The heap size to set.
maxRelevantBinds :: Maybe Int, -- ^ Maximum number of bindings from the type envt
-- to show in type error messages
maxUncoveredPatterns :: Int, -- ^ Maximum number of unmatched patterns to show
-- in non-exhaustiveness warnings
simplTickFactor :: Int, -- ^ Multiplier for simplifier ticks
specConstrThreshold :: Maybe Int, -- ^ Threshold for SpecConstr
specConstrCount :: Maybe Int, -- ^ Max number of specialisations for any one function
specConstrRecursive :: Int, -- ^ Max number of specialisations for recursive types
-- Not optional; otherwise ForceSpecConstr can diverge.
liberateCaseThreshold :: Maybe Int, -- ^ Threshold for LiberateCase
floatLamArgs :: Maybe Int, -- ^ Arg count for lambda floating
-- See CoreMonad.FloatOutSwitches
historySize :: Int, -- ^ Simplification history size
importPaths :: [FilePath],
mainModIs :: Module,
mainFunIs :: Maybe String,
reductionDepth :: IntWithInf, -- ^ Typechecker maximum stack depth
solverIterations :: IntWithInf, -- ^ Number of iterations in the constraints solver
-- Typically only 1 is needed
thisInstalledUnitId :: InstalledUnitId,
thisComponentId_ :: Maybe ComponentId,
thisUnitIdInsts_ :: Maybe [(ModuleName, Module)],
-- ways
ways :: [Way], -- ^ Way flags from the command line
buildTag :: String, -- ^ The global \"way\" (e.g. \"p\" for prof)
rtsBuildTag :: String, -- ^ The RTS \"way\"
-- For object splitting
splitInfo :: Maybe (String,Int),
-- paths etc.
objectDir :: Maybe String,
dylibInstallName :: Maybe String,
hiDir :: Maybe String,
stubDir :: Maybe String,
dumpDir :: Maybe String,
objectSuf :: String,
hcSuf :: String,
hiSuf :: String,
canGenerateDynamicToo :: IORef Bool,
dynObjectSuf :: String,
dynHiSuf :: String,
-- Packages.isDllName needs to know whether a call is within a
-- single DLL or not. Normally it does this by seeing if the call
-- is to the same package, but for the ghc package, we split the
-- package between 2 DLLs. The dllSplit tells us which sets of
-- modules are in which package.
dllSplitFile :: Maybe FilePath,
dllSplit :: Maybe [Set String],
outputFile :: Maybe String,
dynOutputFile :: Maybe String,
outputHi :: Maybe String,
dynLibLoader :: DynLibLoader,
-- | This is set by 'DriverPipeline.runPipeline' based on where
-- its output is going.
dumpPrefix :: Maybe FilePath,
-- | Override the 'dumpPrefix' set by 'DriverPipeline.runPipeline'.
-- Set by @-ddump-file-prefix@
dumpPrefixForce :: Maybe FilePath,
ldInputs :: [Option],
includePaths :: [String],
libraryPaths :: [String],
frameworkPaths :: [String], -- used on darwin only
cmdlineFrameworks :: [String], -- ditto
rtsOpts :: Maybe String,
rtsOptsEnabled :: RtsOptsEnabled,
rtsOptsSuggestions :: Bool,
hpcDir :: String, -- ^ Path to store the .mix files
-- Plugins
pluginModNames :: [ModuleName],
pluginModNameOpts :: [(ModuleName,String)],
frontendPluginOpts :: [String],
-- GHC API hooks
hooks :: Hooks,
-- For ghc -M
depMakefile :: FilePath,
depIncludePkgDeps :: Bool,
depExcludeMods :: [ModuleName],
depSuffixes :: [String],
-- Package flags
extraPkgConfs :: [PkgConfRef] -> [PkgConfRef],
-- ^ The @-package-db@ flags given on the command line, in the order
-- they appeared.
ignorePackageFlags :: [IgnorePackageFlag],
-- ^ The @-ignore-package@ flags from the command line
packageFlags :: [PackageFlag],
-- ^ The @-package@ and @-hide-package@ flags from the command-line
pluginPackageFlags :: [PackageFlag],
-- ^ The @-plugin-package-id@ flags from command line
trustFlags :: [TrustFlag],
-- ^ The @-trust@ and @-distrust@ flags
packageEnv :: Maybe FilePath,
-- ^ Filepath to the package environment file (if overriding default)
-- Package state
-- NB. do not modify this field, it is calculated by
-- Packages.initPackages
pkgDatabase :: Maybe [(FilePath, [PackageConfig])],
pkgState :: PackageState,
-- Temporary files
-- These have to be IORefs, because the defaultCleanupHandler needs to
-- know what to clean when an exception happens
filesToClean :: IORef [FilePath],
dirsToClean :: IORef (Map FilePath FilePath),
filesToNotIntermediateClean :: IORef [FilePath],
-- The next available suffix to uniquely name a temp file, updated atomically
nextTempSuffix :: IORef Int,
-- Names of files which were generated from -ddump-to-file; used to
-- track which ones we need to truncate because it's our first run
-- through
generatedDumps :: IORef (Set FilePath),
-- hsc dynamic flags
dumpFlags :: IntSet,
generalFlags :: IntSet,
warningFlags :: IntSet,
fatalWarningFlags :: IntSet,
-- Don't change this without updating extensionFlags:
language :: Maybe Language,
-- | Safe Haskell mode
safeHaskell :: SafeHaskellMode,
safeInfer :: Bool,
safeInferred :: Bool,
-- We store the location of where some extension and flags were turned on so
-- we can produce accurate error messages when Safe Haskell fails due to
-- them.
thOnLoc :: SrcSpan,
newDerivOnLoc :: SrcSpan,
overlapInstLoc :: SrcSpan,
incoherentOnLoc :: SrcSpan,
pkgTrustOnLoc :: SrcSpan,
warnSafeOnLoc :: SrcSpan,
warnUnsafeOnLoc :: SrcSpan,
trustworthyOnLoc :: SrcSpan,
-- Don't change this without updating extensionFlags:
extensions :: [OnOff LangExt.Extension],
-- extensionFlags should always be equal to
-- flattenExtensionFlags language extensions
-- LangExt.Extension is defined in libraries/ghc-boot so that it can be used
-- by template-haskell
extensionFlags :: IntSet,
-- Unfolding control
-- See Note [Discounts and thresholds] in CoreUnfold
ufCreationThreshold :: Int,
ufUseThreshold :: Int,
ufFunAppDiscount :: Int,
ufDictDiscount :: Int,
ufKeenessFactor :: Float,
ufDearOp :: Int,
maxWorkerArgs :: Int,
ghciHistSize :: Int,
-- | MsgDoc output action: use "ErrUtils" instead of this if you can
log_action :: LogAction,
flushOut :: FlushOut,
flushErr :: FlushErr,
haddockOptions :: Maybe String,
-- | GHCi scripts specified by -ghci-script, in reverse order
ghciScripts :: [String],
-- Output style options
pprUserLength :: Int,
pprCols :: Int,
useUnicode :: Bool,
useColor :: OverridingBool,
canUseColor :: Bool,
-- | what kind of {-# SCC #-} to add automatically
profAuto :: ProfAuto,
interactivePrint :: Maybe String,
nextWrapperNum :: IORef (ModuleEnv Int),
-- | Machine dependent flags (-m<blah> stuff)
sseVersion :: Maybe SseVersion,
avx :: Bool,
avx2 :: Bool,
avx512cd :: Bool, -- Enable AVX-512 Conflict Detection Instructions.
avx512er :: Bool, -- Enable AVX-512 Exponential and Reciprocal Instructions.
avx512f :: Bool, -- Enable AVX-512 instructions.
avx512pf :: Bool, -- Enable AVX-512 PreFetch Instructions.
-- | Run-time linker information (what options we need, etc.)
rtldInfo :: IORef (Maybe LinkerInfo),
-- | Run-time compiler information
rtccInfo :: IORef (Maybe CompilerInfo),
-- Constants used to control the amount of optimization done.
-- | Max size, in bytes, of inline array allocations.
maxInlineAllocSize :: Int,
-- | Only inline memcpy if it generates no more than this many
-- pseudo (roughly: Cmm) instructions.
maxInlineMemcpyInsns :: Int,
-- | Only inline memset if it generates no more than this many
-- pseudo (roughly: Cmm) instructions.
maxInlineMemsetInsns :: Int,
-- | Reverse the order of error messages in GHC/GHCi
reverseErrors :: Bool,
-- | Unique supply configuration for testing build determinism
initialUnique :: Int,
uniqueIncrement :: Int
}
class HasDynFlags m where
getDynFlags :: m DynFlags
{- It would be desirable to have the more generalised
instance (MonadTrans t, Monad m, HasDynFlags m) => HasDynFlags (t m) where
getDynFlags = lift getDynFlags
instance definition. However, that definition would overlap with the
`HasDynFlags (GhcT m)` instance. Instead we define instances for a
couple of common Monad transformers explicitly. -}
instance (Monoid a, Monad m, HasDynFlags m) => HasDynFlags (WriterT a m) where
getDynFlags = lift getDynFlags
instance (Monad m, HasDynFlags m) => HasDynFlags (ReaderT a m) where
getDynFlags = lift getDynFlags
instance (Monad m, HasDynFlags m) => HasDynFlags (MaybeT m) where
getDynFlags = lift getDynFlags
instance (Monad m, HasDynFlags m) => HasDynFlags (ExceptT e m) where
getDynFlags = lift getDynFlags
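{- A sketch of how an analogous instance for another transformer could be
   written, following the same pattern (StateT is shown purely as a
   hypothetical example; it is not an instance provided by this module):

     instance (Monad m, HasDynFlags m) => HasDynFlags (StateT s m) where
         getDynFlags = lift getDynFlags
-}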
class ContainsDynFlags t where
extractDynFlags :: t -> DynFlags
data ProfAuto
= NoProfAuto -- ^ no SCC annotations added
| ProfAutoAll -- ^ top-level and nested functions are annotated
| ProfAutoTop -- ^ top-level functions annotated only
| ProfAutoExports -- ^ exported functions annotated only
| ProfAutoCalls -- ^ annotate call-sites
deriving (Eq,Enum)
data Settings = Settings {
sTargetPlatform :: Platform, -- Filled in by SysTools
sGhcUsagePath :: FilePath, -- Filled in by SysTools
sGhciUsagePath :: FilePath, -- ditto
sTopDir :: FilePath,
sTmpDir :: String, -- no trailing '/'
sProgramName :: String,
sProjectVersion :: String,
-- You shouldn't need to look things up in rawSettings directly.
-- They should have their own fields instead.
sRawSettings :: [(String, String)],
sExtraGccViaCFlags :: [String],
sSystemPackageConfig :: FilePath,
sLdSupportsCompactUnwind :: Bool,
sLdSupportsBuildId :: Bool,
sLdSupportsFilelist :: Bool,
sLdIsGnuLd :: Bool,
sGccSupportsNoPie :: Bool,
-- commands for particular phases
sPgm_L :: String,
sPgm_P :: (String,[Option]),
sPgm_F :: String,
sPgm_c :: (String,[Option]),
sPgm_s :: (String,[Option]),
sPgm_a :: (String,[Option]),
sPgm_l :: (String,[Option]),
sPgm_dll :: (String,[Option]),
sPgm_T :: String,
sPgm_windres :: String,
sPgm_libtool :: String,
sPgm_lo :: (String,[Option]), -- LLVM: opt llvm optimiser
sPgm_lc :: (String,[Option]), -- LLVM: llc static compiler
sPgm_i :: String,
-- options for particular phases
sOpt_L :: [String],
sOpt_P :: [String],
sOpt_F :: [String],
sOpt_c :: [String],
sOpt_a :: [String],
sOpt_l :: [String],
sOpt_windres :: [String],
sOpt_lo :: [String], -- LLVM: llvm optimiser
sOpt_lc :: [String], -- LLVM: llc static compiler
sOpt_i :: [String], -- iserv options
sPlatformConstants :: PlatformConstants
}
targetPlatform :: DynFlags -> Platform
targetPlatform dflags = sTargetPlatform (settings dflags)
programName :: DynFlags -> String
programName dflags = sProgramName (settings dflags)
projectVersion :: DynFlags -> String
projectVersion dflags = sProjectVersion (settings dflags)
ghcUsagePath :: DynFlags -> FilePath
ghcUsagePath dflags = sGhcUsagePath (settings dflags)
ghciUsagePath :: DynFlags -> FilePath
ghciUsagePath dflags = sGhciUsagePath (settings dflags)
topDir :: DynFlags -> FilePath
topDir dflags = sTopDir (settings dflags)
tmpDir :: DynFlags -> String
tmpDir dflags = sTmpDir (settings dflags)
rawSettings :: DynFlags -> [(String, String)]
rawSettings dflags = sRawSettings (settings dflags)
extraGccViaCFlags :: DynFlags -> [String]
extraGccViaCFlags dflags = sExtraGccViaCFlags (settings dflags)
systemPackageConfig :: DynFlags -> FilePath
systemPackageConfig dflags = sSystemPackageConfig (settings dflags)
pgm_L :: DynFlags -> String
pgm_L dflags = sPgm_L (settings dflags)
pgm_P :: DynFlags -> (String,[Option])
pgm_P dflags = sPgm_P (settings dflags)
pgm_F :: DynFlags -> String
pgm_F dflags = sPgm_F (settings dflags)
pgm_c :: DynFlags -> (String,[Option])
pgm_c dflags = sPgm_c (settings dflags)
pgm_s :: DynFlags -> (String,[Option])
pgm_s dflags = sPgm_s (settings dflags)
pgm_a :: DynFlags -> (String,[Option])
pgm_a dflags = sPgm_a (settings dflags)
pgm_l :: DynFlags -> (String,[Option])
pgm_l dflags = sPgm_l (settings dflags)
pgm_dll :: DynFlags -> (String,[Option])
pgm_dll dflags = sPgm_dll (settings dflags)
pgm_T :: DynFlags -> String
pgm_T dflags = sPgm_T (settings dflags)
pgm_windres :: DynFlags -> String
pgm_windres dflags = sPgm_windres (settings dflags)
pgm_libtool :: DynFlags -> String
pgm_libtool dflags = sPgm_libtool (settings dflags)
pgm_lo :: DynFlags -> (String,[Option])
pgm_lo dflags = sPgm_lo (settings dflags)
pgm_lc :: DynFlags -> (String,[Option])
pgm_lc dflags = sPgm_lc (settings dflags)
pgm_i :: DynFlags -> String
pgm_i dflags = sPgm_i (settings dflags)
opt_L :: DynFlags -> [String]
opt_L dflags = sOpt_L (settings dflags)
opt_P :: DynFlags -> [String]
opt_P dflags = concatMap (wayOptP (targetPlatform dflags)) (ways dflags)
++ sOpt_P (settings dflags)
opt_F :: DynFlags -> [String]
opt_F dflags = sOpt_F (settings dflags)
opt_c :: DynFlags -> [String]
opt_c dflags = concatMap (wayOptc (targetPlatform dflags)) (ways dflags)
++ sOpt_c (settings dflags)
opt_a :: DynFlags -> [String]
opt_a dflags = sOpt_a (settings dflags)
opt_l :: DynFlags -> [String]
opt_l dflags = concatMap (wayOptl (targetPlatform dflags)) (ways dflags)
++ sOpt_l (settings dflags)
opt_windres :: DynFlags -> [String]
opt_windres dflags = sOpt_windres (settings dflags)
opt_lo :: DynFlags -> [String]
opt_lo dflags = sOpt_lo (settings dflags)
opt_lc :: DynFlags -> [String]
opt_lc dflags = sOpt_lc (settings dflags)
opt_i :: DynFlags -> [String]
opt_i dflags = sOpt_i (settings dflags)
-- | The directory for this version of ghc in the user's app directory
-- (typically something like @~/.ghc/x86_64-linux-7.6.3@)
--
versionedAppDir :: DynFlags -> MaybeT IO FilePath
versionedAppDir dflags = do
-- Make sure we handle the case the HOME isn't set (see #11678)
appdir <- tryMaybeT $ getAppUserDataDirectory (programName dflags)
return $ appdir </> versionedFilePath dflags
-- | A filepath like @x86_64-linux-7.6.3@ with the platform string to use when
-- constructing platform-version-dependent files that need to co-exist.
--
versionedFilePath :: DynFlags -> FilePath
versionedFilePath dflags = TARGET_ARCH
++ '-':TARGET_OS
++ '-':projectVersion dflags
-- NB: This functionality is reimplemented in Cabal, so if you
-- change it, be sure to update Cabal.
-- | The target code type of the compilation (if any).
--
-- Whenever you change the target, also make sure to set 'ghcLink' to
-- something sensible.
--
-- 'HscNothing' can be used to avoid generating any output, however, note
-- that:
--
-- * If a program uses Template Haskell the typechecker may try to run code
-- from an imported module. This will fail if no code has been generated
-- for this module. You can use 'GHC.needsTemplateHaskell' to detect
-- whether this might be the case and choose to either switch to a
-- different target or avoid typechecking such modules. (The latter may be
-- preferable for security reasons.)
--
data HscTarget
= HscC -- ^ Generate C code.
| HscAsm -- ^ Generate assembly using the native code generator.
| HscLlvm -- ^ Generate assembly using the llvm code generator.
| HscInterpreted -- ^ Generate bytecode. (Requires 'LinkInMemory')
| HscNothing -- ^ Don't generate any code. See notes above.
deriving (Eq, Show)
-- | Will this target result in an object file on the disk?
isObjectTarget :: HscTarget -> Bool
isObjectTarget HscC = True
isObjectTarget HscAsm = True
isObjectTarget HscLlvm = True
isObjectTarget _ = False
-- | Does this target retain *all* top-level bindings for a module,
-- rather than just the exported bindings, in the TypeEnv and compiled
-- code (if any)? In interpreted mode we do this, so that GHCi can
-- call functions inside a module. In HscNothing mode we also do it,
-- so that Haddock can get access to the GlobalRdrEnv for a module
-- after typechecking it.
targetRetainsAllBindings :: HscTarget -> Bool
targetRetainsAllBindings HscInterpreted = True
targetRetainsAllBindings HscNothing = True
targetRetainsAllBindings _ = False
-- | The 'GhcMode' tells us whether we're doing multi-module
-- compilation (controlled via the "GHC" API) or one-shot
-- (single-module) compilation. This makes a difference primarily to
-- the "Finder": in one-shot mode we look for interface files for
-- imported modules, but in multi-module mode we look for source files
-- in order to check whether they need to be recompiled.
data GhcMode
= CompManager -- ^ @\-\-make@, GHCi, etc.
| OneShot -- ^ @ghc -c Foo.hs@
| MkDepend -- ^ @ghc -M@, see "Finder" for why we need this
deriving Eq
instance Outputable GhcMode where
ppr CompManager = text "CompManager"
ppr OneShot = text "OneShot"
ppr MkDepend = text "MkDepend"
isOneShot :: GhcMode -> Bool
isOneShot OneShot = True
isOneShot _other = False
-- | What to do in the link step, if there is one.
data GhcLink
= NoLink -- ^ Don't link at all
| LinkBinary -- ^ Link object code into a binary
| LinkInMemory -- ^ Use the in-memory dynamic linker (works for both
-- bytecode and object code).
| LinkDynLib -- ^ Link objects into a dynamic lib (DLL on Windows, DSO on ELF platforms)
| LinkStaticLib -- ^ Link objects into a static lib
deriving (Eq, Show)
isNoLink :: GhcLink -> Bool
isNoLink NoLink = True
isNoLink _ = False
-- | We accept flags which make packages visible, but how they select
-- the package varies; this data type reflects what selection criterion
-- is used.
data PackageArg =
PackageArg String -- ^ @-package@, by 'PackageName'
| UnitIdArg UnitId -- ^ @-package-id@, by 'UnitId'
deriving (Eq, Show)
instance Outputable PackageArg where
ppr (PackageArg pn) = text "package" <+> text pn
ppr (UnitIdArg uid) = text "unit" <+> ppr uid
-- | Represents the renaming that may be associated with an exposed
-- package, e.g. the @rns@ part of @-package "foo (rns)"@.
--
-- Here are some example parsings of the package flags (where
-- a string literal is punned to be a 'ModuleName'):
--
-- * @-package foo@ is @ModRenaming True []@
-- * @-package foo ()@ is @ModRenaming False []@
-- * @-package foo (A)@ is @ModRenaming False [("A", "A")]@
-- * @-package foo (A as B)@ is @ModRenaming False [("A", "B")]@
-- * @-package foo with (A as B)@ is @ModRenaming True [("A", "B")]@
data ModRenaming = ModRenaming {
modRenamingWithImplicit :: Bool, -- ^ Bring all exposed modules into scope?
modRenamings :: [(ModuleName, ModuleName)] -- ^ Bring module @m@ into scope
-- under name @n@.
} deriving (Eq)
instance Outputable ModRenaming where
ppr (ModRenaming b rns) = ppr b <+> parens (ppr rns)
-- | Flags for manipulating the set of non-broken packages.
newtype IgnorePackageFlag = IgnorePackage String -- ^ @-ignore-package@
deriving (Eq)
-- | Flags for manipulating package trust.
data TrustFlag
= TrustPackage String -- ^ @-trust@
| DistrustPackage String -- ^ @-distrust@
deriving (Eq)
-- | Flags for manipulating packages visibility.
data PackageFlag
= ExposePackage String PackageArg ModRenaming -- ^ @-package@, @-package-id@
| HidePackage String -- ^ @-hide-package@
deriving (Eq)
-- NB: equality instance is used by InteractiveUI to test if
-- package flags have changed.
instance Outputable PackageFlag where
ppr (ExposePackage n arg rn) = text n <> braces (ppr arg <+> ppr rn)
ppr (HidePackage str) = text "-hide-package" <+> text str
defaultHscTarget :: Platform -> HscTarget
defaultHscTarget = defaultObjectTarget
-- | The 'HscTarget' value corresponding to the default way to create
-- object files on the current platform.
defaultObjectTarget :: Platform -> HscTarget
defaultObjectTarget platform
| platformUnregisterised platform = HscC
| cGhcWithNativeCodeGen == "YES" = HscAsm
| otherwise = HscLlvm
tablesNextToCode :: DynFlags -> Bool
tablesNextToCode dflags
= mkTablesNextToCode (platformUnregisterised (targetPlatform dflags))
-- Determines whether we will be compiling
-- info tables that reside just before the entry code, or with an
-- indirection to the entry code. See TABLES_NEXT_TO_CODE in
-- includes/rts/storage/InfoTables.h.
mkTablesNextToCode :: Bool -> Bool
mkTablesNextToCode unregisterised
= not unregisterised && cGhcEnableTablesNextToCode == "YES"
data DynLibLoader
= Deployable
| SystemDependent
deriving Eq
data RtsOptsEnabled = RtsOptsNone | RtsOptsSafeOnly | RtsOptsAll
deriving (Show)
data OverridingBool
= Auto
| Always
| Never
deriving Show
overrideWith :: Bool -> OverridingBool -> Bool
overrideWith b Auto = b
overrideWith _ Always = True
overrideWith _ Never = False
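-- For example (derived directly from the equations above):
--
-- > overrideWith True  Auto   == True
-- > overrideWith False Auto   == False
-- > overrideWith False Always == True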
-----------------------------------------------------------------------------
-- Ways
-- The central concept of a "way" is that all objects in a given
-- program must be compiled in the same "way". Certain options change
-- parameters of the virtual machine, eg. profiling adds an extra word
-- to the object header, so profiling objects cannot be linked with
-- non-profiling objects.
-- After parsing the command-line options, we determine which "way" we
-- are building - this might be a combination way, eg. profiling+threaded.
-- We then find the "build-tag" associated with this way, and this
-- becomes the suffix used to find .hi files and libraries used in
-- this compilation.
data Way
= WayCustom String -- for GHC API clients building custom variants
| WayThreaded
| WayDebug
| WayProf
| WayEventLog
| WayDyn
deriving (Eq, Ord, Show)
allowed_combination :: [Way] -> Bool
allowed_combination way = and [ x `allowedWith` y
| x <- way, y <- way, x < y ]
where
-- Note ordering in these tests: the left argument is
-- <= the right argument, according to the Ord instance
-- on Way above.
-- dyn is allowed with everything
_ `allowedWith` WayDyn = True
WayDyn `allowedWith` _ = True
-- debug is allowed with everything
_ `allowedWith` WayDebug = True
WayDebug `allowedWith` _ = True
(WayCustom {}) `allowedWith` _ = True
WayThreaded `allowedWith` WayProf = True
WayThreaded `allowedWith` WayEventLog = True
WayProf `allowedWith` WayEventLog = True
_ `allowedWith` _ = False
mkBuildTag :: [Way] -> String
mkBuildTag ways = concat (intersperse "_" (map wayTag ways))
wayTag :: Way -> String
wayTag (WayCustom xs) = xs
wayTag WayThreaded = "thr"
wayTag WayDebug = "debug"
wayTag WayDyn = "dyn"
wayTag WayProf = "p"
wayTag WayEventLog = "l"
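-- For example (derived from the definitions above):
--
-- > mkBuildTag [WayThreaded, WayProf] == "thr_p"
-- > mkBuildTag []                     == ""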
wayRTSOnly :: Way -> Bool
wayRTSOnly (WayCustom {}) = False
wayRTSOnly WayThreaded = True
wayRTSOnly WayDebug = True
wayRTSOnly WayDyn = False
wayRTSOnly WayProf = False
wayRTSOnly WayEventLog = True
wayDesc :: Way -> String
wayDesc (WayCustom xs) = xs
wayDesc WayThreaded = "Threaded"
wayDesc WayDebug = "Debug"
wayDesc WayDyn = "Dynamic"
wayDesc WayProf = "Profiling"
wayDesc WayEventLog = "RTS Event Logging"
-- Turn these flags on when enabling this way
wayGeneralFlags :: Platform -> Way -> [GeneralFlag]
wayGeneralFlags _ (WayCustom {}) = []
wayGeneralFlags _ WayThreaded = []
wayGeneralFlags _ WayDebug = []
wayGeneralFlags _ WayDyn = [Opt_PIC]
-- We could get away without adding -fPIC when compiling the
-- modules of a program that is to be linked with -dynamic; the
-- program itself does not need to be position-independent, only
-- the libraries need to be. HOWEVER, GHCi links objects into a
-- .so before loading the .so using the system linker. Since only
-- PIC objects can be linked into a .so, we have to compile even
-- modules of the main program with -fPIC when using -dynamic.
wayGeneralFlags _ WayProf = [Opt_SccProfilingOn]
wayGeneralFlags _ WayEventLog = []
-- Turn these flags off when enabling this way
wayUnsetGeneralFlags :: Platform -> Way -> [GeneralFlag]
wayUnsetGeneralFlags _ (WayCustom {}) = []
wayUnsetGeneralFlags _ WayThreaded = []
wayUnsetGeneralFlags _ WayDebug = []
wayUnsetGeneralFlags _ WayDyn = [-- There's no point splitting objects
-- when we're going to be dynamically
-- linking. Plus it breaks compilation
-- on OSX x86.
Opt_SplitObjs,
-- If splitobjs wasn't useful for this,
-- assume sections aren't either.
Opt_SplitSections]
wayUnsetGeneralFlags _ WayProf = []
wayUnsetGeneralFlags _ WayEventLog = []
wayOptc :: Platform -> Way -> [String]
wayOptc _ (WayCustom {}) = []
wayOptc platform WayThreaded = case platformOS platform of
OSOpenBSD -> ["-pthread"]
OSNetBSD -> ["-pthread"]
_ -> []
wayOptc _ WayDebug = []
wayOptc _ WayDyn = []
wayOptc _ WayProf = ["-DPROFILING"]
wayOptc _ WayEventLog = ["-DTRACING"]
wayOptl :: Platform -> Way -> [String]
wayOptl _ (WayCustom {}) = []
wayOptl platform WayThreaded =
case platformOS platform of
-- FreeBSD's default threading library is the KSE-based M:N libpthread,
-- which GHC has some problems with. It's currently not clear whether
-- the problems are our fault or theirs, but it seems that using the
-- alternative 1:1 threading library libthr works around it:
OSFreeBSD -> ["-lthr"]
OSOpenBSD -> ["-pthread"]
OSNetBSD -> ["-pthread"]
_ -> []
wayOptl _ WayDebug = []
wayOptl _ WayDyn = []
wayOptl _ WayProf = []
wayOptl _ WayEventLog = []
wayOptP :: Platform -> Way -> [String]
wayOptP _ (WayCustom {}) = []
wayOptP _ WayThreaded = []
wayOptP _ WayDebug = []
wayOptP _ WayDyn = []
wayOptP _ WayProf = ["-DPROFILING"]
wayOptP _ WayEventLog = ["-DTRACING"]
whenGeneratingDynamicToo :: MonadIO m => DynFlags -> m () -> m ()
whenGeneratingDynamicToo dflags f = ifGeneratingDynamicToo dflags f (return ())
ifGeneratingDynamicToo :: MonadIO m => DynFlags -> m a -> m a -> m a
ifGeneratingDynamicToo dflags f g = generateDynamicTooConditional dflags f g g
whenCannotGenerateDynamicToo :: MonadIO m => DynFlags -> m () -> m ()
whenCannotGenerateDynamicToo dflags f
= ifCannotGenerateDynamicToo dflags f (return ())
ifCannotGenerateDynamicToo :: MonadIO m => DynFlags -> m a -> m a -> m a
ifCannotGenerateDynamicToo dflags f g
= generateDynamicTooConditional dflags g f g
generateDynamicTooConditional :: MonadIO m
=> DynFlags -> m a -> m a -> m a -> m a
generateDynamicTooConditional dflags canGen cannotGen notTryingToGen
= if gopt Opt_BuildDynamicToo dflags
then do let ref = canGenerateDynamicToo dflags
b <- liftIO $ readIORef ref
if b then canGen else cannotGen
else notTryingToGen
dynamicTooMkDynamicDynFlags :: DynFlags -> DynFlags
dynamicTooMkDynamicDynFlags dflags0
= let dflags1 = addWay' WayDyn dflags0
dflags2 = dflags1 {
outputFile = dynOutputFile dflags1,
hiSuf = dynHiSuf dflags1,
objectSuf = dynObjectSuf dflags1
}
dflags3 = updateWays dflags2
dflags4 = gopt_unset dflags3 Opt_BuildDynamicToo
in dflags4
-----------------------------------------------------------------------------
-- | Used by 'GHC.runGhc' to partially initialize a new 'DynFlags' value
initDynFlags :: DynFlags -> IO DynFlags
initDynFlags dflags = do
let -- We can't build with dynamic-too on Windows, as labels before
-- the fork point are different depending on whether we are
-- building dynamically or not.
platformCanGenerateDynamicToo
= platformOS (targetPlatform dflags) /= OSMinGW32
refCanGenerateDynamicToo <- newIORef platformCanGenerateDynamicToo
refNextTempSuffix <- newIORef 0
refFilesToClean <- newIORef []
refDirsToClean <- newIORef Map.empty
refFilesToNotIntermediateClean <- newIORef []
refGeneratedDumps <- newIORef Set.empty
refRtldInfo <- newIORef Nothing
refRtccInfo <- newIORef Nothing
wrapperNum <- newIORef emptyModuleEnv
canUseUnicode <- do let enc = localeEncoding
str = "‘’"
(withCString enc str $ \cstr ->
do str' <- peekCString enc cstr
return (str == str'))
`catchIOError` \_ -> return False
canUseColor <- stderrSupportsAnsiColors
return dflags{
canGenerateDynamicToo = refCanGenerateDynamicToo,
nextTempSuffix = refNextTempSuffix,
filesToClean = refFilesToClean,
dirsToClean = refDirsToClean,
filesToNotIntermediateClean = refFilesToNotIntermediateClean,
generatedDumps = refGeneratedDumps,
nextWrapperNum = wrapperNum,
useUnicode = canUseUnicode,
canUseColor = canUseColor,
rtldInfo = refRtldInfo,
rtccInfo = refRtccInfo
}
-- | Check if ANSI escape sequences can be used to control color in stderr.
stderrSupportsAnsiColors :: IO Bool
stderrSupportsAnsiColors = do
#if defined MIN_VERSION_terminfo
queryTerminal stdError `andM` do
(termSupportsColors <$> setupTermFromEnv)
`catch` \ (_ :: SetupTermError) ->
pure False
where
andM :: Monad m => m Bool -> m Bool -> m Bool
andM mx my = do
x <- mx
if x
then my
else pure x
termSupportsColors :: Terminal -> Bool
termSupportsColors term = fromMaybe 0 (getCapability term termColors) > 0
#elif defined mingw32_HOST_OS
foldl1 orM
[ (/= "") <$> getEnvLM "ANSICON"
, (== "on") <$> getEnvLM "ConEmuANSI"
, (== "xterm") <$> getEnvLM "TERM"
, do
h <- Win32.getStdHandle Win32.sTD_ERROR_HANDLE
mode <- getConsoleMode h
if modeHasVTP mode
then pure True
else do
setConsoleMode h (modeAddVTP mode)
modeHasVTP <$> getConsoleMode h
`catch` \ (_ :: IOError) ->
pure False
]
where
orM :: Monad m => m Bool -> m Bool -> m Bool
orM mx my = do
x <- mx
if x
then pure x
else my
getEnvLM :: String -> IO String
getEnvLM name = map toLower . fromMaybe "" <$> lookupEnv name
modeHasVTP :: Win32.DWORD -> Bool
modeHasVTP mode = mode .&. eNABLE_VIRTUAL_TERMINAL_PROCESSING /= 0
modeAddVTP :: Win32.DWORD -> Win32.DWORD
modeAddVTP mode = mode .|. eNABLE_VIRTUAL_TERMINAL_PROCESSING
eNABLE_VIRTUAL_TERMINAL_PROCESSING :: Win32.DWORD
eNABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
getConsoleMode :: Win32.HANDLE -> IO Win32.DWORD
getConsoleMode h = with 64 $ \ mode -> do
Win32.failIfFalse_ "GetConsoleMode" (c_GetConsoleMode h mode)
peek mode
setConsoleMode :: Win32.HANDLE -> Win32.DWORD -> IO ()
setConsoleMode h mode = do
Win32.failIfFalse_ "SetConsoleMode" (c_SetConsoleMode h mode)
foreign import WINAPI unsafe "windows.h GetConsoleMode" c_GetConsoleMode
:: Win32.HANDLE -> Ptr Win32.DWORD -> IO Win32.BOOL
foreign import WINAPI unsafe "windows.h SetConsoleMode" c_SetConsoleMode
:: Win32.HANDLE -> Win32.DWORD -> IO Win32.BOOL
#else
pure False
#endif
-- | The normal 'DynFlags'. Note that they are not suitable for use in this form
-- and must be fully initialized by 'GHC.runGhc' first.
defaultDynFlags :: Settings -> DynFlags
defaultDynFlags mySettings =
-- See Note [Updating flag description in the User's Guide]
DynFlags {
ghcMode = CompManager,
ghcLink = LinkBinary,
hscTarget = defaultHscTarget (sTargetPlatform mySettings),
verbosity = 0,
optLevel = 0,
debugLevel = 0,
simplPhases = 2,
maxSimplIterations = 4,
maxPmCheckIterations = 2000000,
ruleCheck = Nothing,
maxRelevantBinds = Just 6,
maxUncoveredPatterns = 4,
simplTickFactor = 100,
specConstrThreshold = Just 2000,
specConstrCount = Just 3,
specConstrRecursive = 3,
liberateCaseThreshold = Just 2000,
floatLamArgs = Just 0, -- Default: float only if no fvs
historySize = 20,
strictnessBefore = [],
parMakeCount = Just 1,
enableTimeStats = False,
ghcHeapSize = Nothing,
importPaths = ["."],
mainModIs = mAIN,
mainFunIs = Nothing,
reductionDepth = treatZeroAsInf mAX_REDUCTION_DEPTH,
solverIterations = treatZeroAsInf mAX_SOLVER_ITERATIONS,
thisInstalledUnitId = toInstalledUnitId mainUnitId,
thisUnitIdInsts_ = Nothing,
thisComponentId_ = Nothing,
objectDir = Nothing,
dylibInstallName = Nothing,
hiDir = Nothing,
stubDir = Nothing,
dumpDir = Nothing,
objectSuf = phaseInputExt StopLn,
hcSuf = phaseInputExt HCc,
hiSuf = "hi",
canGenerateDynamicToo = panic "defaultDynFlags: No canGenerateDynamicToo",
dynObjectSuf = "dyn_" ++ phaseInputExt StopLn,
dynHiSuf = "dyn_hi",
dllSplitFile = Nothing,
dllSplit = Nothing,
pluginModNames = [],
pluginModNameOpts = [],
frontendPluginOpts = [],
hooks = emptyHooks,
outputFile = Nothing,
dynOutputFile = Nothing,
outputHi = Nothing,
dynLibLoader = SystemDependent,
dumpPrefix = Nothing,
dumpPrefixForce = Nothing,
ldInputs = [],
includePaths = [],
libraryPaths = [],
frameworkPaths = [],
cmdlineFrameworks = [],
rtsOpts = Nothing,
rtsOptsEnabled = RtsOptsSafeOnly,
rtsOptsSuggestions = True,
hpcDir = ".hpc",
extraPkgConfs = id,
packageFlags = [],
pluginPackageFlags = [],
ignorePackageFlags = [],
trustFlags = [],
packageEnv = Nothing,
pkgDatabase = Nothing,
-- This gets filled in with GHC.setSessionDynFlags
pkgState = emptyPackageState,
ways = defaultWays mySettings,
buildTag = mkBuildTag (defaultWays mySettings),
rtsBuildTag = mkBuildTag (defaultWays mySettings),
splitInfo = Nothing,
settings = mySettings,
-- ghc -M values
depMakefile = "Makefile",
depIncludePkgDeps = False,
depExcludeMods = [],
depSuffixes = [],
-- end of ghc -M values
nextTempSuffix = panic "defaultDynFlags: No nextTempSuffix",
filesToClean = panic "defaultDynFlags: No filesToClean",
dirsToClean = panic "defaultDynFlags: No dirsToClean",
filesToNotIntermediateClean = panic "defaultDynFlags: No filesToNotIntermediateClean",
generatedDumps = panic "defaultDynFlags: No generatedDumps",
haddockOptions = Nothing,
dumpFlags = IntSet.empty,
generalFlags = IntSet.fromList (map fromEnum (defaultFlags mySettings)),
warningFlags = IntSet.fromList (map fromEnum standardWarnings),
fatalWarningFlags = IntSet.empty,
ghciScripts = [],
language = Nothing,
safeHaskell = Sf_None,
safeInfer = True,
safeInferred = True,
thOnLoc = noSrcSpan,
newDerivOnLoc = noSrcSpan,
overlapInstLoc = noSrcSpan,
incoherentOnLoc = noSrcSpan,
pkgTrustOnLoc = noSrcSpan,
warnSafeOnLoc = noSrcSpan,
warnUnsafeOnLoc = noSrcSpan,
trustworthyOnLoc = noSrcSpan,
extensions = [],
extensionFlags = flattenExtensionFlags Nothing [],
-- The ufCreationThreshold threshold must be reasonably high to
-- take account of possible discounts.
-- E.g. 450 is not enough in 'fulsom' for Interval.sqr to inline
-- into Csg.calc (The unfolding for sqr never makes it into the
-- interface file.)
ufCreationThreshold = 750,
ufUseThreshold = 60,
ufFunAppDiscount = 60,
-- Be fairly keen to inline a function if that means
-- we'll be able to pick the right method from a dictionary
ufDictDiscount = 30,
ufKeenessFactor = 1.5,
ufDearOp = 40,
maxWorkerArgs = 10,
ghciHistSize = 50, -- keep a log of length 50 by default
log_action = defaultLogAction,
flushOut = defaultFlushOut,
flushErr = defaultFlushErr,
pprUserLength = 5,
pprCols = 100,
useUnicode = False,
useColor = Auto,
canUseColor = False,
profAuto = NoProfAuto,
interactivePrint = Nothing,
nextWrapperNum = panic "defaultDynFlags: No nextWrapperNum",
sseVersion = Nothing,
avx = False,
avx2 = False,
avx512cd = False,
avx512er = False,
avx512f = False,
avx512pf = False,
rtldInfo = panic "defaultDynFlags: no rtldInfo",
rtccInfo = panic "defaultDynFlags: no rtccInfo",
maxInlineAllocSize = 128,
maxInlineMemcpyInsns = 32,
maxInlineMemsetInsns = 32,
initialUnique = 0,
uniqueIncrement = 1,
reverseErrors = False
}
defaultWays :: Settings -> [Way]
defaultWays settings = if pc_DYNAMIC_BY_DEFAULT (sPlatformConstants settings)
then [WayDyn]
else []
interpWays :: [Way]
interpWays
| dynamicGhc = [WayDyn]
| rtsIsProfiled = [WayProf]
| otherwise = []
interpreterProfiled :: DynFlags -> Bool
interpreterProfiled dflags
| gopt Opt_ExternalInterpreter dflags = gopt Opt_SccProfilingOn dflags
| otherwise = rtsIsProfiled
interpreterDynamic :: DynFlags -> Bool
interpreterDynamic dflags
| gopt Opt_ExternalInterpreter dflags = WayDyn `elem` ways dflags
| otherwise = dynamicGhc
--------------------------------------------------------------------------
type FatalMessager = String -> IO ()
type LogAction = DynFlags
-> WarnReason
-> Severity
-> SrcSpan
-> PprStyle
-> MsgDoc
-> IO ()
defaultFatalMessager :: FatalMessager
defaultFatalMessager = hPutStrLn stderr
defaultLogAction :: LogAction
defaultLogAction dflags reason severity srcSpan style msg
= case severity of
SevOutput -> printSDoc msg style
SevDump -> printSDoc (msg $$ blankLine) style
SevInteractive -> putStrSDoc msg style
SevInfo -> printErrs msg style
SevFatal -> printErrs msg style
_ -> do hPutChar stderr '\n'
printErrs message style
-- careful (#2302): printErrs prints in UTF-8,
-- whereas converting to string first and using
-- hPutStr would just emit the low 8 bits of
-- each unicode char.
where printSDoc = defaultLogActionHPrintDoc dflags stdout
printErrs = defaultLogActionHPrintDoc dflags stderr
putStrSDoc = defaultLogActionHPutStrDoc dflags stdout
-- Pretty print the warning flag, if any (#10752)
message = mkLocMessageAnn flagMsg severity srcSpan msg
flagMsg = case reason of
NoReason -> Nothing
Reason flag -> (\spec -> "-W" ++ flagSpecName spec ++ flagGrp flag) <$>
flagSpecOf flag
flagGrp flag
| gopt Opt_ShowWarnGroups dflags =
case smallestGroups flag of
[] -> ""
groups -> " (in " ++ intercalate ", " (map ("-W"++) groups) ++ ")"
| otherwise = ""
defaultLogActionHPrintDoc :: DynFlags -> Handle -> SDoc -> PprStyle -> IO ()
defaultLogActionHPrintDoc dflags h d sty
= defaultLogActionHPutStrDoc dflags h (d $$ text "") sty
-- Adds a newline
defaultLogActionHPutStrDoc :: DynFlags -> Handle -> SDoc -> PprStyle -> IO ()
defaultLogActionHPutStrDoc dflags h d sty
= Pretty.printDoc_ Pretty.PageMode (pprCols dflags) h doc
where -- Don't add a newline at the end, so that successive
-- calls to this log-action can output all on the same line
doc = runSDoc d (initSDocContext dflags sty)
newtype FlushOut = FlushOut (IO ())
defaultFlushOut :: FlushOut
defaultFlushOut = FlushOut $ hFlush stdout
newtype FlushErr = FlushErr (IO ())
defaultFlushErr :: FlushErr
defaultFlushErr = FlushErr $ hFlush stderr
{-
Note [Verbosity levels]
~~~~~~~~~~~~~~~~~~~~~~~
0 | print errors & warnings only
1 | minimal verbosity: print "compiling M ... done." for each module.
2 | equivalent to -dshow-passes
3 | equivalent to existing "ghc -v"
4 | "ghc -v -ddump-most"
5 | "ghc -v -ddump-all"
-}
data OnOff a = On a
| Off a
deriving (Eq, Show)
instance Outputable a => Outputable (OnOff a) where
ppr (On x) = text "On" <+> ppr x
ppr (Off x) = text "Off" <+> ppr x
-- OnOffs accumulate in reverse order, so we use foldr in order to
-- process them in the right order
flattenExtensionFlags :: Maybe Language -> [OnOff LangExt.Extension] -> IntSet
flattenExtensionFlags ml = foldr f defaultExtensionFlags
where f (On f) flags = IntSet.insert (fromEnum f) flags
f (Off f) flags = IntSet.delete (fromEnum f) flags
defaultExtensionFlags = IntSet.fromList (map fromEnum (languageExtensions ml))
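-- For example (an illustrative sketch, not part of GHC): after @-XCpp -XNoCpp@
-- the 'extensions' list is @[Off LangExt.Cpp, On LangExt.Cpp]@ (newest first),
-- so the foldr applies the @On@ to the default set first and the @Off@ last,
-- leaving the extension disabled:
--
-- > flattenExtensionFlags Nothing [Off LangExt.Cpp, On LangExt.Cpp]
-- >   == IntSet.delete (fromEnum LangExt.Cpp)
-- >        (IntSet.insert (fromEnum LangExt.Cpp) defaultExtensionFlags)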
languageExtensions :: Maybe Language -> [LangExt.Extension]
languageExtensions Nothing
-- Nothing => the default case
= LangExt.NondecreasingIndentation -- This has been on by default for some time
: delete LangExt.DatatypeContexts -- The Haskell' committee decided to
-- remove datatype contexts from the
-- language:
-- http://www.haskell.org/pipermail/haskell-prime/2011-January/003335.html
(languageExtensions (Just Haskell2010))
-- NB: MonoPatBinds is no longer the default
languageExtensions (Just Haskell98)
= [LangExt.ImplicitPrelude,
LangExt.MonomorphismRestriction,
LangExt.NPlusKPatterns,
LangExt.DatatypeContexts,
LangExt.TraditionalRecordSyntax,
LangExt.NondecreasingIndentation
-- strictly speaking non-standard, but we always had this
-- on implicitly before the option was added in 7.1, and
-- turning it off breaks code, so we're keeping it on for
-- backwards compatibility. Cabal uses -XHaskell98 by
-- default unless you specify another language.
]
languageExtensions (Just Haskell2010)
= [LangExt.ImplicitPrelude,
LangExt.MonomorphismRestriction,
LangExt.DatatypeContexts,
LangExt.TraditionalRecordSyntax,
LangExt.EmptyDataDecls,
LangExt.ForeignFunctionInterface,
LangExt.PatternGuards,
LangExt.DoAndIfThenElse,
LangExt.RelaxedPolyRec]
-- | Test whether a 'DumpFlag' is set
dopt :: DumpFlag -> DynFlags -> Bool
dopt f dflags = (fromEnum f `IntSet.member` dumpFlags dflags)
|| (verbosity dflags >= 4 && enableIfVerbose f)
where enableIfVerbose Opt_D_dump_tc_trace = False
enableIfVerbose Opt_D_dump_rn_trace = False
enableIfVerbose Opt_D_dump_cs_trace = False
enableIfVerbose Opt_D_dump_if_trace = False
enableIfVerbose Opt_D_dump_vt_trace = False
enableIfVerbose Opt_D_dump_tc = False
enableIfVerbose Opt_D_dump_rn = False
enableIfVerbose Opt_D_dump_shape = False
enableIfVerbose Opt_D_dump_rn_stats = False
enableIfVerbose Opt_D_dump_hi_diffs = False
enableIfVerbose Opt_D_verbose_core2core = False
enableIfVerbose Opt_D_verbose_stg2stg = False
enableIfVerbose Opt_D_dump_splices = False
enableIfVerbose Opt_D_th_dec_file = False
enableIfVerbose Opt_D_dump_rule_firings = False
enableIfVerbose Opt_D_dump_rule_rewrites = False
enableIfVerbose Opt_D_dump_simpl_trace = False
enableIfVerbose Opt_D_dump_rtti = False
enableIfVerbose Opt_D_dump_inlinings = False
enableIfVerbose Opt_D_dump_core_stats = False
enableIfVerbose Opt_D_dump_asm_stats = False
enableIfVerbose Opt_D_dump_types = False
enableIfVerbose Opt_D_dump_simpl_iterations = False
enableIfVerbose Opt_D_dump_ticked = False
enableIfVerbose Opt_D_dump_view_pattern_commoning = False
enableIfVerbose Opt_D_dump_mod_cycles = False
enableIfVerbose Opt_D_dump_mod_map = False
enableIfVerbose Opt_D_dump_ec_trace = False
enableIfVerbose _ = True
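-- For example (sketch): with @-v4@ and no explicit dump flags,
-- @dopt Opt_D_dump_simpl dflags@ is True (implied by the verbosity), while
-- @dopt Opt_D_dump_tc_trace dflags@ stays False unless set explicitly,
-- because the trace-style flags opt out via 'enableIfVerbose'.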
-- | Set a 'DumpFlag'
dopt_set :: DynFlags -> DumpFlag -> DynFlags
dopt_set dfs f = dfs{ dumpFlags = IntSet.insert (fromEnum f) (dumpFlags dfs) }
-- | Unset a 'DumpFlag'
dopt_unset :: DynFlags -> DumpFlag -> DynFlags
dopt_unset dfs f = dfs{ dumpFlags = IntSet.delete (fromEnum f) (dumpFlags dfs) }
-- | Test whether a 'GeneralFlag' is set
gopt :: GeneralFlag -> DynFlags -> Bool
gopt f dflags = fromEnum f `IntSet.member` generalFlags dflags
-- | Set a 'GeneralFlag'
gopt_set :: DynFlags -> GeneralFlag -> DynFlags
gopt_set dfs f = dfs{ generalFlags = IntSet.insert (fromEnum f) (generalFlags dfs) }
-- | Unset a 'GeneralFlag'
gopt_unset :: DynFlags -> GeneralFlag -> DynFlags
gopt_unset dfs f = dfs{ generalFlags = IntSet.delete (fromEnum f) (generalFlags dfs) }
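-- A minimal usage sketch (illustrative only; any 'GeneralFlag' would do):
--
-- > gopt Opt_DeferTypeErrors (gopt_set   dflags Opt_DeferTypeErrors) == True
-- > gopt Opt_DeferTypeErrors (gopt_unset dflags Opt_DeferTypeErrors) == False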
-- | Test whether a 'WarningFlag' is set
wopt :: WarningFlag -> DynFlags -> Bool
wopt f dflags = fromEnum f `IntSet.member` warningFlags dflags
-- | Set a 'WarningFlag'
wopt_set :: DynFlags -> WarningFlag -> DynFlags
wopt_set dfs f = dfs{ warningFlags = IntSet.insert (fromEnum f) (warningFlags dfs) }
-- | Unset a 'WarningFlag'
wopt_unset :: DynFlags -> WarningFlag -> DynFlags
wopt_unset dfs f = dfs{ warningFlags = IntSet.delete (fromEnum f) (warningFlags dfs) }
-- | Test whether a 'WarningFlag' is set as fatal
wopt_fatal :: WarningFlag -> DynFlags -> Bool
wopt_fatal f dflags = fromEnum f `IntSet.member` fatalWarningFlags dflags
-- | Mark a 'WarningFlag' as fatal (do not set the flag)
wopt_set_fatal :: DynFlags -> WarningFlag -> DynFlags
wopt_set_fatal dfs f
= dfs { fatalWarningFlags =
IntSet.insert (fromEnum f) (fatalWarningFlags dfs) }
-- | Mark a 'WarningFlag' as not fatal
wopt_unset_fatal :: DynFlags -> WarningFlag -> DynFlags
wopt_unset_fatal dfs f
= dfs { fatalWarningFlags =
IntSet.delete (fromEnum f) (fatalWarningFlags dfs) }
-- | Test whether a 'LangExt.Extension' is set
xopt :: LangExt.Extension -> DynFlags -> Bool
xopt f dflags = fromEnum f `IntSet.member` extensionFlags dflags
-- | Set a 'LangExt.Extension'
xopt_set :: DynFlags -> LangExt.Extension -> DynFlags
xopt_set dfs f
= let onoffs = On f : extensions dfs
in dfs { extensions = onoffs,
extensionFlags = flattenExtensionFlags (language dfs) onoffs }
-- | Unset a 'LangExt.Extension'
xopt_unset :: DynFlags -> LangExt.Extension -> DynFlags
xopt_unset dfs f
= let onoffs = Off f : extensions dfs
in dfs { extensions = onoffs,
extensionFlags = flattenExtensionFlags (language dfs) onoffs }
lang_set :: DynFlags -> Maybe Language -> DynFlags
lang_set dflags lang =
dflags {
language = lang,
extensionFlags = flattenExtensionFlags lang (extensions dflags)
}
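-- Illustrative sketch: 'xopt_set' only prepends to 'extensions' and
-- re-flattens, and a later 'lang_set' re-flattens that same list against the
-- new language's defaults (the second line assumes no explicit
-- NoDatatypeContexts has been recorded in 'extensions'):
--
-- > xopt LangExt.ScopedTypeVariables
-- >      (xopt_set dflags LangExt.ScopedTypeVariables)                == True
-- > xopt LangExt.DatatypeContexts (lang_set dflags (Just Haskell98))  == True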
-- | An internal helper to check whether to use unicode syntax for output.
--
-- Note: You should very likely be using 'Outputable.unicodeSyntax' instead
-- of this function.
useUnicodeSyntax :: DynFlags -> Bool
useUnicodeSyntax = gopt Opt_PrintUnicodeSyntax
-- | Set the Haskell language standard to use
setLanguage :: Language -> DynP ()
setLanguage l = upd (`lang_set` Just l)
-- | Some modules have dependencies on others through the DynFlags rather than textual imports
dynFlagDependencies :: DynFlags -> [ModuleName]
dynFlagDependencies = pluginModNames
-- | Is the -fpackage-trust mode on
packageTrustOn :: DynFlags -> Bool
packageTrustOn = gopt Opt_PackageTrust
-- | Is Safe Haskell on in some way (including inference mode)
safeHaskellOn :: DynFlags -> Bool
safeHaskellOn dflags = safeHaskell dflags /= Sf_None || safeInferOn dflags
-- | Is the Safe Haskell safe language in use
safeLanguageOn :: DynFlags -> Bool
safeLanguageOn dflags = safeHaskell dflags == Sf_Safe
-- | Is the Safe Haskell safe inference mode active
safeInferOn :: DynFlags -> Bool
safeInferOn = safeInfer
-- | Test if Safe Imports are on in some form
safeImportsOn :: DynFlags -> Bool
safeImportsOn dflags = safeHaskell dflags == Sf_Unsafe ||
safeHaskell dflags == Sf_Trustworthy ||
safeHaskell dflags == Sf_Safe
-- | Set a 'Safe Haskell' flag
setSafeHaskell :: SafeHaskellMode -> DynP ()
setSafeHaskell s = updM f
where f dfs = do
let sf = safeHaskell dfs
safeM <- combineSafeFlags sf s
case s of
Sf_Safe -> return $ dfs { safeHaskell = safeM, safeInfer = False }
-- leave safe inference on in Trustworthy mode so we can warn
-- if it could have been inferred safe.
Sf_Trustworthy -> do
l <- getCurLoc
return $ dfs { safeHaskell = safeM, trustworthyOnLoc = l }
-- leave safe inference on in Unsafe mode as well.
_ -> return $ dfs { safeHaskell = safeM }
-- | Are all direct imports required to be safe for this Safe Haskell mode?
-- Direct imports are when the code explicitly imports a module
safeDirectImpsReq :: DynFlags -> Bool
safeDirectImpsReq d = safeLanguageOn d
-- | Are all implicit imports required to be safe for this Safe Haskell mode?
-- Implicit imports are things in the prelude, e.g. System.IO when 'print' is used.
safeImplicitImpsReq :: DynFlags -> Bool
safeImplicitImpsReq d = safeLanguageOn d
-- | Combine two Safe Haskell modes correctly. Used for dealing with multiple flags.
-- This makes Safe Haskell very much a monoid, but for now I prefer this
-- approach, as I don't want to export this functionality from the module,
-- though I do want to export the type constructors.
combineSafeFlags :: SafeHaskellMode -> SafeHaskellMode -> DynP SafeHaskellMode
combineSafeFlags a b | a == Sf_None = return b
| b == Sf_None = return a
| a == b = return a
| otherwise = addErr errm >> pure a
where errm = "Incompatible Safe Haskell flags! ("
++ show a ++ ", " ++ show b ++ ")"
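-- For instance (sketch): @Sf_None@ acts as an identity, so
--
-- > combineSafeFlags Sf_None Sf_Safe          -- yields Sf_Safe
-- > combineSafeFlags Sf_Safe Sf_Trustworthy   -- records the error above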
-- | A list of unsafe flags under Safe Haskell. Tuple elements are:
-- * name of the flag
-- * function to get srcspan that enabled the flag
-- * function to test if the flag is on
-- * function to turn the flag off
unsafeFlags, unsafeFlagsForInfer
:: [(String, DynFlags -> SrcSpan, DynFlags -> Bool, DynFlags -> DynFlags)]
unsafeFlags = [ ("-XGeneralizedNewtypeDeriving", newDerivOnLoc,
xopt LangExt.GeneralizedNewtypeDeriving,
flip xopt_unset LangExt.GeneralizedNewtypeDeriving)
, ("-XTemplateHaskell", thOnLoc,
xopt LangExt.TemplateHaskell,
flip xopt_unset LangExt.TemplateHaskell)
]
unsafeFlagsForInfer = unsafeFlags
-- | Retrieve the options corresponding to a particular @opt_*@ field in the correct order
getOpts :: DynFlags -- ^ 'DynFlags' to retrieve the options from
-> (DynFlags -> [a]) -- ^ Relevant record accessor: one of the @opt_*@ accessors
-> [a] -- ^ Correctly ordered extracted options
getOpts dflags opts = reverse (opts dflags)
-- We add to the options from the front, so we need to reverse the list
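-- For example (sketch, assuming the usual @opt_c@ accessor): after
-- @-optc-Wall -optc-O2@ the raw field holds @["-O2", "-Wall"]@, and
-- @getOpts dflags opt_c@ restores the command-line order @["-Wall", "-O2"]@.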
-- | Gets the verbosity flag for the current verbosity level. This is fed to
-- other tools, so GHC-specific verbosity flags like @-ddump-most@ are not included
getVerbFlags :: DynFlags -> [String]
getVerbFlags dflags
| verbosity dflags >= 4 = ["-v"]
| otherwise = []
setObjectDir, setHiDir, setStubDir, setDumpDir, setOutputDir,
setDynObjectSuf, setDynHiSuf,
setDylibInstallName,
setObjectSuf, setHiSuf, setHcSuf, parseDynLibLoaderMode,
setPgmP, addOptl, addOptc, addOptP,
addCmdlineFramework, addHaddockOpts, addGhciScript,
setInteractivePrint
:: String -> DynFlags -> DynFlags
setOutputFile, setDynOutputFile, setOutputHi, setDumpPrefixForce
:: Maybe String -> DynFlags -> DynFlags
setObjectDir f d = d { objectDir = Just f}
setHiDir f d = d { hiDir = Just f}
setStubDir f d = d { stubDir = Just f, includePaths = f : includePaths d }
-- -stubdir D adds an implicit -I D, so that gcc can find the _stub.h file
-- \#included from the .hc file when compiling via C (i.e. unregisterised
-- builds).
setDumpDir f d = d { dumpDir = Just f}
setOutputDir f = setObjectDir f . setHiDir f . setStubDir f . setDumpDir f
setDylibInstallName f d = d { dylibInstallName = Just f}
setObjectSuf f d = d { objectSuf = f}
setDynObjectSuf f d = d { dynObjectSuf = f}
setHiSuf f d = d { hiSuf = f}
setDynHiSuf f d = d { dynHiSuf = f}
setHcSuf f d = d { hcSuf = f}
setOutputFile f d = d { outputFile = f}
setDynOutputFile f d = d { dynOutputFile = f}
setOutputHi f d = d { outputHi = f}
thisComponentId :: DynFlags -> ComponentId
thisComponentId dflags =
case thisComponentId_ dflags of
Just cid -> cid
Nothing ->
case thisUnitIdInsts_ dflags of
Just _ ->
throwGhcException $ CmdLineError ("Use of -instantiated-with requires -this-component-id")
Nothing -> ComponentId (unitIdFS (thisPackage dflags))
thisUnitIdInsts :: DynFlags -> [(ModuleName, Module)]
thisUnitIdInsts dflags =
case thisUnitIdInsts_ dflags of
Just insts -> insts
Nothing -> []
thisPackage :: DynFlags -> UnitId
thisPackage dflags =
case thisUnitIdInsts_ dflags of
Nothing -> default_uid
Just insts
| all (\(x,y) -> mkHoleModule x == y) insts
-> newUnitId (thisComponentId dflags) insts
| otherwise
-> default_uid
where
default_uid = DefiniteUnitId (DefUnitId (thisInstalledUnitId dflags))
parseUnitIdInsts :: String -> [(ModuleName, Module)]
parseUnitIdInsts str = case filter ((=="").snd) (readP_to_S parse str) of
[(r, "")] -> r
_ -> throwGhcException $ CmdLineError ("Can't parse -instantiated-with: " ++ str)
where parse = sepBy parseEntry (R.char ',')
parseEntry = do
n <- parseModuleName
_ <- R.char '='
m <- parseModuleId
return (n, m)
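-- Illustrative sketch of the accepted shape (the precise module-id syntax is
-- whatever 'parseModuleId' accepts; the names here are made up):
--
-- > parseUnitIdInsts "A=<A>,B=some-pkg-id:Impl.B"
--
-- maps module name @A@ to a hole and @B@ to a concrete module.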
setUnitIdInsts :: String -> DynFlags -> DynFlags
setUnitIdInsts s d =
d { thisUnitIdInsts_ = Just (parseUnitIdInsts s) }
setComponentId :: String -> DynFlags -> DynFlags
setComponentId s d =
d { thisComponentId_ = Just (ComponentId (fsLit s)) }
addPluginModuleName :: String -> DynFlags -> DynFlags
addPluginModuleName name d = d { pluginModNames = (mkModuleName name) : (pluginModNames d) }
addPluginModuleNameOption :: String -> DynFlags -> DynFlags
addPluginModuleNameOption optflag d = d { pluginModNameOpts = (mkModuleName m, option) : (pluginModNameOpts d) }
where (m, rest) = break (== ':') optflag
option = case rest of
[] -> "" -- should probably signal an error
(_:plug_opt) -> plug_opt -- ignore the ':' from break
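-- Sketch: a @-fplugin-opt@ argument of the form @Some.Plugin:arg@ is split at
-- the first colon, so
--
-- > addPluginModuleNameOption "Some.Plugin:arg" d
--
-- prepends @(mkModuleName "Some.Plugin", "arg")@ to 'pluginModNameOpts'.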
addFrontendPluginOption :: String -> DynFlags -> DynFlags
addFrontendPluginOption s d = d { frontendPluginOpts = s : frontendPluginOpts d }
parseDynLibLoaderMode f d =
case splitAt 8 f of
("deploy", "") -> d { dynLibLoader = Deployable }
("sysdep", "") -> d { dynLibLoader = SystemDependent }
_ -> throwGhcException (CmdLineError ("Unknown dynlib loader: " ++ f))
setDumpPrefixForce f d = d { dumpPrefixForce = f}
-- XXX HACK: Prelude> words "'does not' work" ===> ["'does","not'","work"]
-- Config.hs should really use Option.
setPgmP f = let (pgm:args) = words f in alterSettings (\s -> s { sPgm_P = (pgm, map Option args)})
addOptl f = alterSettings (\s -> s { sOpt_l = f : sOpt_l s})
addOptc f = alterSettings (\s -> s { sOpt_c = f : sOpt_c s})
addOptP f = alterSettings (\s -> s { sOpt_P = f : sOpt_P s})
setDepMakefile :: FilePath -> DynFlags -> DynFlags
setDepMakefile f d = d { depMakefile = f }
setDepIncludePkgDeps :: Bool -> DynFlags -> DynFlags
setDepIncludePkgDeps b d = d { depIncludePkgDeps = b }
addDepExcludeMod :: String -> DynFlags -> DynFlags
addDepExcludeMod m d
= d { depExcludeMods = mkModuleName m : depExcludeMods d }
addDepSuffix :: FilePath -> DynFlags -> DynFlags
addDepSuffix s d = d { depSuffixes = s : depSuffixes d }
addCmdlineFramework f d = d { cmdlineFrameworks = f : cmdlineFrameworks d}
addHaddockOpts f d = d { haddockOptions = Just f}
addGhciScript f d = d { ghciScripts = f : ghciScripts d}
setInteractivePrint f d = d { interactivePrint = Just f}
-- -----------------------------------------------------------------------------
-- Command-line options
-- | When invoking external tools as part of the compilation pipeline, we
-- pass these a sequence of options on the command-line. Rather than
-- just using a list of Strings, we use a type that allows us to distinguish
-- between filepaths and 'other stuff'. The reason for this is that
-- this type gives us a handle on transforming filenames, and filenames only,
-- to whatever format they're expected to be on a particular platform.
data Option
= FileOption -- an entry that _contains_ filename(s) / filepaths.
String -- a non-filepath prefix that shouldn't be
-- transformed (e.g., "/out=")
String -- the filepath/filename portion
| Option String
deriving ( Eq )
showOpt :: Option -> String
showOpt (FileOption pre f) = pre ++ f
showOpt (Option s) = s
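-- For example (sketch): only the filepath part of a 'FileOption' is subject
-- to later path mangling, while the prefix is passed through untouched:
--
-- > showOpt (FileOption "/out=" "foo.obj") == "/out=foo.obj"
-- > showOpt (Option "-O2")                 == "-O2"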
-----------------------------------------------------------------------------
-- Setting the optimisation level
updOptLevel :: Int -> DynFlags -> DynFlags
-- ^ Sets the 'DynFlags' to be appropriate to the optimisation level
updOptLevel n dfs
= dfs2{ optLevel = final_n }
where
final_n = max 0 (min 2 n) -- Clamp to 0 <= n <= 2
dfs1 = foldr (flip gopt_unset) dfs remove_gopts
dfs2 = foldr (flip gopt_set) dfs1 extra_gopts
extra_gopts = [ f | (ns,f) <- optLevelFlags, final_n `elem` ns ]
remove_gopts = [ f | (ns,f) <- optLevelFlags, final_n `notElem` ns ]
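-- A small sketch of the clamping behaviour:
--
-- > optLevel (updOptLevel 5    dflags) == 2   -- clamped down to 2
-- > optLevel (updOptLevel (-1) dflags) == 0   -- clamped up to 0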
{- **********************************************************************
%* *
DynFlags parser
%* *
%********************************************************************* -}
-- -----------------------------------------------------------------------------
-- Parsing the dynamic flags.
-- | Parse dynamic flags from a list of command line arguments. Returns the
-- parsed 'DynFlags', the left-over arguments, and a list of warnings.
-- Throws a 'UsageError' if errors occurred during parsing (such as unknown
-- flags or missing arguments).
parseDynamicFlagsCmdLine :: MonadIO m => DynFlags -> [Located String]
-> m (DynFlags, [Located String], [Located String])
-- ^ Updated 'DynFlags', left-over arguments, and
-- list of warnings.
parseDynamicFlagsCmdLine = parseDynamicFlagsFull flagsAll True
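-- A hedged usage sketch (not taken from this module): callers typically run
-- this inside a 'Ghc' session, e.g.
--
-- > do dflags0 <- GHC.getSessionDynFlags
-- >    (dflags1, _leftover, _warns) <-
-- >        parseDynamicFlagsCmdLine dflags0 (map noLoc ["-O2", "-Wall"])
-- >    _ <- GHC.setSessionDynFlags dflags1
-- >    ...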
-- | Like 'parseDynamicFlagsCmdLine' but does not allow the package flags
-- (-package, -hide-package, -ignore-package, -hide-all-packages, -package-db).
-- Used to parse flags set in a module's pragma.
parseDynamicFilePragma :: MonadIO m => DynFlags -> [Located String]
-> m (DynFlags, [Located String], [Located String])
-- ^ Updated 'DynFlags', left-over arguments, and
-- list of warnings.
parseDynamicFilePragma = parseDynamicFlagsFull flagsDynamic False
-- | Parses the dynamically set flags for GHC. This is the most general form of
-- the dynamic flag parser that the other methods simply wrap. It allows
-- saying which flags are valid flags and indicating if we are parsing
-- arguments from the command line or from a file pragma.
parseDynamicFlagsFull :: MonadIO m
=> [Flag (CmdLineP DynFlags)] -- ^ valid flags to match against
-> Bool -- ^ are the arguments from the command line?
-> DynFlags -- ^ current dynamic flags
-> [Located String] -- ^ arguments to parse
-> m (DynFlags, [Located String], [Located String])
parseDynamicFlagsFull activeFlags cmdline dflags0 args = do
let ((leftover, errs, warns), dflags1)
= runCmdLine (processArgs activeFlags args) dflags0
-- See Note [Handling errors when parsing commandline flags]
unless (null errs) $ liftIO $ throwGhcExceptionIO $
errorsToGhcException . map (showPpr dflags0 . getLoc &&& unLoc) $ errs
-- check for disabled flags in safe haskell
let (dflags2, sh_warns) = safeFlagCheck cmdline dflags1
dflags3 = updateWays dflags2
theWays = ways dflags3
unless (allowed_combination theWays) $ liftIO $
throwGhcExceptionIO (CmdLineError ("combination not supported: " ++
intercalate "/" (map wayDesc theWays)))
let chooseOutput
| isJust (outputFile dflags3) -- Only iff user specified -o ...
, not (isJust (dynOutputFile dflags3)) -- but not -dyno
= return $ dflags3 { dynOutputFile = Just $ dynOut (fromJust $ outputFile dflags3) }
| otherwise
= return dflags3
where
dynOut = flip addExtension (dynObjectSuf dflags3) . dropExtension
dflags4 <- ifGeneratingDynamicToo dflags3 chooseOutput (return dflags3)
let (dflags5, consistency_warnings) = makeDynFlagsConsistent dflags4
dflags6 <- case dllSplitFile dflags5 of
Nothing -> return (dflags5 { dllSplit = Nothing })
Just f ->
case dllSplit dflags5 of
Just _ ->
-- If dllSplit is out of date then it would have
-- been set to Nothing. As it's a Just, it must be
-- up-to-date.
return dflags5
Nothing ->
do xs <- liftIO $ readFile f
let ss = map (Set.fromList . words) (lines xs)
return $ dflags5 { dllSplit = Just ss }
-- Set timer stats & heap size
when (enableTimeStats dflags6) $ liftIO enableTimingStats
case (ghcHeapSize dflags6) of
Just x -> liftIO (setHeapSize x)
_ -> return ()
liftIO $ setUnsafeGlobalDynFlags dflags6
return (dflags6, leftover, consistency_warnings ++ sh_warns ++ warns)
updateWays :: DynFlags -> DynFlags
updateWays dflags
= let theWays = sort $ nub $ ways dflags
in dflags {
ways = theWays,
buildTag = mkBuildTag (filter (not . wayRTSOnly) theWays),
rtsBuildTag = mkBuildTag theWays
}
-- | Check (and potentially disable) any extensions that aren't allowed
-- in safe mode.
--
-- The bool is to indicate if we are parsing command line flags (false means
-- file pragma). This allows us to generate better warnings.
safeFlagCheck :: Bool -> DynFlags -> (DynFlags, [Located String])
safeFlagCheck _ dflags | safeLanguageOn dflags = (dflagsUnset, warns)
where
-- Handle illegal flags under safe language.
(dflagsUnset, warns) = foldl check_method (dflags, []) unsafeFlags
check_method (df, warns) (str,loc,test,fix)
| test df = (fix df, warns ++ safeFailure (loc df) str)
| otherwise = (df, warns)
safeFailure loc str
= [L loc $ str ++ " is not allowed in Safe Haskell; ignoring "
++ str]
safeFlagCheck cmdl dflags =
case (safeInferOn dflags) of
True | safeFlags -> (dflags', warn)
True -> (dflags' { safeInferred = False }, warn)
False -> (dflags', warn)
where
-- dflags and warning for when -fpackage-trust is used by itself with no
-- Safe Haskell flag
(dflags', warn)
| safeHaskell dflags == Sf_None && not cmdl && packageTrustOn dflags
= (gopt_unset dflags Opt_PackageTrust, pkgWarnMsg)
| otherwise = (dflags, [])
pkgWarnMsg = [L (pkgTrustOnLoc dflags') $
"-fpackage-trust ignored;" ++
" must be specified with a Safe Haskell flag"]
-- Have we inferred Unsafe? See Note [HscMain . Safe Haskell Inference]
safeFlags = all (\(_,_,t,_) -> not $ t dflags) unsafeFlagsForInfer
{- **********************************************************************
%* *
DynFlags specifications
%* *
%********************************************************************* -}
-- | All dynamic flags option strings without the deprecated ones.
-- These are the user facing strings for enabling and disabling options.
allNonDeprecatedFlags :: [String]
allNonDeprecatedFlags = allFlagsDeps False
-- | All flag option strings, with the option to keep or drop the deprecated ones
allFlagsDeps :: Bool -> [String]
allFlagsDeps keepDeprecated = [ '-':flagName flag
| (deprecated, flag) <- flagsAllDeps
, ok (flagOptKind flag)
, keepDeprecated || not (isDeprecated deprecated)]
where ok (PrefixPred _ _) = False
ok _ = True
isDeprecated Deprecated = True
isDeprecated _ = False
{-
- Below we export user facing symbols for GHC dynamic flags for use with the
- GHC API.
-}
-- All dynamic flags present in GHC.
flagsAll :: [Flag (CmdLineP DynFlags)]
flagsAll = map snd flagsAllDeps
-- All dynamic flags present in GHC with deprecation information.
flagsAllDeps :: [(Deprecation, Flag (CmdLineP DynFlags))]
flagsAllDeps = package_flags_deps ++ dynamic_flags_deps
-- All dynamic flags, minus package flags, present in GHC.
flagsDynamic :: [Flag (CmdLineP DynFlags)]
flagsDynamic = map snd dynamic_flags_deps
-- All package flags present in GHC.
flagsPackage :: [Flag (CmdLineP DynFlags)]
flagsPackage = map snd package_flags_deps
----------------Helpers to make flags and keep deprecation information----------
type FlagMaker m = String -> OptKind m -> Flag m
type DynFlagMaker = FlagMaker (CmdLineP DynFlags)
data Deprecation = NotDeprecated | Deprecated deriving (Eq, Ord)
-- Make a non-deprecated flag
make_ord_flag :: DynFlagMaker -> String -> OptKind (CmdLineP DynFlags)
-> (Deprecation, Flag (CmdLineP DynFlags))
make_ord_flag fm name kind = (NotDeprecated, fm name kind)
-- Make a deprecated flag
make_dep_flag :: DynFlagMaker -> String -> OptKind (CmdLineP DynFlags) -> String
-> (Deprecation, Flag (CmdLineP DynFlags))
make_dep_flag fm name kind message = (Deprecated,
fm name $ add_dep_message kind message)
add_dep_message :: OptKind (CmdLineP DynFlags) -> String
-> OptKind (CmdLineP DynFlags)
add_dep_message (NoArg f) message = NoArg $ f >> deprecate message
add_dep_message (HasArg f) message = HasArg $ \s -> f s >> deprecate message
add_dep_message (SepArg f) message = SepArg $ \s -> f s >> deprecate message
add_dep_message (Prefix f) message = Prefix $ \s -> f s >> deprecate message
add_dep_message (OptPrefix f) message =
OptPrefix $ \s -> f s >> deprecate message
add_dep_message (OptIntSuffix f) message =
OptIntSuffix $ \oi -> f oi >> deprecate message
add_dep_message (IntSuffix f) message =
IntSuffix $ \i -> f i >> deprecate message
add_dep_message (FloatSuffix f) message =
FloatSuffix $ \fl -> f fl >> deprecate message
add_dep_message (PassFlag f) message =
PassFlag $ \s -> f s >> deprecate message
add_dep_message (AnySuffix f) message =
AnySuffix $ \s -> f s >> deprecate message
add_dep_message (PrefixPred pred f) message =
PrefixPred pred $ \s -> f s >> deprecate message
add_dep_message (AnySuffixPred pred f) message =
AnySuffixPred pred $ \s -> f s >> deprecate message
----------------------- The main flags themselves ------------------------------
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
dynamic_flags_deps :: [(Deprecation, Flag (CmdLineP DynFlags))]
dynamic_flags_deps = [
make_dep_flag defFlag "n" (NoArg $ return ())
"The -n flag is deprecated and no longer has any effect"
, make_ord_flag defFlag "cpp" (NoArg (setExtensionFlag LangExt.Cpp))
, make_ord_flag defFlag "F" (NoArg (setGeneralFlag Opt_Pp))
, (Deprecated, defFlag "#include"
(HasArg (\_s ->
addWarn ("-#include and INCLUDE pragmas are " ++
"deprecated: They no longer have any effect"))))
, make_ord_flag defFlag "v" (OptIntSuffix setVerbosity)
, make_ord_flag defGhcFlag "j" (OptIntSuffix
(\n -> case n of
Just n
| n > 0 -> upd (\d -> d { parMakeCount = Just n })
| otherwise -> addErr "Syntax: -j[n] where n > 0"
Nothing -> upd (\d -> d { parMakeCount = Nothing })))
-- When the number of parallel builds
-- is omitted, it is the same
-- as specifying that the number of
-- parallel builds is equal to the
-- result of getNumProcessors
, make_ord_flag defFlag "instantiated-with" (sepArg setUnitIdInsts)
, make_ord_flag defFlag "this-component-id" (sepArg setComponentId)
-- RTS options -------------------------------------------------------------
, make_ord_flag defFlag "H" (HasArg (\s -> upd (\d ->
d { ghcHeapSize = Just $ fromIntegral (decodeSize s)})))
, make_ord_flag defFlag "Rghc-timing" (NoArg (upd (\d ->
d { enableTimeStats = True })))
------- ways ---------------------------------------------------------------
, make_ord_flag defGhcFlag "prof" (NoArg (addWay WayProf))
, make_ord_flag defGhcFlag "eventlog" (NoArg (addWay WayEventLog))
, make_dep_flag defGhcFlag "smp"
(NoArg $ addWay WayThreaded) "Use -threaded instead"
, make_ord_flag defGhcFlag "debug" (NoArg (addWay WayDebug))
, make_ord_flag defGhcFlag "threaded" (NoArg (addWay WayThreaded))
, make_ord_flag defGhcFlag "ticky"
(NoArg (setGeneralFlag Opt_Ticky >> addWay WayDebug))
-- -ticky enables ticky-ticky code generation, and also implies -debug which
-- is required to get the RTS ticky support.
----- Linker --------------------------------------------------------
, make_ord_flag defGhcFlag "static" (NoArg removeWayDyn)
, make_ord_flag defGhcFlag "dynamic" (NoArg (addWay WayDyn))
, make_ord_flag defGhcFlag "rdynamic" $ noArg $
#ifdef linux_HOST_OS
addOptl "-rdynamic"
#elif defined (mingw32_HOST_OS)
addOptl "-Wl,--export-all-symbols"
#else
-- ignored for compat w/ gcc:
id
#endif
, make_ord_flag defGhcFlag "relative-dynlib-paths"
(NoArg (setGeneralFlag Opt_RelativeDynlibPaths))
------- Specific phases --------------------------------------------
-- need to appear before -pgmL to be parsed as LLVM flags.
, make_ord_flag defFlag "pgmlo"
(hasArg (\f -> alterSettings (\s -> s { sPgm_lo = (f,[])})))
, make_ord_flag defFlag "pgmlc"
(hasArg (\f -> alterSettings (\s -> s { sPgm_lc = (f,[])})))
, make_ord_flag defFlag "pgmi"
(hasArg (\f -> alterSettings (\s -> s { sPgm_i = f})))
, make_ord_flag defFlag "pgmL"
(hasArg (\f -> alterSettings (\s -> s { sPgm_L = f})))
, make_ord_flag defFlag "pgmP"
(hasArg setPgmP)
, make_ord_flag defFlag "pgmF"
(hasArg (\f -> alterSettings (\s -> s { sPgm_F = f})))
, make_ord_flag defFlag "pgmc"
(hasArg (\f -> alterSettings (\s -> s { sPgm_c = (f,[])})))
, make_ord_flag defFlag "pgms"
(hasArg (\f -> alterSettings (\s -> s { sPgm_s = (f,[])})))
, make_ord_flag defFlag "pgma"
(hasArg (\f -> alterSettings (\s -> s { sPgm_a = (f,[])})))
, make_ord_flag defFlag "pgml"
(hasArg (\f -> alterSettings (\s -> s { sPgm_l = (f,[])})))
, make_ord_flag defFlag "pgmdll"
(hasArg (\f -> alterSettings (\s -> s { sPgm_dll = (f,[])})))
, make_ord_flag defFlag "pgmwindres"
(hasArg (\f -> alterSettings (\s -> s { sPgm_windres = f})))
, make_ord_flag defFlag "pgmlibtool"
(hasArg (\f -> alterSettings (\s -> s { sPgm_libtool = f})))
-- need to appear before -optl/-opta to be parsed as LLVM flags.
, make_ord_flag defFlag "optlo"
(hasArg (\f -> alterSettings (\s -> s { sOpt_lo = f : sOpt_lo s})))
, make_ord_flag defFlag "optlc"
(hasArg (\f -> alterSettings (\s -> s { sOpt_lc = f : sOpt_lc s})))
, make_ord_flag defFlag "opti"
(hasArg (\f -> alterSettings (\s -> s { sOpt_i = f : sOpt_i s})))
, make_ord_flag defFlag "optL"
(hasArg (\f -> alterSettings (\s -> s { sOpt_L = f : sOpt_L s})))
, make_ord_flag defFlag "optP"
(hasArg addOptP)
, make_ord_flag defFlag "optF"
(hasArg (\f -> alterSettings (\s -> s { sOpt_F = f : sOpt_F s})))
, make_ord_flag defFlag "optc"
(hasArg addOptc)
, make_ord_flag defFlag "opta"
(hasArg (\f -> alterSettings (\s -> s { sOpt_a = f : sOpt_a s})))
, make_ord_flag defFlag "optl"
(hasArg addOptl)
, make_ord_flag defFlag "optwindres"
(hasArg (\f ->
alterSettings (\s -> s { sOpt_windres = f : sOpt_windres s})))
, make_ord_flag defGhcFlag "split-objs"
(NoArg (if can_split
then setGeneralFlag Opt_SplitObjs
else addWarn "ignoring -fsplit-objs"))
, make_ord_flag defGhcFlag "split-sections"
(noArgM (\dflags -> do
if platformHasSubsectionsViaSymbols (targetPlatform dflags)
then do addErr $
"-split-sections is not useful on this platform " ++
"since it always uses subsections via symbols."
return dflags
else return (gopt_set dflags Opt_SplitSections)))
-------- ghc -M -----------------------------------------------------
, make_ord_flag defGhcFlag "dep-suffix" (hasArg addDepSuffix)
, make_ord_flag defGhcFlag "dep-makefile" (hasArg setDepMakefile)
, make_ord_flag defGhcFlag "include-pkg-deps"
(noArg (setDepIncludePkgDeps True))
, make_ord_flag defGhcFlag "exclude-module" (hasArg addDepExcludeMod)
-------- Linking ----------------------------------------------------
, make_ord_flag defGhcFlag "no-link"
(noArg (\d -> d { ghcLink=NoLink }))
, make_ord_flag defGhcFlag "shared"
(noArg (\d -> d { ghcLink=LinkDynLib }))
, make_ord_flag defGhcFlag "staticlib"
(noArg (\d -> d { ghcLink=LinkStaticLib }))
, make_ord_flag defGhcFlag "dynload" (hasArg parseDynLibLoaderMode)
, make_ord_flag defGhcFlag "dylib-install-name" (hasArg setDylibInstallName)
-- -dll-split is an internal flag, used only during the GHC build
, make_ord_flag defHiddenFlag "dll-split"
(hasArg (\f d -> d { dllSplitFile = Just f, dllSplit = Nothing }))
------- Libraries ---------------------------------------------------
, make_ord_flag defFlag "L" (Prefix addLibraryPath)
, make_ord_flag defFlag "l" (hasArg (addLdInputs . Option . ("-l" ++)))
------- Frameworks --------------------------------------------------
-- -framework-path should really be -F ...
, make_ord_flag defFlag "framework-path" (HasArg addFrameworkPath)
, make_ord_flag defFlag "framework" (hasArg addCmdlineFramework)
------- Output Redirection ------------------------------------------
, make_ord_flag defGhcFlag "odir" (hasArg setObjectDir)
, make_ord_flag defGhcFlag "o" (sepArg (setOutputFile . Just))
, make_ord_flag defGhcFlag "dyno"
(sepArg (setDynOutputFile . Just))
, make_ord_flag defGhcFlag "ohi"
(hasArg (setOutputHi . Just ))
, make_ord_flag defGhcFlag "osuf" (hasArg setObjectSuf)
, make_ord_flag defGhcFlag "dynosuf" (hasArg setDynObjectSuf)
, make_ord_flag defGhcFlag "hcsuf" (hasArg setHcSuf)
, make_ord_flag defGhcFlag "hisuf" (hasArg setHiSuf)
, make_ord_flag defGhcFlag "dynhisuf" (hasArg setDynHiSuf)
, make_ord_flag defGhcFlag "hidir" (hasArg setHiDir)
, make_ord_flag defGhcFlag "tmpdir" (hasArg setTmpDir)
, make_ord_flag defGhcFlag "stubdir" (hasArg setStubDir)
, make_ord_flag defGhcFlag "dumpdir" (hasArg setDumpDir)
, make_ord_flag defGhcFlag "outputdir" (hasArg setOutputDir)
, make_ord_flag defGhcFlag "ddump-file-prefix"
(hasArg (setDumpPrefixForce . Just))
, make_ord_flag defGhcFlag "dynamic-too"
(NoArg (setGeneralFlag Opt_BuildDynamicToo))
------- Keeping temporary files -------------------------------------
-- These can be singular (think ghc -c) or plural (think ghc --make)
, make_ord_flag defGhcFlag "keep-hc-file"
(NoArg (setGeneralFlag Opt_KeepHcFiles))
, make_ord_flag defGhcFlag "keep-hc-files"
(NoArg (setGeneralFlag Opt_KeepHcFiles))
, make_ord_flag defGhcFlag "keep-s-file"
(NoArg (setGeneralFlag Opt_KeepSFiles))
, make_ord_flag defGhcFlag "keep-s-files"
(NoArg (setGeneralFlag Opt_KeepSFiles))
, make_ord_flag defGhcFlag "keep-llvm-file"
(NoArg $ setObjTarget HscLlvm >> setGeneralFlag Opt_KeepLlvmFiles)
, make_ord_flag defGhcFlag "keep-llvm-files"
(NoArg $ setObjTarget HscLlvm >> setGeneralFlag Opt_KeepLlvmFiles)
-- This only makes sense as plural
, make_ord_flag defGhcFlag "keep-tmp-files"
(NoArg (setGeneralFlag Opt_KeepTmpFiles))
, make_ord_flag defGhcFlag "keep-hi-file"
(NoArg (setGeneralFlag Opt_KeepHiFiles))
, make_ord_flag defGhcFlag "no-keep-hi-file"
(NoArg (unSetGeneralFlag Opt_KeepHiFiles))
, make_ord_flag defGhcFlag "keep-hi-files"
(NoArg (setGeneralFlag Opt_KeepHiFiles))
, make_ord_flag defGhcFlag "no-keep-hi-files"
(NoArg (unSetGeneralFlag Opt_KeepHiFiles))
, make_ord_flag defGhcFlag "keep-o-file"
(NoArg (setGeneralFlag Opt_KeepOFiles))
, make_ord_flag defGhcFlag "no-keep-o-file"
(NoArg (unSetGeneralFlag Opt_KeepOFiles))
, make_ord_flag defGhcFlag "keep-o-files"
(NoArg (setGeneralFlag Opt_KeepOFiles))
, make_ord_flag defGhcFlag "no-keep-o-files"
(NoArg (unSetGeneralFlag Opt_KeepOFiles))
------- Miscellaneous ----------------------------------------------
, make_ord_flag defGhcFlag "no-auto-link-packages"
(NoArg (unSetGeneralFlag Opt_AutoLinkPackages))
, make_ord_flag defGhcFlag "no-hs-main"
(NoArg (setGeneralFlag Opt_NoHsMain))
, make_ord_flag defGhcFlag "with-rtsopts"
(HasArg setRtsOpts)
, make_ord_flag defGhcFlag "rtsopts"
(NoArg (setRtsOptsEnabled RtsOptsAll))
, make_ord_flag defGhcFlag "rtsopts=all"
(NoArg (setRtsOptsEnabled RtsOptsAll))
, make_ord_flag defGhcFlag "rtsopts=some"
(NoArg (setRtsOptsEnabled RtsOptsSafeOnly))
, make_ord_flag defGhcFlag "rtsopts=none"
(NoArg (setRtsOptsEnabled RtsOptsNone))
, make_ord_flag defGhcFlag "no-rtsopts"
(NoArg (setRtsOptsEnabled RtsOptsNone))
, make_ord_flag defGhcFlag "no-rtsopts-suggestions"
(noArg (\d -> d {rtsOptsSuggestions = False}))
, make_ord_flag defGhcFlag "main-is" (SepArg setMainIs)
, make_ord_flag defGhcFlag "haddock" (NoArg (setGeneralFlag Opt_Haddock))
, make_ord_flag defGhcFlag "haddock-opts" (hasArg addHaddockOpts)
, make_ord_flag defGhcFlag "hpcdir" (SepArg setOptHpcDir)
, make_ord_flag defGhciFlag "ghci-script" (hasArg addGhciScript)
, make_ord_flag defGhciFlag "interactive-print" (hasArg setInteractivePrint)
, make_ord_flag defGhcFlag "ticky-allocd"
(NoArg (setGeneralFlag Opt_Ticky_Allocd))
, make_ord_flag defGhcFlag "ticky-LNE"
(NoArg (setGeneralFlag Opt_Ticky_LNE))
, make_ord_flag defGhcFlag "ticky-dyn-thunk"
(NoArg (setGeneralFlag Opt_Ticky_Dyn_Thunk))
------- recompilation checker --------------------------------------
, make_dep_flag defGhcFlag "recomp"
(NoArg $ unSetGeneralFlag Opt_ForceRecomp)
"Use -fno-force-recomp instead"
, make_dep_flag defGhcFlag "no-recomp"
(NoArg $ setGeneralFlag Opt_ForceRecomp) "Use -fforce-recomp instead"
, make_ord_flag defFlag "freverse-errors"
(noArg (\d -> d {reverseErrors = True} ))
, make_ord_flag defFlag "fno-reverse-errors"
(noArg (\d -> d {reverseErrors = False} ))
------ HsCpp opts ---------------------------------------------------
, make_ord_flag defFlag "D" (AnySuffix (upd . addOptP))
, make_ord_flag defFlag "U" (AnySuffix (upd . addOptP))
------- Include/Import Paths ----------------------------------------
, make_ord_flag defFlag "I" (Prefix addIncludePath)
, make_ord_flag defFlag "i" (OptPrefix addImportPath)
------ Output style options -----------------------------------------
, make_ord_flag defFlag "dppr-user-length" (intSuffix (\n d ->
d { pprUserLength = n }))
, make_ord_flag defFlag "dppr-cols" (intSuffix (\n d ->
d { pprCols = n }))
, make_ord_flag defFlag "fdiagnostics-color=auto"
(NoArg (upd (\d -> d { useColor = Auto })))
, make_ord_flag defFlag "fdiagnostics-color=always"
(NoArg (upd (\d -> d { useColor = Always })))
, make_ord_flag defFlag "fdiagnostics-color=never"
(NoArg (upd (\d -> d { useColor = Never })))
-- Suppress all that is suppressable in core dumps.
-- Except for uniques, as some simplifier phases introduce new variables that
-- have otherwise identical names.
, make_ord_flag defGhcFlag "dsuppress-all"
(NoArg $ do setGeneralFlag Opt_SuppressCoercions
setGeneralFlag Opt_SuppressVarKinds
setGeneralFlag Opt_SuppressModulePrefixes
setGeneralFlag Opt_SuppressTypeApplications
setGeneralFlag Opt_SuppressIdInfo
setGeneralFlag Opt_SuppressTypeSignatures)
------ Debugging ----------------------------------------------------
, make_ord_flag defGhcFlag "dstg-stats"
(NoArg (setGeneralFlag Opt_StgStats))
, make_ord_flag defGhcFlag "ddump-cmm"
(setDumpFlag Opt_D_dump_cmm)
, make_ord_flag defGhcFlag "ddump-cmm-from-stg"
(setDumpFlag Opt_D_dump_cmm_from_stg)
, make_ord_flag defGhcFlag "ddump-cmm-raw"
(setDumpFlag Opt_D_dump_cmm_raw)
, make_ord_flag defGhcFlag "ddump-cmm-verbose"
(setDumpFlag Opt_D_dump_cmm_verbose)
, make_ord_flag defGhcFlag "ddump-cmm-cfg"
(setDumpFlag Opt_D_dump_cmm_cfg)
, make_ord_flag defGhcFlag "ddump-cmm-cbe"
(setDumpFlag Opt_D_dump_cmm_cbe)
, make_ord_flag defGhcFlag "ddump-cmm-switch"
(setDumpFlag Opt_D_dump_cmm_switch)
, make_ord_flag defGhcFlag "ddump-cmm-proc"
(setDumpFlag Opt_D_dump_cmm_proc)
, make_ord_flag defGhcFlag "ddump-cmm-sp"
(setDumpFlag Opt_D_dump_cmm_sp)
, make_ord_flag defGhcFlag "ddump-cmm-sink"
(setDumpFlag Opt_D_dump_cmm_sink)
, make_ord_flag defGhcFlag "ddump-cmm-caf"
(setDumpFlag Opt_D_dump_cmm_caf)
, make_ord_flag defGhcFlag "ddump-cmm-procmap"
(setDumpFlag Opt_D_dump_cmm_procmap)
, make_ord_flag defGhcFlag "ddump-cmm-split"
(setDumpFlag Opt_D_dump_cmm_split)
, make_ord_flag defGhcFlag "ddump-cmm-info"
(setDumpFlag Opt_D_dump_cmm_info)
, make_ord_flag defGhcFlag "ddump-cmm-cps"
(setDumpFlag Opt_D_dump_cmm_cps)
, make_ord_flag defGhcFlag "ddump-core-stats"
(setDumpFlag Opt_D_dump_core_stats)
, make_ord_flag defGhcFlag "ddump-asm"
(setDumpFlag Opt_D_dump_asm)
, make_ord_flag defGhcFlag "ddump-asm-native"
(setDumpFlag Opt_D_dump_asm_native)
, make_ord_flag defGhcFlag "ddump-asm-liveness"
(setDumpFlag Opt_D_dump_asm_liveness)
, make_ord_flag defGhcFlag "ddump-asm-regalloc"
(setDumpFlag Opt_D_dump_asm_regalloc)
, make_ord_flag defGhcFlag "ddump-asm-conflicts"
(setDumpFlag Opt_D_dump_asm_conflicts)
, make_ord_flag defGhcFlag "ddump-asm-regalloc-stages"
(setDumpFlag Opt_D_dump_asm_regalloc_stages)
, make_ord_flag defGhcFlag "ddump-asm-stats"
(setDumpFlag Opt_D_dump_asm_stats)
, make_ord_flag defGhcFlag "ddump-asm-expanded"
(setDumpFlag Opt_D_dump_asm_expanded)
, make_ord_flag defGhcFlag "ddump-llvm"
(NoArg $ setObjTarget HscLlvm >> setDumpFlag' Opt_D_dump_llvm)
, make_ord_flag defGhcFlag "ddump-deriv"
(setDumpFlag Opt_D_dump_deriv)
, make_ord_flag defGhcFlag "ddump-ds"
(setDumpFlag Opt_D_dump_ds)
, make_ord_flag defGhcFlag "ddump-foreign"
(setDumpFlag Opt_D_dump_foreign)
, make_ord_flag defGhcFlag "ddump-inlinings"
(setDumpFlag Opt_D_dump_inlinings)
, make_ord_flag defGhcFlag "ddump-rule-firings"
(setDumpFlag Opt_D_dump_rule_firings)
, make_ord_flag defGhcFlag "ddump-rule-rewrites"
(setDumpFlag Opt_D_dump_rule_rewrites)
, make_ord_flag defGhcFlag "ddump-simpl-trace"
(setDumpFlag Opt_D_dump_simpl_trace)
, make_ord_flag defGhcFlag "ddump-occur-anal"
(setDumpFlag Opt_D_dump_occur_anal)
, make_ord_flag defGhcFlag "ddump-parsed"
(setDumpFlag Opt_D_dump_parsed)
, make_ord_flag defGhcFlag "ddump-rn"
(setDumpFlag Opt_D_dump_rn)
, make_ord_flag defGhcFlag "ddump-simpl"
(setDumpFlag Opt_D_dump_simpl)
, make_ord_flag defGhcFlag "ddump-simpl-iterations"
(setDumpFlag Opt_D_dump_simpl_iterations)
, make_ord_flag defGhcFlag "ddump-spec"
(setDumpFlag Opt_D_dump_spec)
, make_ord_flag defGhcFlag "ddump-prep"
(setDumpFlag Opt_D_dump_prep)
, make_ord_flag defGhcFlag "ddump-stg"
(setDumpFlag Opt_D_dump_stg)
, make_ord_flag defGhcFlag "ddump-call-arity"
(setDumpFlag Opt_D_dump_call_arity)
, make_ord_flag defGhcFlag "ddump-stranal"
(setDumpFlag Opt_D_dump_stranal)
, make_ord_flag defGhcFlag "ddump-str-signatures"
(setDumpFlag Opt_D_dump_str_signatures)
, make_ord_flag defGhcFlag "ddump-tc"
(setDumpFlag Opt_D_dump_tc)
, make_ord_flag defGhcFlag "ddump-types"
(setDumpFlag Opt_D_dump_types)
, make_ord_flag defGhcFlag "ddump-rules"
(setDumpFlag Opt_D_dump_rules)
, make_ord_flag defGhcFlag "ddump-cse"
(setDumpFlag Opt_D_dump_cse)
, make_ord_flag defGhcFlag "ddump-worker-wrapper"
(setDumpFlag Opt_D_dump_worker_wrapper)
, make_ord_flag defGhcFlag "ddump-rn-trace"
(setDumpFlag Opt_D_dump_rn_trace)
, make_ord_flag defGhcFlag "ddump-shape"
(setDumpFlag Opt_D_dump_shape)
, make_ord_flag defGhcFlag "ddump-if-trace"
(setDumpFlag Opt_D_dump_if_trace)
, make_ord_flag defGhcFlag "ddump-cs-trace"
(setDumpFlag Opt_D_dump_cs_trace)
, make_ord_flag defGhcFlag "ddump-tc-trace"
(NoArg (do setDumpFlag' Opt_D_dump_tc_trace
setDumpFlag' Opt_D_dump_cs_trace))
, make_ord_flag defGhcFlag "ddump-ec-trace"
(setDumpFlag Opt_D_dump_ec_trace)
, make_ord_flag defGhcFlag "ddump-vt-trace"
(setDumpFlag Opt_D_dump_vt_trace)
, make_ord_flag defGhcFlag "ddump-splices"
(setDumpFlag Opt_D_dump_splices)
, make_ord_flag defGhcFlag "dth-dec-file"
(setDumpFlag Opt_D_th_dec_file)
, make_ord_flag defGhcFlag "ddump-rn-stats"
(setDumpFlag Opt_D_dump_rn_stats)
, make_ord_flag defGhcFlag "ddump-opt-cmm"
(setDumpFlag Opt_D_dump_opt_cmm)
, make_ord_flag defGhcFlag "ddump-simpl-stats"
(setDumpFlag Opt_D_dump_simpl_stats)
, make_ord_flag defGhcFlag "ddump-bcos"
(setDumpFlag Opt_D_dump_BCOs)
, make_ord_flag defGhcFlag "dsource-stats"
(setDumpFlag Opt_D_source_stats)
, make_ord_flag defGhcFlag "dverbose-core2core"
(NoArg $ setVerbosity (Just 2) >> setVerboseCore2Core)
, make_ord_flag defGhcFlag "dverbose-stg2stg"
(setDumpFlag Opt_D_verbose_stg2stg)
, make_ord_flag defGhcFlag "ddump-hi"
(setDumpFlag Opt_D_dump_hi)
, make_ord_flag defGhcFlag "ddump-minimal-imports"
(NoArg (setGeneralFlag Opt_D_dump_minimal_imports))
, make_ord_flag defGhcFlag "ddump-vect"
(setDumpFlag Opt_D_dump_vect)
, make_ord_flag defGhcFlag "ddump-hpc"
(setDumpFlag Opt_D_dump_ticked) -- back compat
, make_ord_flag defGhcFlag "ddump-ticked"
(setDumpFlag Opt_D_dump_ticked)
, make_ord_flag defGhcFlag "ddump-mod-cycles"
(setDumpFlag Opt_D_dump_mod_cycles)
, make_ord_flag defGhcFlag "ddump-mod-map"
(setDumpFlag Opt_D_dump_mod_map)
, make_ord_flag defGhcFlag "ddump-view-pattern-commoning"
(setDumpFlag Opt_D_dump_view_pattern_commoning)
, make_ord_flag defGhcFlag "ddump-to-file"
(NoArg (setGeneralFlag Opt_DumpToFile))
, make_ord_flag defGhcFlag "ddump-hi-diffs"
(setDumpFlag Opt_D_dump_hi_diffs)
, make_ord_flag defGhcFlag "ddump-rtti"
(setDumpFlag Opt_D_dump_rtti)
, make_ord_flag defGhcFlag "dcore-lint"
(NoArg (setGeneralFlag Opt_DoCoreLinting))
, make_ord_flag defGhcFlag "dstg-lint"
(NoArg (setGeneralFlag Opt_DoStgLinting))
, make_ord_flag defGhcFlag "dcmm-lint"
(NoArg (setGeneralFlag Opt_DoCmmLinting))
, make_ord_flag defGhcFlag "dasm-lint"
(NoArg (setGeneralFlag Opt_DoAsmLinting))
, make_ord_flag defGhcFlag "dannot-lint"
(NoArg (setGeneralFlag Opt_DoAnnotationLinting))
, make_ord_flag defGhcFlag "dshow-passes"
(NoArg $ forceRecompile >> (setVerbosity $ Just 2))
, make_ord_flag defGhcFlag "dfaststring-stats"
(NoArg (setGeneralFlag Opt_D_faststring_stats))
, make_ord_flag defGhcFlag "dno-llvm-mangler"
(NoArg (setGeneralFlag Opt_NoLlvmMangler)) -- hidden flag
, make_ord_flag defGhcFlag "ddump-debug" (setDumpFlag Opt_D_dump_debug)
------ Machine dependent (-m<blah>) stuff ---------------------------
, make_ord_flag defGhcFlag "msse" (noArg (\d ->
d { sseVersion = Just SSE1 }))
, make_ord_flag defGhcFlag "msse2" (noArg (\d ->
d { sseVersion = Just SSE2 }))
, make_ord_flag defGhcFlag "msse3" (noArg (\d ->
d { sseVersion = Just SSE3 }))
, make_ord_flag defGhcFlag "msse4" (noArg (\d ->
d { sseVersion = Just SSE4 }))
, make_ord_flag defGhcFlag "msse4.2" (noArg (\d ->
d { sseVersion = Just SSE42 }))
, make_ord_flag defGhcFlag "mavx" (noArg (\d -> d { avx = True }))
, make_ord_flag defGhcFlag "mavx2" (noArg (\d -> d { avx2 = True }))
, make_ord_flag defGhcFlag "mavx512cd" (noArg (\d ->
d { avx512cd = True }))
, make_ord_flag defGhcFlag "mavx512er" (noArg (\d ->
d { avx512er = True }))
, make_ord_flag defGhcFlag "mavx512f" (noArg (\d -> d { avx512f = True }))
, make_ord_flag defGhcFlag "mavx512pf" (noArg (\d ->
d { avx512pf = True }))
------ Warning opts -------------------------------------------------
, make_ord_flag defFlag "W" (NoArg (mapM_ setWarningFlag minusWOpts))
, make_ord_flag defFlag "Werror"
(NoArg (do { setGeneralFlag Opt_WarnIsError
; mapM_ setFatalWarningFlag minusWeverythingOpts }))
, make_ord_flag defFlag "Wwarn"
(NoArg (do { unSetGeneralFlag Opt_WarnIsError
; mapM_ unSetFatalWarningFlag minusWeverythingOpts }))
-- Opt_WarnIsError is still needed to pass -Werror
-- to CPP; see runCpp in SysTools
, make_dep_flag defFlag "Wnot" (NoArg (upd (\d ->
d {warningFlags = IntSet.empty})))
"Use -w or -Wno-everything instead"
, make_ord_flag defFlag "w" (NoArg (upd (\d ->
d {warningFlags = IntSet.empty})))
-- New-style uniform warning sets
--
-- Note that -Weverything > -Wall > -Wextra > -Wdefault > -Wno-everything
, make_ord_flag defFlag "Weverything" (NoArg (mapM_
setWarningFlag minusWeverythingOpts))
, make_ord_flag defFlag "Wno-everything"
(NoArg (upd (\d -> d {warningFlags = IntSet.empty})))
, make_ord_flag defFlag "Wall" (NoArg (mapM_
setWarningFlag minusWallOpts))
, make_ord_flag defFlag "Wno-all" (NoArg (mapM_
unSetWarningFlag minusWallOpts))
, make_ord_flag defFlag "Wextra" (NoArg (mapM_
setWarningFlag minusWOpts))
, make_ord_flag defFlag "Wno-extra" (NoArg (mapM_
unSetWarningFlag minusWOpts))
, make_ord_flag defFlag "Wdefault" (NoArg (mapM_
setWarningFlag standardWarnings))
, make_ord_flag defFlag "Wno-default" (NoArg (mapM_
unSetWarningFlag standardWarnings))
, make_ord_flag defFlag "Wcompat" (NoArg (mapM_
setWarningFlag minusWcompatOpts))
, make_ord_flag defFlag "Wno-compat" (NoArg (mapM_
unSetWarningFlag minusWcompatOpts))
------ Plugin flags ------------------------------------------------
, make_ord_flag defGhcFlag "fplugin-opt" (hasArg addPluginModuleNameOption)
, make_ord_flag defGhcFlag "fplugin" (hasArg addPluginModuleName)
, make_ord_flag defGhcFlag "ffrontend-opt" (hasArg addFrontendPluginOption)
------ Optimisation flags ------------------------------------------
, make_ord_flag defGhcFlag "O" (noArgM (setOptLevel 1))
, make_dep_flag defGhcFlag "Onot" (noArgM $ setOptLevel 0 )
"Use -O0 instead"
, make_ord_flag defGhcFlag "Odph" (noArgM setDPHOpt)
, make_ord_flag defGhcFlag "O" (optIntSuffixM (\mb_n ->
setOptLevel (mb_n `orElse` 1)))
-- If the number is missing, use 1
, make_ord_flag defFlag "fmax-relevant-binds"
(intSuffix (\n d -> d { maxRelevantBinds = Just n }))
, make_ord_flag defFlag "fno-max-relevant-binds"
(noArg (\d -> d { maxRelevantBinds = Nothing }))
, make_ord_flag defFlag "fmax-uncovered-patterns"
(intSuffix (\n d -> d { maxUncoveredPatterns = n }))
, make_ord_flag defFlag "fsimplifier-phases"
(intSuffix (\n d -> d { simplPhases = n }))
, make_ord_flag defFlag "fmax-simplifier-iterations"
(intSuffix (\n d -> d { maxSimplIterations = n }))
, make_ord_flag defFlag "fmax-pmcheck-iterations"
(intSuffix (\n d -> d{ maxPmCheckIterations = n }))
, make_ord_flag defFlag "fsimpl-tick-factor"
(intSuffix (\n d -> d { simplTickFactor = n }))
, make_ord_flag defFlag "fspec-constr-threshold"
(intSuffix (\n d -> d { specConstrThreshold = Just n }))
, make_ord_flag defFlag "fno-spec-constr-threshold"
(noArg (\d -> d { specConstrThreshold = Nothing }))
, make_ord_flag defFlag "fspec-constr-count"
(intSuffix (\n d -> d { specConstrCount = Just n }))
, make_ord_flag defFlag "fno-spec-constr-count"
(noArg (\d -> d { specConstrCount = Nothing }))
, make_ord_flag defFlag "fspec-constr-recursive"
(intSuffix (\n d -> d { specConstrRecursive = n }))
, make_ord_flag defFlag "fliberate-case-threshold"
(intSuffix (\n d -> d { liberateCaseThreshold = Just n }))
, make_ord_flag defFlag "fno-liberate-case-threshold"
(noArg (\d -> d { liberateCaseThreshold = Nothing }))
, make_ord_flag defFlag "frule-check"
(sepArg (\s d -> d { ruleCheck = Just s }))
, make_ord_flag defFlag "freduction-depth"
(intSuffix (\n d -> d { reductionDepth = treatZeroAsInf n }))
, make_ord_flag defFlag "fconstraint-solver-iterations"
(intSuffix (\n d -> d { solverIterations = treatZeroAsInf n }))
, (Deprecated, defFlag "fcontext-stack"
(intSuffixM (\n d ->
do { deprecate $ "use -freduction-depth=" ++ show n ++ " instead"
; return $ d { reductionDepth = treatZeroAsInf n } })))
, (Deprecated, defFlag "ftype-function-depth"
(intSuffixM (\n d ->
do { deprecate $ "use -freduction-depth=" ++ show n ++ " instead"
; return $ d { reductionDepth = treatZeroAsInf n } })))
, make_ord_flag defFlag "fstrictness-before"
(intSuffix (\n d -> d { strictnessBefore = n : strictnessBefore d }))
, make_ord_flag defFlag "ffloat-lam-args"
(intSuffix (\n d -> d { floatLamArgs = Just n }))
, make_ord_flag defFlag "ffloat-all-lams"
(noArg (\d -> d { floatLamArgs = Nothing }))
, make_ord_flag defFlag "fhistory-size"
(intSuffix (\n d -> d { historySize = n }))
, make_ord_flag defFlag "funfolding-creation-threshold"
(intSuffix (\n d -> d {ufCreationThreshold = n}))
, make_ord_flag defFlag "funfolding-use-threshold"
(intSuffix (\n d -> d {ufUseThreshold = n}))
, make_ord_flag defFlag "funfolding-fun-discount"
(intSuffix (\n d -> d {ufFunAppDiscount = n}))
, make_ord_flag defFlag "funfolding-dict-discount"
(intSuffix (\n d -> d {ufDictDiscount = n}))
, make_ord_flag defFlag "funfolding-keeness-factor"
(floatSuffix (\n d -> d {ufKeenessFactor = n}))
, make_ord_flag defFlag "fmax-worker-args"
(intSuffix (\n d -> d {maxWorkerArgs = n}))
, make_ord_flag defGhciFlag "fghci-hist-size"
(intSuffix (\n d -> d {ghciHistSize = n}))
, make_ord_flag defGhcFlag "fmax-inline-alloc-size"
(intSuffix (\n d -> d { maxInlineAllocSize = n }))
, make_ord_flag defGhcFlag "fmax-inline-memcpy-insns"
(intSuffix (\n d -> d { maxInlineMemcpyInsns = n }))
, make_ord_flag defGhcFlag "fmax-inline-memset-insns"
(intSuffix (\n d -> d { maxInlineMemsetInsns = n }))
, make_ord_flag defGhcFlag "dinitial-unique"
(intSuffix (\n d -> d { initialUnique = n }))
, make_ord_flag defGhcFlag "dunique-increment"
(intSuffix (\n d -> d { uniqueIncrement = n }))
------ Profiling ----------------------------------------------------
-- OLD profiling flags
, make_dep_flag defGhcFlag "auto-all"
(noArg (\d -> d { profAuto = ProfAutoAll } ))
"Use -fprof-auto instead"
, make_dep_flag defGhcFlag "no-auto-all"
(noArg (\d -> d { profAuto = NoProfAuto } ))
"Use -fno-prof-auto instead"
, make_dep_flag defGhcFlag "auto"
(noArg (\d -> d { profAuto = ProfAutoExports } ))
"Use -fprof-auto-exported instead"
, make_dep_flag defGhcFlag "no-auto"
(noArg (\d -> d { profAuto = NoProfAuto } ))
"Use -fno-prof-auto instead"
, make_dep_flag defGhcFlag "caf-all"
(NoArg (setGeneralFlag Opt_AutoSccsOnIndividualCafs))
"Use -fprof-cafs instead"
, make_dep_flag defGhcFlag "no-caf-all"
(NoArg (unSetGeneralFlag Opt_AutoSccsOnIndividualCafs))
"Use -fno-prof-cafs instead"
-- NEW profiling flags
, make_ord_flag defGhcFlag "fprof-auto"
(noArg (\d -> d { profAuto = ProfAutoAll } ))
, make_ord_flag defGhcFlag "fprof-auto-top"
(noArg (\d -> d { profAuto = ProfAutoTop } ))
, make_ord_flag defGhcFlag "fprof-auto-exported"
(noArg (\d -> d { profAuto = ProfAutoExports } ))
, make_ord_flag defGhcFlag "fprof-auto-calls"
(noArg (\d -> d { profAuto = ProfAutoCalls } ))
, make_ord_flag defGhcFlag "fno-prof-auto"
(noArg (\d -> d { profAuto = NoProfAuto } ))
------ Compiler flags -----------------------------------------------
, make_ord_flag defGhcFlag "fasm" (NoArg (setObjTarget HscAsm))
, make_ord_flag defGhcFlag "fvia-c" (NoArg
(addWarn $ "The -fvia-c flag does nothing; " ++
"it will be removed in a future GHC release"))
, make_ord_flag defGhcFlag "fvia-C" (NoArg
(addWarn $ "The -fvia-C flag does nothing; " ++
"it will be removed in a future GHC release"))
, make_ord_flag defGhcFlag "fllvm" (NoArg (setObjTarget HscLlvm))
, make_ord_flag defFlag "fno-code" (NoArg ((upd $ \d ->
d { ghcLink=NoLink }) >> setTarget HscNothing))
, make_ord_flag defFlag "fbyte-code" (NoArg (setTarget HscInterpreted))
, make_ord_flag defFlag "fobject-code" (NoArg (setTargetWithPlatform
defaultHscTarget))
, make_dep_flag defFlag "fglasgow-exts"
(NoArg enableGlasgowExts) "Use individual extensions instead"
, make_dep_flag defFlag "fno-glasgow-exts"
(NoArg disableGlasgowExts) "Use individual extensions instead"
, make_ord_flag defFlag "Wunused-binds" (NoArg enableUnusedBinds)
, make_ord_flag defFlag "Wno-unused-binds" (NoArg disableUnusedBinds)
, make_ord_flag defHiddenFlag "fwarn-unused-binds" (NoArg enableUnusedBinds)
, make_ord_flag defHiddenFlag "fno-warn-unused-binds" (NoArg
disableUnusedBinds)
------ Safe Haskell flags -------------------------------------------
, make_ord_flag defFlag "fpackage-trust" (NoArg setPackageTrust)
, make_ord_flag defFlag "fno-safe-infer" (noArg (\d ->
d { safeInfer = False }))
, make_ord_flag defGhcFlag "fPIC" (NoArg (setGeneralFlag Opt_PIC))
, make_ord_flag defGhcFlag "fno-PIC" (NoArg (unSetGeneralFlag Opt_PIC))
------ Debugging flags ----------------------------------------------
, make_ord_flag defGhcFlag "g" (OptIntSuffix setDebugLevel)
]
++ map (mkFlag turnOn "" setGeneralFlag ) negatableFlagsDeps
++ map (mkFlag turnOff "no-" unSetGeneralFlag ) negatableFlagsDeps
++ map (mkFlag turnOn "d" setGeneralFlag ) dFlagsDeps
++ map (mkFlag turnOff "dno-" unSetGeneralFlag ) dFlagsDeps
++ map (mkFlag turnOn "f" setGeneralFlag ) fFlagsDeps
++ map (mkFlag turnOff "fno-" unSetGeneralFlag ) fFlagsDeps
++ map (mkFlag turnOn "W" setWarningFlag ) wWarningFlagsDeps
++ map (mkFlag turnOff "Wno-" unSetWarningFlag ) wWarningFlagsDeps
++ map (mkFlag turnOn "Werror=" (\flag -> do {
; setWarningFlag flag
; setFatalWarningFlag flag }))
wWarningFlagsDeps
++ map (mkFlag turnOn "Wwarn=" unSetFatalWarningFlag )
wWarningFlagsDeps
++ map (mkFlag turnOn "Wno-error=" unSetFatalWarningFlag )
wWarningFlagsDeps
++ map (mkFlag turnOn "fwarn-" setWarningFlag . hideFlag)
wWarningFlagsDeps
++ map (mkFlag turnOff "fno-warn-" unSetWarningFlag . hideFlag)
wWarningFlagsDeps
++ [ (NotDeprecated, unrecognisedWarning "W"),
(Deprecated, unrecognisedWarning "fwarn-"),
(Deprecated, unrecognisedWarning "fno-warn-") ]
++ map (mkFlag turnOn "f" setExtensionFlag ) fLangFlagsDeps
++ map (mkFlag turnOff "fno-" unSetExtensionFlag) fLangFlagsDeps
++ map (mkFlag turnOn "X" setExtensionFlag ) xFlagsDeps
++ map (mkFlag turnOff "XNo" unSetExtensionFlag) xFlagsDeps
++ map (mkFlag turnOn "X" setLanguage ) languageFlagsDeps
++ map (mkFlag turnOn "X" setSafeHaskell ) safeHaskellFlagsDeps
++ [ make_dep_flag defFlag "XGenerics"
(NoArg $ return ())
("it does nothing; look into -XDefaultSignatures " ++
"and -XDeriveGeneric for generic programming support.")
, make_dep_flag defFlag "XNoGenerics"
(NoArg $ return ())
("it does nothing; look into -XDefaultSignatures and " ++
"-XDeriveGeneric for generic programming support.") ]
-- | This is where we handle unrecognised warning flags. We only issue a warning
-- if -Wunrecognised-warning-flags is set. See Trac #11429 for context.
unrecognisedWarning :: String -> Flag (CmdLineP DynFlags)
unrecognisedWarning prefix = defHiddenFlag prefix (Prefix action)
where
action :: String -> EwM (CmdLineP DynFlags) ()
action flag = do
f <- wopt Opt_WarnUnrecognisedWarningFlags <$> liftEwM getCmdLineState
when f $ addWarn $ "unrecognised warning flag: -" ++ prefix ++ flag
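-- For example, an unknown flag such as @-Wfoo@ (the name @foo@ is purely
-- illustrative) falls through to @unrecognisedWarning "W"@ and, when
-- -Wunrecognised-warning-flags is in effect, produces a warning along the
-- lines of
--
-- >   unrecognised warning flag: -Wfoo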
-- See Note [Supporting CLI completion]
package_flags_deps :: [(Deprecation, Flag (CmdLineP DynFlags))]
package_flags_deps = [
------- Packages ----------------------------------------------------
make_ord_flag defFlag "package-db"
(HasArg (addPkgConfRef . PkgConfFile))
, make_ord_flag defFlag "clear-package-db" (NoArg clearPkgConf)
, make_ord_flag defFlag "no-global-package-db" (NoArg removeGlobalPkgConf)
, make_ord_flag defFlag "no-user-package-db" (NoArg removeUserPkgConf)
, make_ord_flag defFlag "global-package-db"
(NoArg (addPkgConfRef GlobalPkgConf))
, make_ord_flag defFlag "user-package-db"
(NoArg (addPkgConfRef UserPkgConf))
-- backwards compat with GHC<=7.4 :
, make_dep_flag defFlag "package-conf"
(HasArg $ addPkgConfRef . PkgConfFile) "Use -package-db instead"
, make_dep_flag defFlag "no-user-package-conf"
(NoArg removeUserPkgConf) "Use -no-user-package-db instead"
, make_ord_flag defGhcFlag "package-name" (HasArg $ \name -> do
upd (setUnitId name))
-- TODO: Since we JUST deprecated
-- -this-package-key, let's keep this
-- undeprecated for another cycle.
-- Deprecate this eventually.
-- deprecate "Use -this-unit-id instead")
, make_dep_flag defGhcFlag "this-package-key" (HasArg $ upd . setUnitId)
"Use -this-unit-id instead"
, make_ord_flag defGhcFlag "this-unit-id" (hasArg setUnitId)
, make_ord_flag defFlag "package" (HasArg exposePackage)
, make_ord_flag defFlag "plugin-package-id" (HasArg exposePluginPackageId)
, make_ord_flag defFlag "plugin-package" (HasArg exposePluginPackage)
, make_ord_flag defFlag "package-id" (HasArg exposePackageId)
, make_ord_flag defFlag "hide-package" (HasArg hidePackage)
, make_ord_flag defFlag "hide-all-packages"
(NoArg (setGeneralFlag Opt_HideAllPackages))
, make_ord_flag defFlag "hide-all-plugin-packages"
(NoArg (setGeneralFlag Opt_HideAllPluginPackages))
, make_ord_flag defFlag "package-env" (HasArg setPackageEnv)
, make_ord_flag defFlag "ignore-package" (HasArg ignorePackage)
, make_dep_flag defFlag "syslib" (HasArg exposePackage) "Use -package instead"
, make_ord_flag defFlag "distrust-all-packages"
(NoArg (setGeneralFlag Opt_DistrustAllPackages))
, make_ord_flag defFlag "trust" (HasArg trustPackage)
, make_ord_flag defFlag "distrust" (HasArg distrustPackage)
]
where
setPackageEnv env = upd $ \s -> s { packageEnv = Just env }
-- | Make a list of flags for shell completion.
-- Filter all available flags into two groups, for interactive GHC vs all other.
flagsForCompletion :: Bool -> [String]
flagsForCompletion isInteractive
= [ '-':flagName flag
| flag <- flagsAll
, modeFilter (flagGhcMode flag)
]
where
modeFilter AllModes = True
modeFilter OnlyGhci = isInteractive
modeFilter OnlyGhc = not isInteractive
modeFilter HiddenFlag = False
type TurnOnFlag = Bool -- True <=> we are turning the flag on
-- False <=> we are turning the flag off
turnOn :: TurnOnFlag; turnOn = True
turnOff :: TurnOnFlag; turnOff = False
data FlagSpec flag
= FlagSpec
{ flagSpecName :: String -- ^ Flag in string form
, flagSpecFlag :: flag -- ^ Flag in internal form
, flagSpecAction :: (TurnOnFlag -> DynP ())
-- ^ Extra action to run when the flag is found
-- Typically, emit a warning or error
, flagSpecGhcMode :: GhcFlagMode
-- ^ In which ghc mode the flag has effect
}
-- | Define a new flag.
flagSpec :: String -> flag -> (Deprecation, FlagSpec flag)
flagSpec name flag = flagSpec' name flag nop
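-- For instance, an entry such as
--
-- >   flagSpec "orphans" Opt_WarnOrphans
--
-- in 'wWarningFlagsDeps' below, once run through 'mkFlag' with the "W" and
-- "Wno-" prefixes, yields the user-facing pair -Worphans / -Wno-orphans.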
-- | Define a new flag with an effect.
flagSpec' :: String -> flag -> (TurnOnFlag -> DynP ())
-> (Deprecation, FlagSpec flag)
flagSpec' name flag act = (NotDeprecated, FlagSpec name flag act AllModes)
-- | Define a new deprecated flag with an effect.
depFlagSpecOp :: String -> flag -> (TurnOnFlag -> DynP ()) -> String
-> (Deprecation, FlagSpec flag)
depFlagSpecOp name flag act dep =
(Deprecated, snd (flagSpec' name flag (\f -> act f >> deprecate dep)))
-- | Define a new deprecated flag.
depFlagSpec :: String -> flag -> String
-> (Deprecation, FlagSpec flag)
depFlagSpec name flag dep = depFlagSpecOp name flag nop dep
-- | Define a new deprecated flag with an effect where the deprecation message
-- depends on the flag value
depFlagSpecOp' :: String
-> flag
-> (TurnOnFlag -> DynP ())
-> (TurnOnFlag -> String)
-> (Deprecation, FlagSpec flag)
depFlagSpecOp' name flag act dep =
(Deprecated, FlagSpec name flag (\f -> act f >> (deprecate $ dep f))
AllModes)
-- | Define a new deprecated flag where the deprecation message
-- depends on the flag value
depFlagSpec' :: String
-> flag
-> (TurnOnFlag -> String)
-> (Deprecation, FlagSpec flag)
depFlagSpec' name flag dep = depFlagSpecOp' name flag nop dep
-- | Define a new deprecated flag where the deprecation message
-- is shown depending on the flag value
depFlagSpecCond :: String
-> flag
-> (TurnOnFlag -> Bool)
-> String
-> (Deprecation, FlagSpec flag)
depFlagSpecCond name flag cond dep =
(Deprecated, FlagSpec name flag (\f -> when (cond f) $ deprecate dep)
AllModes)
-- | Define a new flag for GHCi.
flagGhciSpec :: String -> flag -> (Deprecation, FlagSpec flag)
flagGhciSpec name flag = flagGhciSpec' name flag nop
-- | Define a new flag for GHCi with an effect.
flagGhciSpec' :: String -> flag -> (TurnOnFlag -> DynP ())
-> (Deprecation, FlagSpec flag)
flagGhciSpec' name flag act = (NotDeprecated, FlagSpec name flag act OnlyGhci)
-- | Define a new flag invisible to CLI completion.
flagHiddenSpec :: String -> flag -> (Deprecation, FlagSpec flag)
flagHiddenSpec name flag = flagHiddenSpec' name flag nop
-- | Define a new flag invisible to CLI completion with an effect.
flagHiddenSpec' :: String -> flag -> (TurnOnFlag -> DynP ())
-> (Deprecation, FlagSpec flag)
flagHiddenSpec' name flag act = (NotDeprecated, FlagSpec name flag act
HiddenFlag)
-- | Hide a 'FlagSpec' from being displayed in @--show-options@.
--
-- This is for example useful for flags that are obsolete, but should not
-- (yet) be deprecated for compatibility reasons.
hideFlag :: (Deprecation, FlagSpec a) -> (Deprecation, FlagSpec a)
hideFlag (dep, fs) = (dep, fs { flagSpecGhcMode = HiddenFlag })
mkFlag :: TurnOnFlag -- ^ True <=> it should be turned on
-> String -- ^ The flag prefix
-> (flag -> DynP ()) -- ^ What to do when the flag is found
-> (Deprecation, FlagSpec flag) -- ^ Specification of
-- this particular flag
-> (Deprecation, Flag (CmdLineP DynFlags))
mkFlag turn_on flagPrefix f (dep, (FlagSpec name flag extra_action mode))
= (dep,
Flag (flagPrefix ++ name) (NoArg (f flag >> extra_action turn_on)) mode)
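-- As a sketch of how this is used above, for the "tabs" warning spec
--
-- >   mkFlag turnOn "W" setWarningFlag (flagSpec "tabs" Opt_WarnTabs)
--
-- gives back @(NotDeprecated, Flag "Wtabs" ...)@, whose NoArg action runs
-- @setWarningFlag Opt_WarnTabs@ (the extra action here is a no-op).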
deprecatedForExtension :: String -> TurnOnFlag -> String
deprecatedForExtension lang turn_on
= "use -X" ++ flag ++
" or pragma {-# LANGUAGE " ++ flag ++ " #-} instead"
where
flag | turn_on = lang
| otherwise = "No" ++ lang
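-- For example, with the flag turned on or off respectively:
--
-- >   deprecatedForExtension "BangPatterns" turnOn
-- >     == "use -XBangPatterns or pragma {-# LANGUAGE BangPatterns #-} instead"
-- >   deprecatedForExtension "BangPatterns" turnOff
-- >     == "use -XNoBangPatterns or pragma {-# LANGUAGE NoBangPatterns #-} instead"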
useInstead :: String -> TurnOnFlag -> String
useInstead flag turn_on
= "Use -f" ++ no ++ flag ++ " instead"
where
no = if turn_on then "" else "no-"
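-- For example:
--
-- >   useInstead "enable-rewrite-rules" turnOn  == "Use -fenable-rewrite-rules instead"
-- >   useInstead "enable-rewrite-rules" turnOff == "Use -fno-enable-rewrite-rules instead"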
nop :: TurnOnFlag -> DynP ()
nop _ = return ()
-- | Find the 'FlagSpec' for a 'WarningFlag'.
flagSpecOf :: WarningFlag -> Maybe (FlagSpec WarningFlag)
flagSpecOf flag = listToMaybe $ filter check wWarningFlags
where
check fs = flagSpecFlag fs == flag
-- | These @-W\<blah\>@ flags can all be reversed with @-Wno-\<blah\>@
wWarningFlags :: [FlagSpec WarningFlag]
wWarningFlags = map snd (sortBy (comparing fst) wWarningFlagsDeps)
wWarningFlagsDeps :: [(Deprecation, FlagSpec WarningFlag)]
wWarningFlagsDeps = [
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
-- Please keep the list of flags below sorted alphabetically
flagSpec "alternative-layout-rule-transitional"
Opt_WarnAlternativeLayoutRuleTransitional,
depFlagSpec "amp" Opt_WarnAMP
"it has no effect",
depFlagSpec "auto-orphans" Opt_WarnAutoOrphans
"it has no effect",
flagSpec "cpp-undef" Opt_WarnCPPUndef,
flagSpec "deferred-type-errors" Opt_WarnDeferredTypeErrors,
flagSpec "deferred-out-of-scope-variables"
Opt_WarnDeferredOutOfScopeVariables,
flagSpec "deprecations" Opt_WarnWarningsDeprecations,
flagSpec "deprecated-flags" Opt_WarnDeprecatedFlags,
flagSpec "deriving-typeable" Opt_WarnDerivingTypeable,
flagSpec "dodgy-exports" Opt_WarnDodgyExports,
flagSpec "dodgy-foreign-imports" Opt_WarnDodgyForeignImports,
flagSpec "dodgy-imports" Opt_WarnDodgyImports,
flagSpec "empty-enumerations" Opt_WarnEmptyEnumerations,
depFlagSpec "context-quantification" Opt_WarnContextQuantification
"it is subsumed by an error message that cannot be disabled",
depFlagSpec "duplicate-constraints" Opt_WarnDuplicateConstraints
"it is subsumed by -Wredundant-constraints",
flagSpec "redundant-constraints" Opt_WarnRedundantConstraints,
flagSpec "duplicate-exports" Opt_WarnDuplicateExports,
flagSpec "hi-shadowing" Opt_WarnHiShadows,
flagSpec "implicit-prelude" Opt_WarnImplicitPrelude,
flagSpec "incomplete-patterns" Opt_WarnIncompletePatterns,
flagSpec "incomplete-record-updates" Opt_WarnIncompletePatternsRecUpd,
flagSpec "incomplete-uni-patterns" Opt_WarnIncompleteUniPatterns,
flagSpec "inline-rule-shadowing" Opt_WarnInlineRuleShadowing,
flagSpec "identities" Opt_WarnIdentities,
flagSpec "missing-fields" Opt_WarnMissingFields,
flagSpec "missing-import-lists" Opt_WarnMissingImportList,
depFlagSpec "missing-local-sigs" Opt_WarnMissingLocalSignatures
"it is replaced by -Wmissing-local-signatures",
flagSpec "missing-local-signatures" Opt_WarnMissingLocalSignatures,
flagSpec "missing-methods" Opt_WarnMissingMethods,
flagSpec "missing-monadfail-instances" Opt_WarnMissingMonadFailInstances,
flagSpec "semigroup" Opt_WarnSemigroup,
flagSpec "missing-signatures" Opt_WarnMissingSignatures,
depFlagSpec "missing-exported-sigs" Opt_WarnMissingExportedSignatures
"it is replaced by -Wmissing-exported-signatures",
flagSpec "missing-exported-signatures" Opt_WarnMissingExportedSignatures,
flagSpec "monomorphism-restriction" Opt_WarnMonomorphism,
flagSpec "name-shadowing" Opt_WarnNameShadowing,
flagSpec "noncanonical-monad-instances"
Opt_WarnNonCanonicalMonadInstances,
flagSpec "noncanonical-monadfail-instances"
Opt_WarnNonCanonicalMonadFailInstances,
flagSpec "noncanonical-monoid-instances"
Opt_WarnNonCanonicalMonoidInstances,
flagSpec "orphans" Opt_WarnOrphans,
flagSpec "overflowed-literals" Opt_WarnOverflowedLiterals,
flagSpec "overlapping-patterns" Opt_WarnOverlappingPatterns,
flagSpec "missed-specialisations" Opt_WarnMissedSpecs,
flagSpec "missed-specializations" Opt_WarnMissedSpecs,
flagSpec "all-missed-specialisations" Opt_WarnAllMissedSpecs,
flagSpec "all-missed-specializations" Opt_WarnAllMissedSpecs,
flagSpec' "safe" Opt_WarnSafe setWarnSafe,
flagSpec "trustworthy-safe" Opt_WarnTrustworthySafe,
flagSpec "tabs" Opt_WarnTabs,
flagSpec "type-defaults" Opt_WarnTypeDefaults,
flagSpec "typed-holes" Opt_WarnTypedHoles,
flagSpec "partial-type-signatures" Opt_WarnPartialTypeSignatures,
flagSpec "unrecognised-pragmas" Opt_WarnUnrecognisedPragmas,
flagSpec' "unsafe" Opt_WarnUnsafe setWarnUnsafe,
flagSpec "unsupported-calling-conventions"
Opt_WarnUnsupportedCallingConventions,
flagSpec "unsupported-llvm-version" Opt_WarnUnsupportedLlvmVersion,
flagSpec "unticked-promoted-constructors"
Opt_WarnUntickedPromotedConstructors,
flagSpec "unused-do-bind" Opt_WarnUnusedDoBind,
flagSpec "unused-foralls" Opt_WarnUnusedForalls,
flagSpec "unused-imports" Opt_WarnUnusedImports,
flagSpec "unused-local-binds" Opt_WarnUnusedLocalBinds,
flagSpec "unused-matches" Opt_WarnUnusedMatches,
flagSpec "unused-pattern-binds" Opt_WarnUnusedPatternBinds,
flagSpec "unused-top-binds" Opt_WarnUnusedTopBinds,
flagSpec "unused-type-patterns" Opt_WarnUnusedTypePatterns,
flagSpec "warnings-deprecations" Opt_WarnWarningsDeprecations,
flagSpec "wrong-do-bind" Opt_WarnWrongDoBind,
flagSpec "missing-pattern-synonym-signatures"
Opt_WarnMissingPatternSynonymSignatures,
flagSpec "simplifiable-class-constraints" Opt_WarnSimplifiableClassConstraints,
flagSpec "unrecognised-warning-flags" Opt_WarnUnrecognisedWarningFlags ]
-- | These @-\<blah\>@ flags can all be reversed with @-no-\<blah\>@
negatableFlagsDeps :: [(Deprecation, FlagSpec GeneralFlag)]
negatableFlagsDeps = [
flagGhciSpec "ignore-dot-ghci" Opt_IgnoreDotGhci ]
-- | These @-d\<blah\>@ flags can all be reversed with @-dno-\<blah\>@
dFlagsDeps :: [(Deprecation, FlagSpec GeneralFlag)]
dFlagsDeps = [
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
-- Please keep the list of flags below sorted alphabetically
flagSpec "ppr-case-as-let" Opt_PprCaseAsLet,
flagSpec "ppr-ticks" Opt_PprShowTicks,
flagSpec "suppress-coercions" Opt_SuppressCoercions,
flagSpec "suppress-idinfo" Opt_SuppressIdInfo,
flagSpec "suppress-unfoldings" Opt_SuppressUnfoldings,
flagSpec "suppress-module-prefixes" Opt_SuppressModulePrefixes,
flagSpec "suppress-type-applications" Opt_SuppressTypeApplications,
flagSpec "suppress-type-signatures" Opt_SuppressTypeSignatures,
flagSpec "suppress-uniques" Opt_SuppressUniques,
flagSpec "suppress-var-kinds" Opt_SuppressVarKinds]
-- | These @-f\<blah\>@ flags can all be reversed with @-fno-\<blah\>@
fFlags :: [FlagSpec GeneralFlag]
fFlags = map snd fFlagsDeps
fFlagsDeps :: [(Deprecation, FlagSpec GeneralFlag)]
fFlagsDeps = [
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
-- Please keep the list of flags below sorted alphabetically
flagGhciSpec "break-on-error" Opt_BreakOnError,
flagGhciSpec "break-on-exception" Opt_BreakOnException,
flagSpec "building-cabal-package" Opt_BuildingCabalPackage,
flagSpec "call-arity" Opt_CallArity,
flagSpec "case-merge" Opt_CaseMerge,
flagSpec "cmm-elim-common-blocks" Opt_CmmElimCommonBlocks,
flagSpec "cmm-sink" Opt_CmmSink,
flagSpec "cse" Opt_CSE,
flagSpec "cpr-anal" Opt_CprAnal,
flagSpec "defer-type-errors" Opt_DeferTypeErrors,
flagSpec "defer-typed-holes" Opt_DeferTypedHoles,
flagSpec "defer-out-of-scope-variables" Opt_DeferOutOfScopeVariables,
flagSpec "dicts-cheap" Opt_DictsCheap,
flagSpec "dicts-strict" Opt_DictsStrict,
flagSpec "dmd-tx-dict-sel" Opt_DmdTxDictSel,
flagSpec "do-eta-reduction" Opt_DoEtaReduction,
flagSpec "do-lambda-eta-expansion" Opt_DoLambdaEtaExpansion,
flagSpec "eager-blackholing" Opt_EagerBlackHoling,
flagSpec "embed-manifest" Opt_EmbedManifest,
flagSpec "enable-rewrite-rules" Opt_EnableRewriteRules,
flagSpec "error-spans" Opt_ErrorSpans,
flagSpec "excess-precision" Opt_ExcessPrecision,
flagSpec "expose-all-unfoldings" Opt_ExposeAllUnfoldings,
flagSpec "external-interpreter" Opt_ExternalInterpreter,
flagSpec "flat-cache" Opt_FlatCache,
flagSpec "float-in" Opt_FloatIn,
flagSpec "force-recomp" Opt_ForceRecomp,
flagSpec "full-laziness" Opt_FullLaziness,
flagSpec "fun-to-thunk" Opt_FunToThunk,
flagSpec "gen-manifest" Opt_GenManifest,
flagSpec "ghci-history" Opt_GhciHistory,
flagGhciSpec "local-ghci-history" Opt_LocalGhciHistory,
flagSpec "ghci-sandbox" Opt_GhciSandbox,
flagSpec "helpful-errors" Opt_HelpfulErrors,
flagSpec "hpc" Opt_Hpc,
flagSpec "ignore-asserts" Opt_IgnoreAsserts,
flagSpec "ignore-interface-pragmas" Opt_IgnoreInterfacePragmas,
flagGhciSpec "implicit-import-qualified" Opt_ImplicitImportQualified,
flagSpec "irrefutable-tuples" Opt_IrrefutableTuples,
flagSpec "kill-absence" Opt_KillAbsence,
flagSpec "kill-one-shot" Opt_KillOneShot,
flagSpec "late-dmd-anal" Opt_LateDmdAnal,
flagSpec "liberate-case" Opt_LiberateCase,
flagHiddenSpec "llvm-pass-vectors-in-regs" Opt_LlvmPassVectorsInRegisters,
flagHiddenSpec "llvm-tbaa" Opt_LlvmTBAA,
flagHiddenSpec "llvm-fill-undef-with-garbage" Opt_LlvmFillUndefWithGarbage,
flagSpec "loopification" Opt_Loopification,
flagSpec "omit-interface-pragmas" Opt_OmitInterfacePragmas,
flagSpec "omit-yields" Opt_OmitYields,
flagSpec "optimal-applicative-do" Opt_OptimalApplicativeDo,
flagSpec "pedantic-bottoms" Opt_PedanticBottoms,
flagSpec "pre-inlining" Opt_SimplPreInlining,
flagGhciSpec "print-bind-contents" Opt_PrintBindContents,
flagGhciSpec "print-bind-result" Opt_PrintBindResult,
flagGhciSpec "print-evld-with-show" Opt_PrintEvldWithShow,
flagSpec "print-explicit-foralls" Opt_PrintExplicitForalls,
flagSpec "print-explicit-kinds" Opt_PrintExplicitKinds,
flagSpec "print-explicit-coercions" Opt_PrintExplicitCoercions,
flagSpec "print-explicit-runtime-reps" Opt_PrintExplicitRuntimeReps,
flagSpec "print-equality-relations" Opt_PrintEqualityRelations,
flagSpec "print-unicode-syntax" Opt_PrintUnicodeSyntax,
flagSpec "print-expanded-synonyms" Opt_PrintExpandedSynonyms,
flagSpec "print-potential-instances" Opt_PrintPotentialInstances,
flagSpec "print-typechecker-elaboration" Opt_PrintTypecheckerElaboration,
flagSpec "prof-cafs" Opt_AutoSccsOnIndividualCafs,
flagSpec "prof-count-entries" Opt_ProfCountEntries,
flagSpec "regs-graph" Opt_RegsGraph,
flagSpec "regs-iterative" Opt_RegsIterative,
depFlagSpec' "rewrite-rules" Opt_EnableRewriteRules
(useInstead "enable-rewrite-rules"),
flagSpec "shared-implib" Opt_SharedImplib,
flagSpec "spec-constr" Opt_SpecConstr,
flagSpec "specialise" Opt_Specialise,
flagSpec "specialize" Opt_Specialise,
flagSpec "specialise-aggressively" Opt_SpecialiseAggressively,
flagSpec "specialize-aggressively" Opt_SpecialiseAggressively,
flagSpec "cross-module-specialise" Opt_CrossModuleSpecialise,
flagSpec "cross-module-specialize" Opt_CrossModuleSpecialise,
flagSpec "static-argument-transformation" Opt_StaticArgumentTransformation,
flagSpec "strictness" Opt_Strictness,
flagSpec "use-rpaths" Opt_RPath,
flagSpec "write-interface" Opt_WriteInterface,
flagSpec "unbox-small-strict-fields" Opt_UnboxSmallStrictFields,
flagSpec "unbox-strict-fields" Opt_UnboxStrictFields,
flagSpec "vectorisation-avoidance" Opt_VectorisationAvoidance,
flagSpec "vectorise" Opt_Vectorise,
flagSpec "version-macros" Opt_VersionMacros,
flagSpec "worker-wrapper" Opt_WorkerWrapper,
flagSpec "show-warning-groups" Opt_ShowWarnGroups,
flagSpec "hide-source-paths" Opt_HideSourcePaths
]
-- | These @-f\<blah\>@ flags can all be reversed with @-fno-\<blah\>@
fLangFlags :: [FlagSpec LangExt.Extension]
fLangFlags = map snd fLangFlagsDeps
fLangFlagsDeps :: [(Deprecation, FlagSpec LangExt.Extension)]
fLangFlagsDeps = [
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
depFlagSpecOp' "th" LangExt.TemplateHaskell
checkTemplateHaskellOk
(deprecatedForExtension "TemplateHaskell"),
depFlagSpec' "fi" LangExt.ForeignFunctionInterface
(deprecatedForExtension "ForeignFunctionInterface"),
depFlagSpec' "ffi" LangExt.ForeignFunctionInterface
(deprecatedForExtension "ForeignFunctionInterface"),
depFlagSpec' "arrows" LangExt.Arrows
(deprecatedForExtension "Arrows"),
depFlagSpec' "implicit-prelude" LangExt.ImplicitPrelude
(deprecatedForExtension "ImplicitPrelude"),
depFlagSpec' "bang-patterns" LangExt.BangPatterns
(deprecatedForExtension "BangPatterns"),
depFlagSpec' "monomorphism-restriction" LangExt.MonomorphismRestriction
(deprecatedForExtension "MonomorphismRestriction"),
depFlagSpec' "mono-pat-binds" LangExt.MonoPatBinds
(deprecatedForExtension "MonoPatBinds"),
depFlagSpec' "extended-default-rules" LangExt.ExtendedDefaultRules
(deprecatedForExtension "ExtendedDefaultRules"),
depFlagSpec' "implicit-params" LangExt.ImplicitParams
(deprecatedForExtension "ImplicitParams"),
depFlagSpec' "scoped-type-variables" LangExt.ScopedTypeVariables
(deprecatedForExtension "ScopedTypeVariables"),
depFlagSpec' "parr" LangExt.ParallelArrays
(deprecatedForExtension "ParallelArrays"),
depFlagSpec' "PArr" LangExt.ParallelArrays
(deprecatedForExtension "ParallelArrays"),
depFlagSpec' "allow-overlapping-instances" LangExt.OverlappingInstances
(deprecatedForExtension "OverlappingInstances"),
depFlagSpec' "allow-undecidable-instances" LangExt.UndecidableInstances
(deprecatedForExtension "UndecidableInstances"),
depFlagSpec' "allow-incoherent-instances" LangExt.IncoherentInstances
(deprecatedForExtension "IncoherentInstances")
]
supportedLanguages :: [String]
supportedLanguages = map (flagSpecName . snd) languageFlagsDeps
supportedLanguageOverlays :: [String]
supportedLanguageOverlays = map (flagSpecName . snd) safeHaskellFlagsDeps
supportedExtensions :: [String]
supportedExtensions = concatMap toFlagSpecNamePair xFlags
where
toFlagSpecNamePair flg
#ifndef GHCI
-- make sure that `ghc --supported-extensions` omits
-- "TemplateHaskell" when it's known to be unsupported. See also
-- GHC #11102 for rationale
| flagSpecFlag flg == LangExt.TemplateHaskell = [noName]
#endif
| otherwise = [name, noName]
where
noName = "No" ++ name
name = flagSpecName flg
supportedLanguagesAndExtensions :: [String]
supportedLanguagesAndExtensions =
supportedLanguages ++ supportedLanguageOverlays ++ supportedExtensions
-- | These -X<blah> flags cannot be reversed with -XNo<blah>
languageFlagsDeps :: [(Deprecation, FlagSpec Language)]
languageFlagsDeps = [
flagSpec "Haskell98" Haskell98,
flagSpec "Haskell2010" Haskell2010
]
-- | These -X<blah> flags cannot be reversed with -XNo<blah>
-- They are used to place hard requirements on what GHC Haskell language
-- features can be used.
safeHaskellFlagsDeps :: [(Deprecation, FlagSpec SafeHaskellMode)]
safeHaskellFlagsDeps = [mkF Sf_Unsafe, mkF Sf_Trustworthy, mkF Sf_Safe]
where mkF flag = flagSpec (show flag) flag
-- | These -X<blah> flags can all be reversed with -XNo<blah>
xFlags :: [FlagSpec LangExt.Extension]
xFlags = map snd xFlagsDeps
xFlagsDeps :: [(Deprecation, FlagSpec LangExt.Extension)]
xFlagsDeps = [
-- See Note [Updating flag description in the User's Guide]
-- See Note [Supporting CLI completion]
-- See Note [Adding a language extension]
-- Please keep the list of flags below sorted alphabetically
flagSpec "AllowAmbiguousTypes" LangExt.AllowAmbiguousTypes,
flagSpec "AlternativeLayoutRule" LangExt.AlternativeLayoutRule,
flagSpec "AlternativeLayoutRuleTransitional"
LangExt.AlternativeLayoutRuleTransitional,
flagSpec "Arrows" LangExt.Arrows,
flagSpec "AutoDeriveTypeable" LangExt.AutoDeriveTypeable,
flagSpec "BangPatterns" LangExt.BangPatterns,
flagSpec "BinaryLiterals" LangExt.BinaryLiterals,
flagSpec "CApiFFI" LangExt.CApiFFI,
flagSpec "CPP" LangExt.Cpp,
flagSpec "ConstrainedClassMethods" LangExt.ConstrainedClassMethods,
flagSpec "ConstraintKinds" LangExt.ConstraintKinds,
flagSpec "DataKinds" LangExt.DataKinds,
depFlagSpecCond "DatatypeContexts" LangExt.DatatypeContexts
id
("It was widely considered a misfeature, " ++
"and has been removed from the Haskell language."),
flagSpec "DefaultSignatures" LangExt.DefaultSignatures,
flagSpec "DeriveAnyClass" LangExt.DeriveAnyClass,
flagSpec "DeriveDataTypeable" LangExt.DeriveDataTypeable,
flagSpec "DeriveFoldable" LangExt.DeriveFoldable,
flagSpec "DeriveFunctor" LangExt.DeriveFunctor,
flagSpec "DeriveGeneric" LangExt.DeriveGeneric,
flagSpec "DeriveLift" LangExt.DeriveLift,
flagSpec "DeriveTraversable" LangExt.DeriveTraversable,
flagSpec "DerivingStrategies" LangExt.DerivingStrategies,
flagSpec "DisambiguateRecordFields" LangExt.DisambiguateRecordFields,
flagSpec "DoAndIfThenElse" LangExt.DoAndIfThenElse,
depFlagSpec' "DoRec" LangExt.RecursiveDo
(deprecatedForExtension "RecursiveDo"),
flagSpec "DuplicateRecordFields" LangExt.DuplicateRecordFields,
flagSpec "EmptyCase" LangExt.EmptyCase,
flagSpec "EmptyDataDecls" LangExt.EmptyDataDecls,
flagSpec "ExistentialQuantification" LangExt.ExistentialQuantification,
flagSpec "ExplicitForAll" LangExt.ExplicitForAll,
flagSpec "ExplicitNamespaces" LangExt.ExplicitNamespaces,
flagSpec "ExtendedDefaultRules" LangExt.ExtendedDefaultRules,
flagSpec "FlexibleContexts" LangExt.FlexibleContexts,
flagSpec "FlexibleInstances" LangExt.FlexibleInstances,
flagSpec "ForeignFunctionInterface" LangExt.ForeignFunctionInterface,
flagSpec "FunctionalDependencies" LangExt.FunctionalDependencies,
flagSpec "GADTSyntax" LangExt.GADTSyntax,
flagSpec "GADTs" LangExt.GADTs,
flagSpec "GHCForeignImportPrim" LangExt.GHCForeignImportPrim,
flagSpec' "GeneralizedNewtypeDeriving" LangExt.GeneralizedNewtypeDeriving
setGenDeriving,
flagSpec "ImplicitParams" LangExt.ImplicitParams,
flagSpec "ImplicitPrelude" LangExt.ImplicitPrelude,
flagSpec "ImpredicativeTypes" LangExt.ImpredicativeTypes,
flagSpec' "IncoherentInstances" LangExt.IncoherentInstances
setIncoherentInsts,
flagSpec "TypeFamilyDependencies" LangExt.TypeFamilyDependencies,
flagSpec "InstanceSigs" LangExt.InstanceSigs,
flagSpec "ApplicativeDo" LangExt.ApplicativeDo,
flagSpec "InterruptibleFFI" LangExt.InterruptibleFFI,
flagSpec "JavaScriptFFI" LangExt.JavaScriptFFI,
flagSpec "KindSignatures" LangExt.KindSignatures,
flagSpec "LambdaCase" LangExt.LambdaCase,
flagSpec "LiberalTypeSynonyms" LangExt.LiberalTypeSynonyms,
flagSpec "MagicHash" LangExt.MagicHash,
flagSpec "MonadComprehensions" LangExt.MonadComprehensions,
flagSpec "MonadFailDesugaring" LangExt.MonadFailDesugaring,
flagSpec "MonoLocalBinds" LangExt.MonoLocalBinds,
depFlagSpecCond "MonoPatBinds" LangExt.MonoPatBinds
id
"Experimental feature now removed; has no effect",
flagSpec "MonomorphismRestriction" LangExt.MonomorphismRestriction,
flagSpec "MultiParamTypeClasses" LangExt.MultiParamTypeClasses,
flagSpec "MultiWayIf" LangExt.MultiWayIf,
flagSpec "NPlusKPatterns" LangExt.NPlusKPatterns,
flagSpec "NamedFieldPuns" LangExt.RecordPuns,
flagSpec "NamedWildCards" LangExt.NamedWildCards,
flagSpec "NegativeLiterals" LangExt.NegativeLiterals,
flagSpec "NondecreasingIndentation" LangExt.NondecreasingIndentation,
depFlagSpec' "NullaryTypeClasses" LangExt.NullaryTypeClasses
(deprecatedForExtension "MultiParamTypeClasses"),
flagSpec "NumDecimals" LangExt.NumDecimals,
depFlagSpecOp "OverlappingInstances" LangExt.OverlappingInstances
setOverlappingInsts
"instead use per-instance pragmas OVERLAPPING/OVERLAPPABLE/OVERLAPS",
flagSpec "OverloadedLabels" LangExt.OverloadedLabels,
flagSpec "OverloadedLists" LangExt.OverloadedLists,
flagSpec "OverloadedStrings" LangExt.OverloadedStrings,
flagSpec "PackageImports" LangExt.PackageImports,
flagSpec "ParallelArrays" LangExt.ParallelArrays,
flagSpec "ParallelListComp" LangExt.ParallelListComp,
flagSpec "PartialTypeSignatures" LangExt.PartialTypeSignatures,
flagSpec "PatternGuards" LangExt.PatternGuards,
depFlagSpec' "PatternSignatures" LangExt.ScopedTypeVariables
(deprecatedForExtension "ScopedTypeVariables"),
flagSpec "PatternSynonyms" LangExt.PatternSynonyms,
flagSpec "PolyKinds" LangExt.PolyKinds,
flagSpec "PolymorphicComponents" LangExt.RankNTypes,
flagSpec "PostfixOperators" LangExt.PostfixOperators,
flagSpec "QuasiQuotes" LangExt.QuasiQuotes,
flagSpec "Rank2Types" LangExt.RankNTypes,
flagSpec "RankNTypes" LangExt.RankNTypes,
flagSpec "RebindableSyntax" LangExt.RebindableSyntax,
depFlagSpec' "RecordPuns" LangExt.RecordPuns
(deprecatedForExtension "NamedFieldPuns"),
flagSpec "RecordWildCards" LangExt.RecordWildCards,
flagSpec "RecursiveDo" LangExt.RecursiveDo,
flagSpec "RelaxedLayout" LangExt.RelaxedLayout,
depFlagSpecCond "RelaxedPolyRec" LangExt.RelaxedPolyRec
not
"You can't turn off RelaxedPolyRec any more",
flagSpec "RoleAnnotations" LangExt.RoleAnnotations,
flagSpec "ScopedTypeVariables" LangExt.ScopedTypeVariables,
flagSpec "StandaloneDeriving" LangExt.StandaloneDeriving,
flagSpec "StaticPointers" LangExt.StaticPointers,
flagSpec "Strict" LangExt.Strict,
flagSpec "StrictData" LangExt.StrictData,
flagSpec' "TemplateHaskell" LangExt.TemplateHaskell
checkTemplateHaskellOk,
flagSpec "TemplateHaskellQuotes" LangExt.TemplateHaskellQuotes,
flagSpec "TraditionalRecordSyntax" LangExt.TraditionalRecordSyntax,
flagSpec "TransformListComp" LangExt.TransformListComp,
flagSpec "TupleSections" LangExt.TupleSections,
flagSpec "TypeApplications" LangExt.TypeApplications,
flagSpec "TypeInType" LangExt.TypeInType,
flagSpec "TypeFamilies" LangExt.TypeFamilies,
flagSpec "TypeOperators" LangExt.TypeOperators,
flagSpec "TypeSynonymInstances" LangExt.TypeSynonymInstances,
flagSpec "UnboxedTuples" LangExt.UnboxedTuples,
flagSpec "UnboxedSums" LangExt.UnboxedSums,
flagSpec "UndecidableInstances" LangExt.UndecidableInstances,
flagSpec "UndecidableSuperClasses" LangExt.UndecidableSuperClasses,
flagSpec "UnicodeSyntax" LangExt.UnicodeSyntax,
flagSpec "UnliftedFFITypes" LangExt.UnliftedFFITypes,
flagSpec "ViewPatterns" LangExt.ViewPatterns
]
defaultFlags :: Settings -> [GeneralFlag]
defaultFlags settings
-- See Note [Updating flag description in the User's Guide]
= [ Opt_AutoLinkPackages,
Opt_EmbedManifest,
Opt_FlatCache,
Opt_GenManifest,
Opt_GhciHistory,
Opt_GhciSandbox,
Opt_HelpfulErrors,
Opt_KeepHiFiles,
Opt_KeepOFiles,
Opt_OmitYields,
Opt_PrintBindContents,
Opt_ProfCountEntries,
Opt_RPath,
Opt_SharedImplib,
Opt_SimplPreInlining,
Opt_VersionMacros
]
++ [f | (ns,f) <- optLevelFlags, 0 `elem` ns]
-- The default -O0 options
++ default_PIC platform
++ concatMap (wayGeneralFlags platform) (defaultWays settings)
where platform = sTargetPlatform settings
default_PIC :: Platform -> [GeneralFlag]
default_PIC platform =
case (platformOS platform, platformArch platform) of
(OSDarwin, ArchX86_64) -> [Opt_PIC]
(OSOpenBSD, ArchX86_64) -> [Opt_PIC] -- Due to PIE support in
-- OpenBSD since 5.3 release
-- (1 May 2013) we need to
-- always generate PIC. See
-- #10597 for more
-- information.
_ -> []
-- General flags that are switched on/off when other general flags are switched
-- on
impliedGFlags :: [(GeneralFlag, TurnOnFlag, GeneralFlag)]
impliedGFlags = [(Opt_DeferTypeErrors, turnOn, Opt_DeferTypedHoles)
,(Opt_DeferTypeErrors, turnOn, Opt_DeferOutOfScopeVariables)
,(Opt_Strictness, turnOn, Opt_WorkerWrapper)
]
-- General flags that are switched on/off when other general flags are switched
-- off
impliedOffGFlags :: [(GeneralFlag, TurnOnFlag, GeneralFlag)]
impliedOffGFlags = [(Opt_Strictness, turnOff, Opt_WorkerWrapper)]
impliedXFlags :: [(LangExt.Extension, TurnOnFlag, LangExt.Extension)]
impliedXFlags
-- See Note [Updating flag description in the User's Guide]
= [ (LangExt.RankNTypes, turnOn, LangExt.ExplicitForAll)
, (LangExt.ScopedTypeVariables, turnOn, LangExt.ExplicitForAll)
, (LangExt.LiberalTypeSynonyms, turnOn, LangExt.ExplicitForAll)
, (LangExt.ExistentialQuantification, turnOn, LangExt.ExplicitForAll)
, (LangExt.FlexibleInstances, turnOn, LangExt.TypeSynonymInstances)
, (LangExt.FunctionalDependencies, turnOn, LangExt.MultiParamTypeClasses)
, (LangExt.MultiParamTypeClasses, turnOn, LangExt.ConstrainedClassMethods) -- c.f. Trac #7854
, (LangExt.TypeFamilyDependencies, turnOn, LangExt.TypeFamilies)
, (LangExt.RebindableSyntax, turnOff, LangExt.ImplicitPrelude) -- NB: turn off!
, (LangExt.GADTs, turnOn, LangExt.GADTSyntax)
, (LangExt.GADTs, turnOn, LangExt.MonoLocalBinds)
, (LangExt.TypeFamilies, turnOn, LangExt.MonoLocalBinds)
, (LangExt.TypeFamilies, turnOn, LangExt.KindSignatures) -- Type families use kind signatures
, (LangExt.PolyKinds, turnOn, LangExt.KindSignatures) -- Ditto polymorphic kinds
, (LangExt.TypeInType, turnOn, LangExt.DataKinds)
, (LangExt.TypeInType, turnOn, LangExt.PolyKinds)
, (LangExt.TypeInType, turnOn, LangExt.KindSignatures)
-- AutoDeriveTypeable is not very useful without DeriveDataTypeable
, (LangExt.AutoDeriveTypeable, turnOn, LangExt.DeriveDataTypeable)
-- We turn this on so that we can export associated type
-- type synonyms in subordinates (e.g. MyClass(type AssocType))
, (LangExt.TypeFamilies, turnOn, LangExt.ExplicitNamespaces)
, (LangExt.TypeOperators, turnOn, LangExt.ExplicitNamespaces)
, (LangExt.ImpredicativeTypes, turnOn, LangExt.RankNTypes)
-- Record wild-cards implies field disambiguation
-- Otherwise if you write (C {..}) you may well get
-- stuff like " 'a' not in scope ", which is a bit silly
-- if the compiler has just filled in field 'a' of constructor 'C'
, (LangExt.RecordWildCards, turnOn, LangExt.DisambiguateRecordFields)
, (LangExt.ParallelArrays, turnOn, LangExt.ParallelListComp)
, (LangExt.JavaScriptFFI, turnOn, LangExt.InterruptibleFFI)
, (LangExt.DeriveTraversable, turnOn, LangExt.DeriveFunctor)
, (LangExt.DeriveTraversable, turnOn, LangExt.DeriveFoldable)
-- Duplicate record fields require field disambiguation
, (LangExt.DuplicateRecordFields, turnOn, LangExt.DisambiguateRecordFields)
, (LangExt.TemplateHaskell, turnOn, LangExt.TemplateHaskellQuotes)
, (LangExt.Strict, turnOn, LangExt.StrictData)
]
-- Note [Documenting optimisation flags]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- If you change the list of flags enabled for particular optimisation levels
-- please remember to update the User's Guide. The relevant files are:
--
-- * utils/mkUserGuidePart/Options/
-- * docs/users_guide/using.rst
--
-- The first contains the Flag Reference section, which briefly lists all
-- available flags. The second contains a detailed description of the
-- flags. Both places should contain information whether a flag is implied by
-- -O0, -O or -O2.
optLevelFlags :: [([Int], GeneralFlag)]
optLevelFlags -- see Note [Documenting optimisation flags]
= [ ([0,1,2], Opt_DoLambdaEtaExpansion)
, ([0,1,2], Opt_DoEtaReduction) -- See Note [Eta-reduction in -O0]
, ([0,1,2], Opt_DmdTxDictSel)
, ([0,1,2], Opt_LlvmTBAA)
, ([0,1,2], Opt_VectorisationAvoidance)
-- This one is important for a tiresome reason:
-- we want to make sure that the bindings for data
-- constructors are eta-expanded. This is probably
-- a good thing anyway, but it seems fragile.
, ([0], Opt_IgnoreInterfacePragmas)
, ([0], Opt_OmitInterfacePragmas)
, ([1,2], Opt_CallArity)
, ([1,2], Opt_CaseMerge)
, ([1,2], Opt_CmmElimCommonBlocks)
, ([1,2], Opt_CmmSink)
, ([1,2], Opt_CSE)
, ([1,2], Opt_EnableRewriteRules) -- Off for -O0; see Note [Scoping for Builtin rules]
-- in PrelRules
, ([1,2], Opt_FloatIn)
, ([1,2], Opt_FullLaziness)
, ([1,2], Opt_IgnoreAsserts)
, ([1,2], Opt_Loopification)
, ([1,2], Opt_Specialise)
, ([1,2], Opt_CrossModuleSpecialise)
, ([1,2], Opt_Strictness)
, ([1,2], Opt_UnboxSmallStrictFields)
, ([1,2], Opt_CprAnal)
, ([1,2], Opt_WorkerWrapper)
, ([2], Opt_LiberateCase)
, ([2], Opt_SpecConstr)
-- , ([2], Opt_RegsGraph)
-- RegsGraph suffers performance regression. See #7679
-- , ([2], Opt_StaticArgumentTransformation)
-- Static Argument Transformation needs investigation. See #9374
]
{- Note [Eta-reduction in -O0]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Trac #11562 showed an example which tripped an ASSERT in CoreToStg; a
function was marked as MayHaveCafRefs when in fact it obviously
didn't. Reason was:
* Eta reduction wasn't happening in the simplifier, but it was
happening in CorePrep, on
$fBla = MkDict (/\a. K a)
* Result: rhsIsStatic told TidyPgm that $fBla might have CAF refs
but the eta-reduced version (MkDict K) obviously doesn't
Simple solution: just let the simplifier do eta-reduction even in -O0.
After all, CorePrep does it unconditionally! Not a big deal, but
removes an assertion failure. -}
-- -----------------------------------------------------------------------------
-- Standard sets of warning options
-- Note [Documenting warning flags]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- If you change the list of warning enabled by default
-- please remember to update the User's Guide. The relevant file is:
--
-- * utils/mkUserGuidePart/
-- * docs/users_guide/using-warnings.rst
-- | Warning groups.
--
-- Since every warning is in the Weverything set, that group is omitted when
-- reporting to the user which group a warning belongs to.
warningGroups :: [(String, [WarningFlag])]
warningGroups =
[ ("compat", minusWcompatOpts)
, ("unused-binds", unusedBindsFlags)
, ("default", standardWarnings)
, ("extra", minusWOpts)
, ("all", minusWallOpts)
, ("everything", minusWeverythingOpts)
]
-- | Warning group hierarchies, where there is an explicit inclusion
-- relation.
--
-- Each inner list is a hierarchy of warning groups, ordered from
-- smallest to largest, where each group is a superset of the one
-- before it.
--
-- Separating this from 'warningGroups' allows for multiple
-- hierarchies with no inherent relation to be defined.
--
-- The special-case Weverything group is not included.
warningHierarchies :: [[String]]
warningHierarchies = hierarchies ++ map (:[]) rest
where
hierarchies = [["default", "extra", "all"]]
rest = filter (`notElem` "everything" : concat hierarchies) $
map fst warningGroups
-- | Find the smallest group in every hierarchy which a warning
-- belongs to, excluding Weverything.
smallestGroups :: WarningFlag -> [String]
smallestGroups flag = mapMaybe go warningHierarchies where
-- Because each hierarchy is arranged from smallest to largest,
-- the first group we find in a hierarchy which contains the flag
-- is the smallest.
go (group:rest) = fromMaybe (go rest) $ do
flags <- lookup group warningGroups
guard (flag `elem` flags)
pure (Just group)
go [] = Nothing
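-- For example, per the groups defined above: Opt_WarnTypeDefaults first
-- appears in minusWallOpts and is in no other hierarchy, so
--
-- >   smallestGroups Opt_WarnTypeDefaults == ["all"]
--
-- whereas Opt_WarnTabs is already in standardWarnings, giving ["default"].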
-- | Warnings enabled unless specified otherwise
standardWarnings :: [WarningFlag]
standardWarnings -- see Note [Documenting warning flags]
= [ Opt_WarnOverlappingPatterns,
Opt_WarnWarningsDeprecations,
Opt_WarnDeprecatedFlags,
Opt_WarnDeferredTypeErrors,
Opt_WarnTypedHoles,
Opt_WarnDeferredOutOfScopeVariables,
Opt_WarnPartialTypeSignatures,
Opt_WarnUnrecognisedPragmas,
Opt_WarnDuplicateExports,
Opt_WarnOverflowedLiterals,
Opt_WarnEmptyEnumerations,
Opt_WarnMissingFields,
Opt_WarnMissingMethods,
Opt_WarnWrongDoBind,
Opt_WarnUnsupportedCallingConventions,
Opt_WarnDodgyForeignImports,
Opt_WarnInlineRuleShadowing,
Opt_WarnAlternativeLayoutRuleTransitional,
Opt_WarnUnsupportedLlvmVersion,
Opt_WarnTabs,
Opt_WarnUnrecognisedWarningFlags,
Opt_WarnSimplifiableClassConstraints
]
-- | Things you get with -W
minusWOpts :: [WarningFlag]
minusWOpts
= standardWarnings ++
[ Opt_WarnUnusedTopBinds,
Opt_WarnUnusedLocalBinds,
Opt_WarnUnusedPatternBinds,
Opt_WarnUnusedMatches,
Opt_WarnUnusedForalls,
Opt_WarnUnusedImports,
Opt_WarnIncompletePatterns,
Opt_WarnDodgyExports,
Opt_WarnDodgyImports
]
-- | Things you get with -Wall
minusWallOpts :: [WarningFlag]
minusWallOpts
= minusWOpts ++
[ Opt_WarnTypeDefaults,
Opt_WarnNameShadowing,
Opt_WarnMissingSignatures,
Opt_WarnHiShadows,
Opt_WarnOrphans,
Opt_WarnUnusedDoBind,
Opt_WarnTrustworthySafe,
Opt_WarnUntickedPromotedConstructors,
Opt_WarnMissingPatternSynonymSignatures
]
-- | Things you get with -Weverything, i.e. *all* known warnings flags
minusWeverythingOpts :: [WarningFlag]
minusWeverythingOpts = [ toEnum 0 .. ]
-- | Things you get with -Wcompat.
--
-- This is intended to group together warnings that will be enabled by default
-- at some point in the future, so that library authors eager to make their
-- code future-compatible can fix issues before those warnings are turned on.
minusWcompatOpts :: [WarningFlag]
minusWcompatOpts
= [ Opt_WarnMissingMonadFailInstances
, Opt_WarnSemigroup
, Opt_WarnNonCanonicalMonoidInstances
]
enableUnusedBinds :: DynP ()
enableUnusedBinds = mapM_ setWarningFlag unusedBindsFlags
disableUnusedBinds :: DynP ()
disableUnusedBinds = mapM_ unSetWarningFlag unusedBindsFlags
-- Things you get with -Wunused-binds
unusedBindsFlags :: [WarningFlag]
unusedBindsFlags = [ Opt_WarnUnusedTopBinds
, Opt_WarnUnusedLocalBinds
, Opt_WarnUnusedPatternBinds
]
enableGlasgowExts :: DynP ()
enableGlasgowExts = do setGeneralFlag Opt_PrintExplicitForalls
mapM_ setExtensionFlag glasgowExtsFlags
disableGlasgowExts :: DynP ()
disableGlasgowExts = do unSetGeneralFlag Opt_PrintExplicitForalls
mapM_ unSetExtensionFlag glasgowExtsFlags
glasgowExtsFlags :: [LangExt.Extension]
glasgowExtsFlags = [
LangExt.ConstrainedClassMethods
, LangExt.DeriveDataTypeable
, LangExt.DeriveFoldable
, LangExt.DeriveFunctor
, LangExt.DeriveGeneric
, LangExt.DeriveTraversable
, LangExt.EmptyDataDecls
, LangExt.ExistentialQuantification
, LangExt.ExplicitNamespaces
, LangExt.FlexibleContexts
, LangExt.FlexibleInstances
, LangExt.ForeignFunctionInterface
, LangExt.FunctionalDependencies
, LangExt.GeneralizedNewtypeDeriving
, LangExt.ImplicitParams
, LangExt.KindSignatures
, LangExt.LiberalTypeSynonyms
, LangExt.MagicHash
, LangExt.MultiParamTypeClasses
, LangExt.ParallelListComp
, LangExt.PatternGuards
, LangExt.PostfixOperators
, LangExt.RankNTypes
, LangExt.RecursiveDo
, LangExt.ScopedTypeVariables
, LangExt.StandaloneDeriving
, LangExt.TypeOperators
, LangExt.TypeSynonymInstances
, LangExt.UnboxedTuples
, LangExt.UnicodeSyntax
, LangExt.UnliftedFFITypes ]
foreign import ccall unsafe "rts_isProfiled" rtsIsProfiledIO :: IO CInt
-- | Was the runtime system built with profiling enabled?
rtsIsProfiled :: Bool
rtsIsProfiled = unsafeDupablePerformIO rtsIsProfiledIO /= 0
#ifdef GHCI
-- Consult the RTS to find whether GHC itself has been built with
-- dynamic linking. This can't be statically known at compile-time,
-- because we build both the static and dynamic versions together with
-- -dynamic-too.
foreign import ccall unsafe "rts_isDynamic" rtsIsDynamicIO :: IO CInt
dynamicGhc :: Bool
dynamicGhc = unsafeDupablePerformIO rtsIsDynamicIO /= 0
#else
dynamicGhc :: Bool
dynamicGhc = False
#endif
setWarnSafe :: Bool -> DynP ()
setWarnSafe True = getCurLoc >>= \l -> upd (\d -> d { warnSafeOnLoc = l })
setWarnSafe False = return ()
setWarnUnsafe :: Bool -> DynP ()
setWarnUnsafe True = getCurLoc >>= \l -> upd (\d -> d { warnUnsafeOnLoc = l })
setWarnUnsafe False = return ()
setPackageTrust :: DynP ()
setPackageTrust = do
setGeneralFlag Opt_PackageTrust
l <- getCurLoc
upd $ \d -> d { pkgTrustOnLoc = l }
setGenDeriving :: TurnOnFlag -> DynP ()
setGenDeriving True = getCurLoc >>= \l -> upd (\d -> d { newDerivOnLoc = l })
setGenDeriving False = return ()
setOverlappingInsts :: TurnOnFlag -> DynP ()
setOverlappingInsts False = return ()
setOverlappingInsts True = do
l <- getCurLoc
upd (\d -> d { overlapInstLoc = l })
setIncoherentInsts :: TurnOnFlag -> DynP ()
setIncoherentInsts False = return ()
setIncoherentInsts True = do
l <- getCurLoc
upd (\d -> d { incoherentOnLoc = l })
checkTemplateHaskellOk :: TurnOnFlag -> DynP ()
#ifdef GHCI
checkTemplateHaskellOk _turn_on
= getCurLoc >>= \l -> upd (\d -> d { thOnLoc = l })
#else
-- In stage 1, Template Haskell is simply illegal, except with -M
-- We don't bleat with -M because there's no problem with TH there,
-- and in fact GHC's build system does ghc -M of the DPH libraries
-- with a stage1 compiler
checkTemplateHaskellOk turn_on
| turn_on = do dfs <- liftEwM getCmdLineState
case ghcMode dfs of
MkDepend -> return ()
_ -> addErr msg
| otherwise = return ()
where
msg = "Template Haskell requires GHC with interpreter support\n " ++
"Perhaps you are using a stage-1 compiler?"
#endif
{- **********************************************************************
%* *
DynFlags constructors
%* *
%********************************************************************* -}
type DynP = EwM (CmdLineP DynFlags)
upd :: (DynFlags -> DynFlags) -> DynP ()
upd f = liftEwM (do dflags <- getCmdLineState
putCmdLineState $! f dflags)
updM :: (DynFlags -> DynP DynFlags) -> DynP ()
updM f = do dflags <- liftEwM getCmdLineState
dflags' <- f dflags
liftEwM $ putCmdLineState $! dflags'
--------------- Constructor functions for OptKind -----------------
noArg :: (DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
noArg fn = NoArg (upd fn)
noArgM :: (DynFlags -> DynP DynFlags) -> OptKind (CmdLineP DynFlags)
noArgM fn = NoArg (updM fn)
hasArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
hasArg fn = HasArg (upd . fn)
sepArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
sepArg fn = SepArg (upd . fn)
intSuffix :: (Int -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
intSuffix fn = IntSuffix (\n -> upd (fn n))
intSuffixM :: (Int -> DynFlags -> DynP DynFlags) -> OptKind (CmdLineP DynFlags)
intSuffixM fn = IntSuffix (\n -> updM (fn n))
floatSuffix :: (Float -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
floatSuffix fn = FloatSuffix (\n -> upd (fn n))
optIntSuffixM :: (Maybe Int -> DynFlags -> DynP DynFlags)
-> OptKind (CmdLineP DynFlags)
optIntSuffixM fn = OptIntSuffix (\mi -> updM (fn mi))
setDumpFlag :: DumpFlag -> OptKind (CmdLineP DynFlags)
setDumpFlag dump_flag = NoArg (setDumpFlag' dump_flag)
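-- These helpers are combined with plain record updates on DynFlags in the
-- flag table above, e.g.
--
-- >   make_ord_flag defFlag "fsimplifier-phases"
-- >       (intSuffix (\n d -> d { simplPhases = n }))
--
-- so that (roughly) "-fsimplifier-phases=2" runs @upd@ with that update.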
--------------------------
addWay :: Way -> DynP ()
addWay w = upd (addWay' w)
addWay' :: Way -> DynFlags -> DynFlags
addWay' w dflags0 = let platform = targetPlatform dflags0
dflags1 = dflags0 { ways = w : ways dflags0 }
dflags2 = foldr setGeneralFlag' dflags1
(wayGeneralFlags platform w)
dflags3 = foldr unSetGeneralFlag' dflags2
(wayUnsetGeneralFlags platform w)
in dflags3
removeWayDyn :: DynP ()
removeWayDyn = upd (\dfs -> dfs { ways = filter (WayDyn /=) (ways dfs) })
--------------------------
setGeneralFlag, unSetGeneralFlag :: GeneralFlag -> DynP ()
setGeneralFlag f = upd (setGeneralFlag' f)
unSetGeneralFlag f = upd (unSetGeneralFlag' f)
setGeneralFlag' :: GeneralFlag -> DynFlags -> DynFlags
setGeneralFlag' f dflags = foldr ($) (gopt_set dflags f) deps
where
deps = [ if turn_on then setGeneralFlag' d
else unSetGeneralFlag' d
| (f', turn_on, d) <- impliedGFlags, f' == f ]
-- When you set f, set the ones it implies
-- NB: use setGeneralFlag recursively, in case the implied flags
-- implies further flags
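-- For example, per impliedGFlags above,
--
-- >   setGeneralFlag' Opt_DeferTypeErrors dflags
--
-- also switches on Opt_DeferTypedHoles and Opt_DeferOutOfScopeVariables.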
unSetGeneralFlag' :: GeneralFlag -> DynFlags -> DynFlags
unSetGeneralFlag' f dflags = foldr ($) (gopt_unset dflags f) deps
where
deps = [ if turn_on then setGeneralFlag' d
else unSetGeneralFlag' d
| (f', turn_on, d) <- impliedOffGFlags, f' == f ]
-- In general, when you un-set f, we don't un-set the things it implies.
-- There are however some exceptions, e.g., -fno-strictness implies
-- -fno-worker-wrapper.
--
-- NB: use unSetGeneralFlag' recursively, in case the implied off flags
-- imply further flags.
--------------------------
setWarningFlag, unSetWarningFlag :: WarningFlag -> DynP ()
setWarningFlag f = upd (\dfs -> wopt_set dfs f)
unSetWarningFlag f = upd (\dfs -> wopt_unset dfs f)
setFatalWarningFlag, unSetFatalWarningFlag :: WarningFlag -> DynP ()
setFatalWarningFlag f = upd (\dfs -> wopt_set_fatal dfs f)
unSetFatalWarningFlag f = upd (\dfs -> wopt_unset_fatal dfs f)
--------------------------
setExtensionFlag, unSetExtensionFlag :: LangExt.Extension -> DynP ()
setExtensionFlag f = upd (setExtensionFlag' f)
unSetExtensionFlag f = upd (unSetExtensionFlag' f)
setExtensionFlag', unSetExtensionFlag' :: LangExt.Extension -> DynFlags -> DynFlags
setExtensionFlag' f dflags = foldr ($) (xopt_set dflags f) deps
where
deps = [ if turn_on then setExtensionFlag' d
else unSetExtensionFlag' d
| (f', turn_on, d) <- impliedXFlags, f' == f ]
-- When you set f, set the ones it implies
-- NB: use setExtensionFlag recursively, in case the implied flags
-- implies further flags
unSetExtensionFlag' f dflags = xopt_unset dflags f
-- When you un-set f, however, we don't un-set the things it implies
-- (except for -fno-glasgow-exts, which is treated specially)
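-- For example, per impliedXFlags above,
--
-- >   setExtensionFlag'   LangExt.GADTs dflags  -- also enables GADTSyntax
-- >                                             -- and MonoLocalBinds
-- >   unSetExtensionFlag' LangExt.GADTs dflags  -- unsets GADTs only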
--------------------------
alterSettings :: (Settings -> Settings) -> DynFlags -> DynFlags
alterSettings f dflags = dflags { settings = f (settings dflags) }
--------------------------
setDumpFlag' :: DumpFlag -> DynP ()
setDumpFlag' dump_flag
= do upd (\dfs -> dopt_set dfs dump_flag)
when want_recomp forceRecompile
where -- Certain dumpy-things are really interested in what's going
-- on during recompilation checking, so in those cases we
-- don't want to turn it off.
want_recomp = dump_flag `notElem` [Opt_D_dump_if_trace,
Opt_D_dump_hi_diffs]
forceRecompile :: DynP ()
-- Whenever we -ddump, force recompilation (by switching off the
-- recompilation checker), else you don't see the dump! However,
-- don't switch it off in --make mode, else *everything* gets
-- recompiled which probably isn't what you want
forceRecompile = do dfs <- liftEwM getCmdLineState
when (force_recomp dfs) (setGeneralFlag Opt_ForceRecomp)
where
force_recomp dfs = isOneShot (ghcMode dfs)
setVerboseCore2Core :: DynP ()
setVerboseCore2Core = setDumpFlag' Opt_D_verbose_core2core
setVerbosity :: Maybe Int -> DynP ()
setVerbosity mb_n = upd (\dfs -> dfs{ verbosity = mb_n `orElse` 3 })
setDebugLevel :: Maybe Int -> DynP ()
setDebugLevel mb_n = upd (\dfs -> dfs{ debugLevel = mb_n `orElse` 2 })
data PkgConfRef
= GlobalPkgConf
| UserPkgConf
| PkgConfFile FilePath
addPkgConfRef :: PkgConfRef -> DynP ()
addPkgConfRef p = upd $ \s -> s { extraPkgConfs = (p:) . extraPkgConfs s }
removeUserPkgConf :: DynP ()
removeUserPkgConf = upd $ \s -> s { extraPkgConfs = filter isNotUser . extraPkgConfs s }
where
isNotUser UserPkgConf = False
isNotUser _ = True
removeGlobalPkgConf :: DynP ()
removeGlobalPkgConf = upd $ \s -> s { extraPkgConfs = filter isNotGlobal . extraPkgConfs s }
where
isNotGlobal GlobalPkgConf = False
isNotGlobal _ = True
clearPkgConf :: DynP ()
clearPkgConf = upd $ \s -> s { extraPkgConfs = const [] }
parsePackageFlag :: String -- the flag
-> ReadP PackageArg -- type of argument
-> String -- string to parse
-> PackageFlag
parsePackageFlag flag arg_parse str
= case filter ((=="").snd) (readP_to_S parse str) of
[(r, "")] -> r
_ -> throwGhcException $ CmdLineError ("Can't parse package flag: " ++ str)
where doc = flag ++ " " ++ str
parse = do
pkg_arg <- tok arg_parse
let mk_expose = ExposePackage doc pkg_arg
( do _ <- tok $ string "with"
fmap (mk_expose . ModRenaming True) parseRns
<++ fmap (mk_expose . ModRenaming False) parseRns
<++ return (mk_expose (ModRenaming True [])))
parseRns = do _ <- tok $ R.char '('
rns <- tok $ sepBy parseItem (tok $ R.char ',')
_ <- tok $ R.char ')'
return rns
parseItem = do
orig <- tok $ parseModuleName
(do _ <- tok $ string "as"
new <- tok $ parseModuleName
return (orig, new)
+++
return (orig, orig))
tok m = m >>= \x -> skipSpaces >> return x
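-- Reading the grammar above, the three accepted shapes are roughly as
-- follows (package and module names here are illustrative only):
--
-- >   -package "base"                       ~> ExposePackage .. (ModRenaming True  [])
-- >   -package "base (Data.List as L)"      ~> ExposePackage .. (ModRenaming False [(Data.List, L)])
-- >   -package "base with (Data.List as L)" ~> ExposePackage .. (ModRenaming True  [(Data.List, L)])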
exposePackage, exposePackageId, hidePackage,
exposePluginPackage, exposePluginPackageId,
ignorePackage,
trustPackage, distrustPackage :: String -> DynP ()
exposePackage p = upd (exposePackage' p)
exposePackageId p =
upd (\s -> s{ packageFlags =
parsePackageFlag "-package-id" parseUnitIdArg p : packageFlags s })
exposePluginPackage p =
upd (\s -> s{ pluginPackageFlags =
parsePackageFlag "-plugin-package" parsePackageArg p : pluginPackageFlags s })
exposePluginPackageId p =
upd (\s -> s{ pluginPackageFlags =
parsePackageFlag "-plugin-package-id" parseUnitIdArg p : pluginPackageFlags s })
hidePackage p =
upd (\s -> s{ packageFlags = HidePackage p : packageFlags s })
ignorePackage p =
upd (\s -> s{ ignorePackageFlags = IgnorePackage p : ignorePackageFlags s })
trustPackage p = exposePackage p >> -- both trust and distrust also expose a package
upd (\s -> s{ trustFlags = TrustPackage p : trustFlags s })
distrustPackage p = exposePackage p >>
upd (\s -> s{ trustFlags = DistrustPackage p : trustFlags s })
exposePackage' :: String -> DynFlags -> DynFlags
exposePackage' p dflags
= dflags { packageFlags =
parsePackageFlag "-package" parsePackageArg p : packageFlags dflags }
parsePackageArg :: ReadP PackageArg
parsePackageArg =
fmap PackageArg (munch1 (\c -> isAlphaNum c || c `elem` ":-_."))
parseUnitIdArg :: ReadP PackageArg
parseUnitIdArg =
fmap UnitIdArg parseUnitId
setUnitId :: String -> DynFlags -> DynFlags
setUnitId p d = d { thisInstalledUnitId = stringToInstalledUnitId p }
-- | Given a 'ModuleName' of a signature in the home library, find
-- out how it is instantiated. E.g., the canonical form of
-- A in @p[A=q[]:A]@ is @q[]:A@.
canonicalizeHomeModule :: DynFlags -> ModuleName -> Module
canonicalizeHomeModule dflags mod_name =
case lookup mod_name (thisUnitIdInsts dflags) of
Nothing -> mkModule (thisPackage dflags) mod_name
Just mod -> mod
-- -----------------------------------------------------------------------------
-- | Find the package environment (if one exists)
--
-- We interpret the package environment as a set of package flags; to be
-- specific, if we find a package environment file like
--
-- > clear-package-db
-- > global-package-db
-- > package-db blah/package.conf.d
-- > package-id id1
-- > package-id id2
--
-- we interpret this as
--
-- > [ -hide-all-packages
-- > , -clear-package-db
-- > , -global-package-db
-- > , -package-db blah/package.conf.d
-- > , -package-id id1
-- > , -package-id id2
-- > ]
--
-- There's also an older syntax alias for package-id, which is just an
-- unadorned package id
--
-- > id1
-- > id2
--
interpretPackageEnv :: DynFlags -> IO DynFlags
interpretPackageEnv dflags = do
mPkgEnv <- runMaybeT $ msum $ [
getCmdLineArg >>= \env -> msum [
probeEnvFile env
, probeEnvName env
, cmdLineError env
]
, getEnvVar >>= \env -> msum [
probeEnvFile env
, probeEnvName env
, envError env
]
, notIfHideAllPackages >> msum [
findLocalEnvFile >>= probeEnvFile
, probeEnvName defaultEnvName
]
]
case mPkgEnv of
Nothing ->
-- No environment found. Leave DynFlags unchanged.
return dflags
Just envfile -> do
content <- readFile envfile
let setFlags :: DynP ()
setFlags = do
setGeneralFlag Opt_HideAllPackages
parseEnvFile envfile content
(_, dflags') = runCmdLine (runEwM setFlags) dflags
return dflags'
where
-- Loading environments (by name or by location)
namedEnvPath :: String -> MaybeT IO FilePath
namedEnvPath name = do
appdir <- versionedAppDir dflags
return $ appdir </> "environments" </> name
probeEnvName :: String -> MaybeT IO FilePath
probeEnvName name = probeEnvFile =<< namedEnvPath name
probeEnvFile :: FilePath -> MaybeT IO FilePath
probeEnvFile path = do
guard =<< liftMaybeT (doesFileExist path)
return path
parseEnvFile :: FilePath -> String -> DynP ()
parseEnvFile envfile = mapM_ parseEntry . lines
where
parseEntry str = case words str of
("package-db": _) -> addPkgConfRef (PkgConfFile (envdir </> db))
-- relative package dbs are interpreted relative to the env file
where envdir = takeDirectory envfile
db = drop 11 str
["clear-package-db"] -> clearPkgConf
["global-package-db"] -> addPkgConfRef GlobalPkgConf
["user-package-db"] -> addPkgConfRef UserPkgConf
["package-id", pkgid] -> exposePackageId pkgid
(('-':'-':_):_) -> return () -- comments
-- and the original syntax introduced in 7.10:
[pkgid] -> exposePackageId pkgid
[] -> return ()
_ -> throwGhcException $ CmdLineError $
"Can't parse environment file entry: "
++ envfile ++ ": " ++ str
-- Various ways to define which environment to use
getCmdLineArg :: MaybeT IO String
getCmdLineArg = MaybeT $ return $ packageEnv dflags
getEnvVar :: MaybeT IO String
getEnvVar = do
mvar <- liftMaybeT $ try $ getEnv "GHC_ENVIRONMENT"
case mvar of
Right var -> return var
Left err -> if isDoesNotExistError err then mzero
else liftMaybeT $ throwIO err
notIfHideAllPackages :: MaybeT IO ()
notIfHideAllPackages =
guard (not (gopt Opt_HideAllPackages dflags))
defaultEnvName :: String
defaultEnvName = "default"
-- e.g. .ghc.environment.x86_64-linux-7.6.3
localEnvFileName :: FilePath
localEnvFileName = ".ghc.environment" <.> versionedFilePath dflags
-- Search for an env file, starting in the current dir and looking upwards.
-- Fail if we get to the users home dir or the filesystem root. That is,
-- we don't look for an env file in the user's home dir. The user-wide
-- env lives in ghc's versionedAppDir/environments/default
findLocalEnvFile :: MaybeT IO FilePath
findLocalEnvFile = do
curdir <- liftMaybeT getCurrentDirectory
homedir <- tryMaybeT getHomeDirectory
let probe dir | isDrive dir || dir == homedir
= mzero
probe dir = do
let file = dir </> localEnvFileName
exists <- liftMaybeT (doesFileExist file)
if exists
then return file
else probe (takeDirectory dir)
probe curdir
-- Error reporting
cmdLineError :: String -> MaybeT IO a
cmdLineError env = liftMaybeT . throwGhcExceptionIO . CmdLineError $
"Package environment " ++ show env ++ " not found"
envError :: String -> MaybeT IO a
envError env = liftMaybeT . throwGhcExceptionIO . CmdLineError $
"Package environment "
++ show env
++ " (specified in GHC_ENVIRIONMENT) not found"
-- If we're linking a binary, then only targets that produce object
-- code are allowed (requests for other target types are ignored).
setTarget :: HscTarget -> DynP ()
setTarget l = setTargetWithPlatform (const l)
setTargetWithPlatform :: (Platform -> HscTarget) -> DynP ()
setTargetWithPlatform f = upd set
where
set dfs = let l = f (targetPlatform dfs)
in if ghcLink dfs /= LinkBinary || isObjectTarget l
then dfs{ hscTarget = l }
else dfs
-- Changes the target only if we're compiling object code. This is
-- used by -fasm and -fllvm, which switch from one to the other, but
-- not from bytecode to object-code. The idea is that -fasm/-fllvm
-- can be safely used in an OPTIONS_GHC pragma.
setObjTarget :: HscTarget -> DynP ()
setObjTarget l = updM set
where
set dflags
| isObjectTarget (hscTarget dflags)
= return $ dflags { hscTarget = l }
| otherwise = return dflags
setOptLevel :: Int -> DynFlags -> DynP DynFlags
setOptLevel n dflags = return (updOptLevel n dflags)
checkOptLevel :: Int -> DynFlags -> Either String DynFlags
checkOptLevel n dflags
| hscTarget dflags == HscInterpreted && n > 0
= Left "-O conflicts with --interactive; -O ignored."
| otherwise
= Right dflags
-- -Odph is equivalent to
--
-- -O2 optimise as much as possible
--     -fmax-simplifier-iterations=20   this is necessary sometimes
-- -fsimplifier-phases=3 we use an additional simplifier phase for fusion
--
setDPHOpt :: DynFlags -> DynP DynFlags
setDPHOpt dflags = setOptLevel 2 (dflags { maxSimplIterations = 20
, simplPhases = 3
})
setMainIs :: String -> DynP ()
setMainIs arg
| not (null main_fn) && isLower (head main_fn)
-- The arg looked like "Foo.Bar.baz"
= upd $ \d -> d { mainFunIs = Just main_fn,
mainModIs = mkModule mainUnitId (mkModuleName main_mod) }
| isUpper (head arg) -- The arg looked like "Foo" or "Foo.Bar"
= upd $ \d -> d { mainModIs = mkModule mainUnitId (mkModuleName arg) }
| otherwise -- The arg looked like "baz"
= upd $ \d -> d { mainFunIs = Just arg }
where
(main_mod, main_fn) = splitLongestPrefix arg (== '.')
addLdInputs :: Option -> DynFlags -> DynFlags
addLdInputs p dflags = dflags{ldInputs = ldInputs dflags ++ [p]}
-----------------------------------------------------------------------------
-- Paths & Libraries
addImportPath, addLibraryPath, addIncludePath, addFrameworkPath :: FilePath -> DynP ()
-- -i on its own deletes the import paths
addImportPath "" = upd (\s -> s{importPaths = []})
addImportPath p = upd (\s -> s{importPaths = importPaths s ++ splitPathList p})
addLibraryPath p =
upd (\s -> s{libraryPaths = libraryPaths s ++ splitPathList p})
addIncludePath p =
upd (\s -> s{includePaths = includePaths s ++ splitPathList p})
addFrameworkPath p =
upd (\s -> s{frameworkPaths = frameworkPaths s ++ splitPathList p})
#ifndef mingw32_TARGET_OS
split_marker :: Char
split_marker = ':' -- not configurable (ToDo)
#endif
splitPathList :: String -> [String]
splitPathList s = filter notNull (splitUp s)
-- empty paths are ignored: there might be a trailing
-- ':' in the initial list, for example. Empty paths can
-- cause confusion when they are translated into -I options
-- for passing to gcc.
where
#ifndef mingw32_TARGET_OS
splitUp xs = split split_marker xs
#else
-- Windows: 'hybrid' support for DOS-style paths in directory lists.
--
-- That is, if "foo:bar:baz" is used, this interpreted as
-- consisting of three entries, 'foo', 'bar', 'baz'.
-- However, with "c:/foo:c:\\foo;x:/bar", this is interpreted
-- as 3 elts, "c:/foo", "c:\\foo", "x:/bar"
--
-- Notice that no attempt is made to fully replace the 'standard'
-- split marker ':' with the Windows / DOS one, ';'. The reason being
-- that this will cause too much breakage for users & ':' will
-- work fine even with DOS paths, if you're not insisting on being silly.
-- So, use either.
splitUp [] = []
splitUp (x:':':div:xs) | div `elem` dir_markers
= ((x:':':div:p): splitUp rs)
where
(p,rs) = findNextPath xs
-- we used to check for existence of the path here, but that
-- required the IO monad to be threaded through the command-line
-- parser which is quite inconvenient.
splitUp xs = cons p (splitUp rs)
where
(p,rs) = findNextPath xs
cons "" xs = xs
cons x xs = x:xs
-- will be called either when we've consumed nought or the
-- "<Drive>:/" part of a DOS path, so splitting is just a question of
-- finding the next split marker.
findNextPath xs =
case break (`elem` split_markers) xs of
(p, _:ds) -> (p, ds)
(p, xs) -> (p, xs)
split_markers :: [Char]
split_markers = [':', ';']
dir_markers :: [Char]
dir_markers = ['/', '\\']
#endif
-- -----------------------------------------------------------------------------
-- tmpDir, where we store temporary files.
setTmpDir :: FilePath -> DynFlags -> DynFlags
setTmpDir dir = alterSettings (\s -> s { sTmpDir = normalise dir })
-- we used to fix /cygdrive/c/.. on Windows, but this doesn't
-- seem necessary now --SDM 7/2/2008
-----------------------------------------------------------------------------
-- RTS opts
setRtsOpts :: String -> DynP ()
setRtsOpts arg = upd $ \ d -> d {rtsOpts = Just arg}
setRtsOptsEnabled :: RtsOptsEnabled -> DynP ()
setRtsOptsEnabled arg = upd $ \ d -> d {rtsOptsEnabled = arg}
-----------------------------------------------------------------------------
-- Hpc stuff
setOptHpcDir :: String -> DynP ()
setOptHpcDir arg = upd $ \ d -> d {hpcDir = arg}
-----------------------------------------------------------------------------
-- Via-C compilation stuff
-- There are some options that we need to pass to gcc when compiling
-- Haskell code via C, but are only supported by recent versions of
-- gcc. The configure script decides which of these options we need,
-- and puts them in the "settings" file in $topdir. The advantage of
-- having these in a separate file is that the file can be created at
-- install-time depending on the available gcc version, and even
-- re-generated later if gcc is upgraded.
--
-- The options below are not dependent on the version of gcc, only the
-- platform.
picCCOpts :: DynFlags -> [String]
picCCOpts dflags
= case platformOS (targetPlatform dflags) of
OSDarwin
-- Apple prefers to do things the other way round.
-- PIC is on by default.
-- -mdynamic-no-pic:
-- Turn off PIC code generation.
-- -fno-common:
-- Don't generate "common" symbols - these are unwanted
-- in dynamic libraries.
| gopt Opt_PIC dflags -> ["-fno-common", "-U__PIC__", "-D__PIC__"]
| otherwise -> ["-mdynamic-no-pic"]
OSMinGW32 -- no -fPIC for Windows
| gopt Opt_PIC dflags -> ["-U__PIC__", "-D__PIC__"]
| otherwise -> []
_
-- we need -fPIC for C files when we are compiling with -dynamic,
-- otherwise things like stub.c files don't get compiled
-- correctly. They need to reference data in the Haskell
-- objects, but can't without -fPIC. See
-- http://ghc.haskell.org/trac/ghc/wiki/Commentary/PositionIndependentCode
| gopt Opt_PIC dflags || WayDyn `elem` ways dflags ->
["-fPIC", "-U__PIC__", "-D__PIC__"]
| otherwise -> []
picPOpts :: DynFlags -> [String]
picPOpts dflags
| gopt Opt_PIC dflags = ["-U__PIC__", "-D__PIC__"]
| otherwise = []
-- -----------------------------------------------------------------------------
-- Splitting
can_split :: Bool
can_split = cSupportsSplitObjs == "YES"
-- -----------------------------------------------------------------------------
-- Compiler Info
compilerInfo :: DynFlags -> [(String, String)]
compilerInfo dflags
= -- We always make "Project name" be first to keep parsing in
-- other languages simple, i.e. when looking for other fields,
-- you don't have to worry whether there is a leading '[' or not
("Project name", cProjectName)
-- Next come the settings, so anything else can be overridden
-- in the settings file (as "lookup" uses the first match for the
-- key)
: rawSettings dflags
++ [("Project version", projectVersion dflags),
("Project Git commit id", cProjectGitCommitId),
("Booter version", cBooterVersion),
("Stage", cStage),
("Build platform", cBuildPlatformString),
("Host platform", cHostPlatformString),
("Target platform", cTargetPlatformString),
("Have interpreter", cGhcWithInterpreter),
("Object splitting supported", cSupportsSplitObjs),
("Have native code generator", cGhcWithNativeCodeGen),
("Support SMP", cGhcWithSMP),
("Tables next to code", cGhcEnableTablesNextToCode),
("RTS ways", cGhcRTSWays),
("RTS expects libdw", showBool cGhcRtsWithLibdw),
-- Whether or not we support @-dynamic-too@
("Support dynamic-too", showBool $ not isWindows),
-- Whether or not we support the @-j@ flag with @--make@.
("Support parallel --make", "YES"),
-- Whether or not we support "Foo from foo-0.1-XXX:Foo" syntax in
-- installed package info.
("Support reexported-modules", "YES"),
-- Whether or not we support extended @-package foo (Foo)@ syntax.
("Support thinning and renaming package flags", "YES"),
-- Whether or not we support Backpack.
("Support Backpack", "YES"),
-- If true, we require that the 'id' field in installed package info
-- match what is passed to the @-this-unit-id@ flag for modules
-- built in it
("Requires unified installed package IDs", "YES"),
-- Whether or not we support the @-this-package-key@ flag. Prefer
-- "Uses unit IDs" over it.
("Uses package keys", "YES"),
-- Whether or not we support the @-this-unit-id@ flag
("Uses unit IDs", "YES"),
-- Whether or not GHC compiles libraries as dynamic by default
("Dynamic by default", showBool $ dYNAMIC_BY_DEFAULT dflags),
-- Whether or not GHC was compiled using -dynamic
("GHC Dynamic", showBool dynamicGhc),
-- Whether or not GHC was compiled using -prof
("GHC Profiled", showBool rtsIsProfiled),
("Leading underscore", cLeadingUnderscore),
("Debug on", show debugIsOn),
("LibDir", topDir dflags),
-- The path of the global package database used by GHC
("Global Package DB", systemPackageConfig dflags)
]
where
showBool True = "YES"
showBool False = "NO"
isWindows = platformOS (targetPlatform dflags) == OSMinGW32
-- Produced by deriveConstants
#include "GHCConstantsHaskellWrappers.hs"
bLOCK_SIZE_W :: DynFlags -> Int
bLOCK_SIZE_W dflags = bLOCK_SIZE dflags `quot` wORD_SIZE dflags
wORD_SIZE_IN_BITS :: DynFlags -> Int
wORD_SIZE_IN_BITS dflags = wORD_SIZE dflags * 8
tAG_MASK :: DynFlags -> Int
tAG_MASK dflags = (1 `shiftL` tAG_BITS dflags) - 1
mAX_PTR_TAG :: DynFlags -> Int
mAX_PTR_TAG = tAG_MASK
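-- For illustration (not in the original source): with the usual 3 tag bits
-- on a 64-bit target, tAG_MASK dflags = (1 `shiftL` 3) - 1 = 7, i.e. the low
-- three bits of a pointer can carry a tag in the range 0..7.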
-- Might be worth caching these in targetPlatform?
tARGET_MIN_INT, tARGET_MAX_INT, tARGET_MAX_WORD :: DynFlags -> Integer
tARGET_MIN_INT dflags
= case platformWordSize (targetPlatform dflags) of
4 -> toInteger (minBound :: Int32)
8 -> toInteger (minBound :: Int64)
w -> panic ("tARGET_MIN_INT: Unknown platformWordSize: " ++ show w)
tARGET_MAX_INT dflags
= case platformWordSize (targetPlatform dflags) of
4 -> toInteger (maxBound :: Int32)
8 -> toInteger (maxBound :: Int64)
w -> panic ("tARGET_MAX_INT: Unknown platformWordSize: " ++ show w)
tARGET_MAX_WORD dflags
= case platformWordSize (targetPlatform dflags) of
4 -> toInteger (maxBound :: Word32)
8 -> toInteger (maxBound :: Word64)
w -> panic ("tARGET_MAX_WORD: Unknown platformWordSize: " ++ show w)
{- -----------------------------------------------------------------------------
Note [DynFlags consistency]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a number of DynFlags configurations which either
do not make sense or lead to unimplemented or buggy codepaths in the
compiler. makeDynFlagsConsistent is responsible for verifying the validity
of a set of DynFlags, fixing any issues, and reporting them back to the
caller.
GHCi and -O
---------------
When using optimization, the compiler can introduce several things
(such as unboxed tuples) into the intermediate code, which GHCi later
chokes on since the bytecode interpreter can't handle this (and while
this is arguably a bug that these aren't handled, there are no plans to fix
it.)
While the driver pipeline always checks for this particular erroneous
combination when parsing flags, we also need to check when we update
the flags; this is because API clients may parse flags but update the
DynFlags afterwards, before finally running code inside a session (see
T10052 and #10052).
-}
-- | Resolve any internal inconsistencies in a set of 'DynFlags'.
-- Returns the consistent 'DynFlags' as well as a list of warnings
-- to report to the user.
makeDynFlagsConsistent :: DynFlags -> (DynFlags, [Located String])
-- Whenever makeDynFlagsConsistent does anything, it starts over, to
-- ensure that a later change doesn't invalidate an earlier check.
-- Be careful not to introduce potential loops!
makeDynFlagsConsistent dflags
-- Disable -dynamic-too on Windows (#8228, #7134, #5987)
| os == OSMinGW32 && gopt Opt_BuildDynamicToo dflags
= let dflags' = gopt_unset dflags Opt_BuildDynamicToo
warn = "-dynamic-too is not supported on Windows"
in loop dflags' warn
| hscTarget dflags == HscC &&
not (platformUnregisterised (targetPlatform dflags))
= if cGhcWithNativeCodeGen == "YES"
then let dflags' = dflags { hscTarget = HscAsm }
warn = "Compiler not unregisterised, so using native code generator rather than compiling via C"
in loop dflags' warn
else let dflags' = dflags { hscTarget = HscLlvm }
warn = "Compiler not unregisterised, so using LLVM rather than compiling via C"
in loop dflags' warn
| gopt Opt_Hpc dflags && hscTarget dflags == HscInterpreted
= let dflags' = gopt_unset dflags Opt_Hpc
warn = "Hpc can't be used with byte-code interpreter. Ignoring -fhpc."
in loop dflags' warn
| hscTarget dflags == HscAsm &&
platformUnregisterised (targetPlatform dflags)
= loop (dflags { hscTarget = HscC })
"Compiler unregisterised, so compiling via C"
| hscTarget dflags == HscAsm &&
cGhcWithNativeCodeGen /= "YES"
= let dflags' = dflags { hscTarget = HscLlvm }
warn = "No native code generator, so using LLVM"
in loop dflags' warn
| hscTarget dflags == HscLlvm &&
not ((arch == ArchX86_64) && (os == OSLinux || os == OSDarwin || os == OSFreeBSD)) &&
not ((isARM arch) && (os == OSLinux)) &&
(gopt Opt_PIC dflags || WayDyn `elem` ways dflags)
= if cGhcWithNativeCodeGen == "YES"
then let dflags' = dflags { hscTarget = HscAsm }
warn = "Using native code generator rather than LLVM, as LLVM is incompatible with -fPIC and -dynamic on this platform"
in loop dflags' warn
else throwGhcException $ CmdLineError "Can't use -fPIC or -dynamic on this platform"
| os == OSDarwin &&
arch == ArchX86_64 &&
not (gopt Opt_PIC dflags)
= loop (gopt_set dflags Opt_PIC)
"Enabling -fPIC as it is always on for this platform"
| Left err <- checkOptLevel (optLevel dflags) dflags
= loop (updOptLevel 0 dflags) err
| LinkInMemory <- ghcLink dflags
, not (gopt Opt_ExternalInterpreter dflags)
, rtsIsProfiled
, isObjectTarget (hscTarget dflags)
, WayProf `notElem` ways dflags
= loop dflags{ways = WayProf : ways dflags}
"Enabling -prof, because -fobject-code is enabled and GHCi is profiled"
| otherwise = (dflags, [])
where loc = mkGeneralSrcSpan (fsLit "when making flags consistent")
loop updated_dflags warning
= case makeDynFlagsConsistent updated_dflags of
(dflags', ws) -> (dflags', L loc warning : ws)
platform = targetPlatform dflags
arch = platformArch platform
os = platformOS platform
--------------------------------------------------------------------------
-- Do not use unsafeGlobalDynFlags!
--
-- unsafeGlobalDynFlags is a hack, necessary because we need to be able
-- to show SDocs when tracing, but we don't always have DynFlags
-- available.
--
-- Do not use it if you can help it. You may get the wrong value, or this
-- panic!
-- | This is the value that 'unsafeGlobalDynFlags' takes before it is
-- initialized.
defaultGlobalDynFlags :: DynFlags
defaultGlobalDynFlags =
(defaultDynFlags settings) { verbosity = 2 }
where
settings = panic "v_unsafeGlobalDynFlags: not initialised"
GLOBAL_VAR(v_unsafeGlobalDynFlags, defaultGlobalDynFlags, DynFlags)
unsafeGlobalDynFlags :: DynFlags
unsafeGlobalDynFlags = unsafePerformIO $ readIORef v_unsafeGlobalDynFlags
setUnsafeGlobalDynFlags :: DynFlags -> IO ()
setUnsafeGlobalDynFlags = writeIORef v_unsafeGlobalDynFlags
-- -----------------------------------------------------------------------------
-- SSE and AVX
-- TODO: Instead of using a separate predicate (i.e. isSse2Enabled) to
-- check if SSE is enabled, we might have x86-64 imply the -msse2
-- flag.
data SseVersion = SSE1
| SSE2
| SSE3
| SSE4
| SSE42
deriving (Eq, Ord)
isSseEnabled :: DynFlags -> Bool
isSseEnabled dflags = case platformArch (targetPlatform dflags) of
ArchX86_64 -> True
ArchX86 -> sseVersion dflags >= Just SSE1
_ -> False
isSse2Enabled :: DynFlags -> Bool
isSse2Enabled dflags = case platformArch (targetPlatform dflags) of
ArchX86_64 -> -- SSE2 is fixed on for x86_64. It would be
-- possible to make it optional, but we'd need to
-- fix at least the foreign call code where the
-- calling convention specifies the use of xmm regs,
-- and possibly other places.
True
ArchX86 -> sseVersion dflags >= Just SSE2
_ -> False
isSse4_2Enabled :: DynFlags -> Bool
isSse4_2Enabled dflags = sseVersion dflags >= Just SSE42
isAvxEnabled :: DynFlags -> Bool
isAvxEnabled dflags = avx dflags || avx2 dflags || avx512f dflags
isAvx2Enabled :: DynFlags -> Bool
isAvx2Enabled dflags = avx2 dflags || avx512f dflags
isAvx512cdEnabled :: DynFlags -> Bool
isAvx512cdEnabled dflags = avx512cd dflags
isAvx512erEnabled :: DynFlags -> Bool
isAvx512erEnabled dflags = avx512er dflags
isAvx512fEnabled :: DynFlags -> Bool
isAvx512fEnabled dflags = avx512f dflags
isAvx512pfEnabled :: DynFlags -> Bool
isAvx512pfEnabled dflags = avx512pf dflags
-- -----------------------------------------------------------------------------
-- Linker/compiler information
-- LinkerInfo contains any extra options needed by the system linker.
data LinkerInfo
= GnuLD [Option]
| GnuGold [Option]
| DarwinLD [Option]
| SolarisLD [Option]
| AixLD [Option]
| UnknownLD
deriving Eq
-- CompilerInfo tells us which C compiler we're using
data CompilerInfo
= GCC
| Clang
| AppleClang
| AppleClang51
| UnknownCC
deriving Eq
-- -----------------------------------------------------------------------------
-- RTS hooks
-- Convert sizes like "3.5M" into integers
decodeSize :: String -> Integer
decodeSize str
| c == "" = truncate n
| c == "K" || c == "k" = truncate (n * 1000)
| c == "M" || c == "m" = truncate (n * 1000 * 1000)
| c == "G" || c == "g" = truncate (n * 1000 * 1000 * 1000)
| otherwise = throwGhcException (CmdLineError ("can't decode size: " ++ str))
where (m, c) = span pred str
n = readRational m
pred c = isDigit c || c == '.'
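-- Illustrative values for the parser above (not in the original source):
--
-- >  decodeSize "2048" == 2048
-- >  decodeSize "3.5M" == 3500000
-- >  decodeSize "1g"   == 1000000000
--
-- Note that the suffixes are decimal (powers of 1000), not powers of 1024.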
foreign import ccall unsafe "setHeapSize" setHeapSize :: Int -> IO ()
foreign import ccall unsafe "enableTimingStats" enableTimingStats :: IO ()
| mettekou/ghc | compiler/main/DynFlags.hs | bsd-3-clause | 220,181 | 1 | 41 | 58,601 | 39,376 | 21,550 | 17,826 | -1 | -1 |
{-# LANGUAGE CPP, FlexibleContexts, ScopedTypeVariables #-}
{-# LANGUAGE ConstraintKinds #-} -- for LFScan
{-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.RadixSort
-- Copyright : (c) 2014 Tabula, Inc.
--
-- Maintainer : conal@tabula.com
-- Stability : experimental
--
-- Parallel radix sort
----------------------------------------------------------------------
module LambdaCCC.RadixSort where
-- TODO: explicit exports
import Prelude hiding (sum)
import Data.Foldable (Foldable,sum,toList)
import Control.Applicative (Applicative(..),liftA2)
import Control.Arrow ((***),first)
import TypeUnary.Nat -- (IsNat(..))
import TypeUnary.Vec (Vec,vec1,(<+>))
import Circat.RTree
import Circat.Scan (LScan(..),LFScan,lsums)
type Bits n = Vec n Bool
oneTree :: (IsNat n, Num b) => Bits n -> Tree n b
oneTree v = update v (const 1) (pure 0)
histogramStep :: (IsNat n, Num b) =>
RTree n b -> Bits n -> RTree n b
histogramStep w v = w + oneTree v
histogramFold :: (Foldable f, Functor f, IsNat n, Num b) =>
f (Bits n) -> RTree n b
histogramFold = sum . fmap oneTree
histogramScan :: (LFScan f, IsNat n, Num b) =>
f (Bits n) -> (f (RTree n b), RTree n b)
histogramScan = lsums . fmap oneTree
#if 0
oneTree :: Bits n -> Tree n b
fmap oneTree :: f (Bits n) -> f (Tree n b)
lsums :: f (Tree n b) -> (f (Tree n b), b)
#endif
positions :: (Applicative f, LScan f, LScan (RTree n), IsNat n, Num b) =>
f (Bits n) -> f b
positions vs = liftA2 combine partials vs
where
(partials,hist) = histogramScan vs
(starts,_) = lsums hist
combine partial v = (starts + partial) ! v
#if 0
vs :: f (Bits n)
partials :: f (RTree n b)
hist :: RTree n b
starts :: RTree n b
combine :: RTree n b -> Bits n -> b
#endif
#if 0
-- Variation: (starts + partial) ! v --> (starts ! v) + (partial ! v).
-- I get somewhat larger circuits.
positions' :: (Applicative f, LScan f, LScan (RTree n), IsNat n, Num b) =>
f (Bits n) -> f b
positions' vs = liftA2 combine partials vs
where
(partials,hist) = histogramScan vs
(starts,_) = lsums hist
combine partial v = (starts ! v) + (partial ! v)
#endif
-- TODO: Generalize to other tries
{--------------------------------------------------------------------
Tests
--------------------------------------------------------------------}
-- Test histogramFold
testHF :: (Functor f, Foldable f, IsNat n, Num b) =>
f (Bits n) -> [b]
testHF = toList . histogramFold
-- Test histogramScan
testHS :: (LFScan f, Foldable f, IsNat n, Num b) =>
f (Bits n) -> ([[b]], [b])
testHS = first toList . (fmap toList *** toList) . histogramScan
-- Test positions
testPs :: (Foldable f, Applicative f, LScan f, LScan (RTree n), IsNat n, Num b) =>
f (Bits n) -> [b]
testPs = toList . positions
-- testSort vs =
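-- A minimal sketch of the missing sort step (an assumption, not part of the
-- original module): once 'positions' has assigned each element a target
-- slot, one radix pass just scatters the elements into those slots. The
-- sketch below works on plain lists, so it only needs Data.List and
-- Data.Ord rather than any Circat instances:
--
-- > import Data.List (sortBy)
-- > import Data.Ord (comparing)
-- >
-- > scatter :: [Int] -> [a] -> [a]
-- > scatter slots = map snd . sortBy (comparing fst) . zip slots
--
-- For instance, testPs t1 == [2,0,1,3], and scattering [t,f,f,t] with those
-- slots gives [f,f,t,t], i.e. the elements whose digit is False come first.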
f,t :: Bits N1
f = vec1 False
t = vec1 True
l1 :: [Bits N1]
l1 = [t,f,f,t,f]
t1 :: Tree N2 (Bits N1)
t1 = tree2 t f f t
-- > testHF l1
-- [3,2]
-- > testHF t1
-- [2,2]
-- > testHF l2
-- [3,2,2,1]
--
-- > testHS t1
-- ([[0,0],[0,1],[1,1],[2,1]],[2,2])
--
-- > testPs t1
-- [2,0,1,3]
ff,ft,tf,tt :: Bits N2
[ff,ft,tf,tt] = liftA2 (<+>) [f,t] [f,t]
l2 :: [Bits N2]
l2 = [tf,ft,ff,tt,tf,ff,ff,ft]
t2 :: Tree N3 (Bits N2)
t2 = tree3 tf ft ff tt tf ff ff ft
-- > testHS t1
-- ([[0,0],[0,1],[1,1],[2,1]],[2,2])
--
-- > testHS t2
-- ([[0,0,0,0],[0,0,1,0],[0,1,1,0],[1,1,1,0],[1,1,1,1],[1,1,2,1],[2,1,2,1],[3,1,2,1]],[3,2,2,1])
--
-- > testPs t2
-- [5,3,0,7,6,1,2,4]
| conal/lambda-ccc | src/LambdaCCC/RadixSort.hs | bsd-3-clause | 3,778 | 52 | 22 | 842 | 1,304 | 708 | 596 | 52 | 1 |
{-# LANGUAGE NPlusKPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Chap01 where
import Prelude hiding (last, foldr, foldl, take, drop, zip, concat, inits, reverse)
-- | Ex 1.1
-- solution 1
k1 x = if k1 x == 0 then 1 else 0
-- solution 2
k2 x = not (k2 x)
-- solution 3
k3 x = k3 x + 1
-- solution 4
k4 x = k4 x ++ [x]
-- | Ex 1.2
m (x, y) = if x == y
then y + 1
else m (x, m (x - 1, y + 1))
-- solution 1
m1 (x, y) = x + 1
-- solution 2
m2 (x, y) = if x >= y
then x + 1
else y - 1
-- | Ex 1.3
data Nat = Zero
| Succ Nat
deriving (Show, Eq)
foldn (c, f) = u
where u Zero = c
u (Succ n) = f (u n)
data NatPlus = One
| Next NatPlus
deriving (Show, Eq)
foldnplus (c, f) = u
where u One = c
u (Next n) = f (u n)
f :: NatPlus -> Nat
f = foldnplus (Zero, Succ)
g :: Nat -> NatPlus
g = foldn (One, Next)
-- test
testfgIsId n = n == (f . g) n
testgfIsId n = n == (g . f) n
-- | Ex 1.4
plus = foldn (id, (Succ .))
outl (x, _) = x
sqr = f . foldn (c, h)
where f = outl
c = (zero, zero)
h (s, dn) = (plus (plus s dn) one, plus dn two)
(zero, one, two) = (Zero, Succ zero, Succ one)
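-- Illustrative check (not in the original): the pair threaded by 'h' holds
-- (n^2, 2n), so e.g. sqr (Succ (Succ (Succ Zero))) is the Nat representing 9.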
-- | Ex 1.5
last :: (Nat -> Bool) -> Nat -> Nat
last p = f . foldn (c, h)
where f = outl
c = (Zero, Succ Zero)
h (l, n) = if p n then (n, Succ n) else (l, Succ n)
test_1_5 = map (fromNat . last evenp . toNat) [0..100]
where
evenp, oddp :: Nat -> Bool
evenp Zero = True
evenp (Succ n) = oddp n
oddp Zero = False
oddp (Succ n) = evenp n
toNat 0 = Zero
toNat n = Succ (toNat (n-1))
fromNat Zero = 0
fromNat (Succ n) = 1 + fromNat n
-- | Ex 1.6
foldn' :: (a, a -> a) -> Int -> a
foldn' (c, f) = u
where u 0 = c
u (n+1) = f (u n)
{--
cack :: Nat -> Nat -> Nat
cack = foldn (Succ, swap f)
where f = foldn (ap1, ap2)
ap1 g = g (Succ Zero)
ap2 g h = h (g h)
swap f a b = f b a
--}
{--
-- step 0
ack (0, y) = y + 1
ack (x+1, 0) = ack (x, 1)
ack (x+1, y+1) = ack (x, ack (x+1, y))
--}
{--
-- step 1
cack 0 y = y + 1
cack (x+1) 0 = cack x 1
cack (x+1) (y+1) = cack x (cack (x+1) y)
--}
{--
-- step 2
f y = y + 1 -- f = cack 0
g 0 = h 1 -- g = cack (x+1), h = cack x
g (y+1) = h (g y) -- same as above
--}
{--
-- step 3
f = (+1)
g = foldn' (h 1, h)
--}
{--
-- step 4
cack 0 = (+1)
cack (x+1) = foldn' (cack x 1, cack x)
--}
{--
-- step 5
cack 0 = (+1)
cack (x+1) = k (cack x)
where k v = foldn' (v 1, v)
--}
-- step 6
cack = foldn' ((+1), k)
where k v = foldn' (v 1, v)
cack' = foldn' ((+1), swap f)
where f = foldn' (ap1, ap2)
ap1 g = g 1
ap2 g h = h (g h)
swap f a b = f b a
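-- Illustrative values (not in the original): both versions compute the
-- curried Ackermann function, e.g.
--   cack 1 y == y + 2
--   cack 2 y == 2*y + 3        -- so cack 2 3 == 9 == cack' 2 3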
-- | Ex 1.7
data ListR a = Nil
| Cons (a, ListR a)
deriving Show
foldr :: (b, (a, b) -> b) -> ListR a -> b
foldr (c, f) = u
where u Nil = c
u (Cons (a, xs)) = f (a, u xs)
-- [nil, cons]
-- [a] <------------------------- 1 + a * [a]
-- | |
-- | u = (|nil, cons . (f * id)|) | 1 + 1 * u
-- | |
-- v v
-- [b] <----- 1 + b * [b] ------ 1 + a * [b]
-- [nil, cons] 1 + f * id
-- <------------------------
-- [nil, cons] . (1 + f * id) == [nil, cons . (f * id)
--
listr :: (a -> b) -> ListR a -> ListR b
listr f = foldr (Nil, Cons . cross (f, id))
data ListL a = SNil
| Snoc (ListL a, a)
deriving Show
foldl :: (b, (b, a) -> b) -> ListL a -> b
foldl (c, f) = u
where u SNil = c
u (Snoc (xs, a)) = f (u xs, a)
-- [snil, snoc]
-- [a] <------------------------- 1 + [a] * a
-- | |
-- | u = (|snil, snoc . (id * f)|) | 1 + u * 1
-- | |
-- v v
-- [b] <----- 1 + [b] * b ------ 1 + [b] * a
-- [snil, snoc] 1 + id * f
-- <------------------------
-- [snil, snoc] . (1 + id * f) == [snil, snoc . (id * f)
listl :: (a -> b) -> ListL a -> ListL b
listl f = foldl (SNil, g)
where g (xs, x) = Snoc (xs, f x)
{--
convert :: ListL a -> ListR a
convert SNil = Nil
convert (Snoc (xs, a)) = snocr (convert xs, a)
--}
convert :: ListL a -> ListR a
convert = foldl (Nil, snocr)
{--
snocr :: (ListR a, a) -> ListR a
snocr (Nil, b) = Cons (b, Nil)
snocr (Cons (a, x), b) = Cons (a, snocr (x, b))
--}
snocr :: (ListR a, a) -> ListR a
snocr = uncurry . flip $ snocr'
where
snocr' :: a -> ListR a -> ListR a
snocr' b = foldr (c, f)
where c = Cons (b, Nil)
f = Cons
{--
snocr' b Nil = Cons (b, Nil)
snocr' b (Cons (a, x)) = Cons (a, snocr' b x)
--}
-- | Ex 1.8
{--
-- step 1
catconv SNil = id
catconv (Snoc (xs, a)) = \ys -> catconv xs (Cons (a, ys))
--}
{--
-- step 2
-- h (catconv xs, a) === \ys -> catconv xs (Cons (a, ys))
-- h (catconv xs, a) ys === catconv xs (Cons (a, ys))
-- h (f, a) ys === f (Cons (a, ys)) where f = catconv xs
catconv SNil = id
catconv (Snoc (xs, a)) = h (catconv xs, a)
where h (f, a) ys = f (Cons (a, ys))
--}
-- step 3
catconv :: ListL a -> ListR a -> ListR a
catconv = foldl (c, h)
where c = id
h (f, a) ys = f (Cons (a, ys))
convert' :: ListL a -> ListR a
convert' x = catconv x Nil
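-- Illustrative check (not in the original): the accumulating version agrees
-- with 'convert', e.g.
--   convert' (Snoc (Snoc (SNil, 1), 2)) == Cons (1, Cons (2, Nil))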
-- | Ex 1.9
{--
nil ++ ys == ys
i) case ys = nil:
nil ++ nil = nil
ii) case ys = Snoc (ys', y'):
nil ++ Snoc (ys', y')
= {- definition of ++ -}
Snoc (nil ++ ys', y')
= {- induction -}
Snoc (ys', y')
--}
-- | Ex 1.10
cat :: ListR a -> (ListR a -> ListR a)
cat Nil = id
cat (Cons (x, xs)) = \ys -> Cons (x, cat xs ys)
-- | Ex 1.11
-- foldl over cons-list
{--
-- step 1
foldL :: (b, (b, a) -> b) -> ListR a -> b
foldL (c, f) Nil = c
foldL (c, f) (Cons (x, xs)) = foldL (f (c, x), f) xs
--}
-- [a] <----------------------- 1 + a * [a]
-- | |
-- |u = (|id, g|) | 1 + 1 * (|id, g|)
-- | |
-- v v
-- b^b <----------------------- 1 + a * b^b
--
foldL :: forall a b. (b, (b, a) -> b) -> ListR a -> b
foldL (c, f) x = foldr (id, g) x c
where
g :: (a, b -> b) -> b -> b
g (a, h) c = h (f (c, a))
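-- Illustrative check (not in the original): this really behaves as a left
-- fold, e.g.
--   foldL (0, uncurry (+)) (Cons (1, Cons (2, Cons (3, Nil)))) == 6
-- with the additions associated as ((0 + 1) + 2) + 3.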
-- | Ex 1.12
take :: Nat -> ListR a -> ListR a
take n x = foldr (c, h) x n
where
c :: Nat -> ListR a
c a = Nil
h :: (a, Nat -> ListR a) -> Nat -> ListR a
h (a, f) Zero = Nil
h (a, f) (Succ n) = Cons (a, f n)
drop :: Nat -> ListR a -> ListR a
drop n x = foldr (d, k) x n
where
d :: Nat -> ListR a
d a = Nil
k :: (a, Nat -> ListR a) -> Nat -> ListR a
k (a, f) Zero = Cons (a, f Zero)
k (a, f) (Succ n) = f n
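-- Illustrative checks (not in the original), with two = Succ (Succ Zero)
-- and xs = Cons (1, Cons (2, Cons (3, Nil))):
--   take two xs == Cons (1, Cons (2, Nil))
--   drop two xs == Cons (3, Nil)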
-- | Ex 1.13
data GTree a = Node (a, ListL (GTree a)) deriving Show
-- Node
-- GTree A <----------------- A * [Gtree A]
-- | |
-- | (|f|) | F(|f|)
-- | |
-- v v
-- B <----------------- A * [B]
-- f
foldg :: ((a, ListL b) -> b) -> GTree a -> b
foldg f = u
where u (Node (x, ts)) = f (x, listl u ts)
size :: GTree a -> Integer
size = foldg g
where g (x, ts) = 1 + sum ts
sum :: ListL Integer -> Integer
sum = foldl (c, f)
where c = 0
f = uncurry (+)
depth :: GTree a -> Integer
depth = foldg g
where g (a, SNil) = 0
g (a, Snoc (xs, x)) = 1 + maxlist xs
maxlist :: ListL Integer -> Integer
maxlist = foldl (c, f)
where c = 0
f = uncurry max
-- | Ex 1.14
data Tree a = Tip a
| Bin (Tree a, Tree a)
deriving Show
foldt :: (a -> b, (b, b) -> b) -> Tree a -> b
foldt (f, g) = u
where u (Tip a) = f a
u (Bin (l, r)) = g (u l, u r)
tree :: (a -> b) -> Tree a -> Tree b
tree f = foldt (Tip . f, Bin)
curryT :: GTree a -> Tree a
curryT = foldg h
where h (x, ts) = foldl (Tip x, Bin) ts
uncurryT :: Tree a -> GTree a
uncurryT = foldt (f, g)
where f a = Node (a, SNil)
g (Node (a, xs), ys) = Node (a, Snoc (xs, ys))
-- f (g (a, b), h (c), d)
test_1_14 = Node ('f', ghd)
where a = Node ('a', SNil)
b = Node ('b', SNil)
c = Node ('c', SNil)
d = Node ('d', SNil)
ab = Snoc (Snoc (SNil, a), b)
gab = Node ('g', ab)
hc = Node ('h', Snoc (SNil, c))
ghd = Snoc (Snoc (Snoc (SNil, gab), hc), d)
-- | Ex 1.15
-- [a] <----------------------- 1 + a * [a]
-- | |
-- |u = (|id, g|) | 1 + 1 * (|id, g|)
-- | |
-- v v
-- [(a,b)]^[b] <--------------- 1 + a * [(a,b)]^[b]
--
zip :: ListR a -> ListR b -> ListR (a, b)
zip = foldr (c, h)
where
c :: ListR b -> ListR (a, b)
c ys = Nil
h :: (a, ListR b -> ListR (a, b)) -> ListR b -> ListR (a, b)
h (x, f) Nil = Nil
h (x, f) (Cons (y, ys)) = Cons ((x, y), f ys)
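-- Illustrative check (not in the original): zipping stops at the shorter
-- argument, e.g.
--   zip (Cons (1, Cons (2, Nil))) (Cons ('a', Cons ('b', Cons ('c', Nil))))
--     == Cons ((1,'a'), Cons ((2,'b'), Nil))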
-- | Ex 1.16
data D = D0 | D1 | D2 | D3 | D4 | D5 | D6 | D7 | D8 | D9 deriving Show
data D' = D'1 | D'2 | D'3 | D'4 | D'5 | D'6 | D'7 | D'8 | D'9 deriving Show
data Digits = Wrap D' | Add (Digits, D) deriving Show
foldd :: (D' -> a, (a, D) -> a) -> Digits -> a
foldd (f, g) = u
where u (Wrap d) = f d
u (Add (ds, d)) = g (u ds, d)
plus' :: NatPlus -> NatPlus -> NatPlus
plus' = foldnplus (c, f)
where c y = Next y
f g y = Next (g y)
-- plus' One y = Next y
-- plus' (Next x) y = Next (plus' x y)
times' :: NatPlus -> NatPlus -> NatPlus
times' = foldnplus (c, f)
where c y = y
f g y = plus' y (g y)
-- times' One y = y
-- times' (Next x) y = plus' y (times' x y)
unfoldd :: (a -> Either D' (a, D)) -> a -> Digits
unfoldd psi = v
where v x = case psi x of
Left d' -> Wrap d'
Right (ds, d) -> Add (v ds, d)
toD' :: Integer -> D'
toD' 1 = D'1
toD' 2 = D'2
toD' 3 = D'3
toD' 4 = D'4
toD' 5 = D'5
toD' 6 = D'6
toD' 7 = D'7
toD' 8 = D'8
toD' 9 = D'9
toD' _ = error "Oops!"
fromD' :: D' -> NatPlus
fromD' D'1 = One
fromD' D'2 = Next (fromD' D'1)
fromD' D'3 = Next (fromD' D'2)
fromD' D'4 = Next (fromD' D'3)
fromD' D'5 = Next (fromD' D'4)
fromD' D'6 = Next (fromD' D'5)
fromD' D'7 = Next (fromD' D'6)
fromD' D'8 = Next (fromD' D'7)
fromD' D'9 = Next (fromD' D'8)
toD :: Integer -> D
toD 0 = D0
toD 1 = D1
toD 2 = D2
toD 3 = D3
toD 4 = D4
toD 5 = D5
toD 6 = D6
toD 7 = D7
toD 8 = D8
toD 9 = D9
toD _ = error "Oops!"
fromD :: D -> NatPlus
fromD D0 = error "Oops!"
fromD D1 = One
fromD D2 = Next (fromD D1)
fromD D3 = Next (fromD D2)
fromD D4 = Next (fromD D3)
fromD D5 = Next (fromD D4)
fromD D6 = Next (fromD D5)
fromD D7 = Next (fromD D6)
fromD D8 = Next (fromD D7)
fromD D9 = Next (fromD D8)
decimal :: NatPlus -> Digits
decimal = unfoldd psi
where psi :: NatPlus -> Either D' (NatPlus, D)
psi n = case fromNatPlus n `divMod` 10 of
(0, m) -> Left (toD' m)
(d, m) -> Right (toNatPlus d, toD m)
eval :: Digits -> NatPlus
eval = foldd (fromD', p)
where d10 = Next (fromD D9)
p :: (NatPlus, D) -> NatPlus
p (n, D0) = n `times'` d10
p (n, d) = (n `times'` d10) `plus'` fromD d
fromNatPlus :: NatPlus -> Integer
fromNatPlus = foldnplus (1, (1+))
toNatPlus :: Integer -> NatPlus
toNatPlus n | n == 1 = One
| n > 1 = Next (toNatPlus (n-1))
| otherwise = error "Oops!"
test_1_16 :: NatPlus
test_1_16 = eval (Add (Wrap D'4, D2))
test_1_16' :: Integer -> Digits
test_1_16' = decimal . toNatPlus
-- | Ex 1.17
--
-- 1. listr f . concat == concat . listr (listr f)
-- where concat = foldr (nil, cat)
-- cat x = foldl (x, snoc)
--
concat :: ListR (ListR a) -> ListR a
concat = foldr (Nil, uncurry cat)
-- Lemma. Prove that map f (xs ++ ys) == map f xs ++ map f ys (ref IFPH exercise 4.3.4)
--
-- base case: xs == [] {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f ([] ++ ys)
-- == {- definition of cat -}
-- map f ys
--
-- base case: xs == [] {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f xs ++ map f ys
-- == {- definition of map (foldr) -}
-- [] ++ map f ys
-- == {- definition of cat -}
-- map f ys
--
-- inductive case: xs == (x:xs) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f ((x:xs) ++ ys)
-- == {- definition of cat -}
-- map f (x:(xs ++ ys))
-- == {- definition of map (foldr) -}
-- f x:map f (xs ++ ys)
-- == {- induction -}
-- f x:(map f xs ++ map f ys)
--
-- inductive case: xs == (x:xs) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f (x:xs) ++ map f ys
-- == {- definition of map (foldr) -}
-- (f x:map f xs) ++ map f ys
-- == {- definition of cat -}
-- f x:(map f xs ++ map f ys)
--
-- Theorem: map f (concat xss) == concat (map (map f) xss)
--
-- base case: xss == [] {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f (concat [])
-- == {- definition of concat -}
-- map f (foldr (nil, cat) [])
-- == {- definition of foldr -}
-- map f []
-- == {- definition of map (foldr) -}
-- []
--
-- base case: xss == [] {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- concat (map (map f) [])
-- == {- definition of map (foldr) -}
-- concat []
-- == {- definition of concat -}
-- foldr (nil, cat) []
-- == {- definition of foldr -}
-- []
--
-- inductive case: xss == (xs:xss) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- map f (concat (xs:xss)
-- == {- definition of concat -}
-- map f (foldr (nil, cat) (xs:xss))
-- == {- definition of foldr -}
-- map f (cat (xs, foldr (nil, cat) xss))
-- == {- definition of cat; rewrite the foldr back to concat via the definition of concat -}
-- map f (xs ++ concat xss)
-- == {- by the Lemma -}
-- map f xs ++ map f (concat xss)
-- == {- induction -}
-- map f xs ++ concat (map (map f) xss)
--
-- inductive case: xss == (xs:xss) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- concat (map (map f) (xs:xss))
-- == {- definition of map (foldr) -}
-- concat (map f xs: map (map f) xss)
-- == {- definition of concat -}
-- foldr (nil, cat) (map f xs: map (map f) xss)
-- == {- definition of foldr; rewrite the foldr back to concat via the definition of concat -}
-- cat (map f xs, concat (map (map f) xss))
-- == {- cat is ++ -}
-- map f xs ++ concat (map (map f) xss)
--
-- 2. listl (listl f) . inits == inits . listl f
-- where inits = foldl ([nil], f)
-- where f (snoc (xs, x), a) = snoc (snoc (xs, x), snoc (x, a))
inits :: ListL a -> ListL (ListL a)
inits = foldl (c, f)
where c = Snoc (SNil, SNil)
f :: (ListL (ListL a), a) -> ListL (ListL a)
f (yys@(Snoc (ys, y)), z) = Snoc (yys, Snoc (y, z))
lastL :: ListL a -> a
lastL (Snoc (xs, x)) = x
--
-- Lemma: lastL (inits xs) == xs
--
-- base case (xs = [])
-- ~~~~~~~~~~~~~~~~~~~~
-- lastL (inits [])
-- == {- definition of inits -}
-- lastL [[]]
-- == {- definition of lastL -}
-- []
--
-- inductive case (xs = xs>:x)
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- lastL (inits (xs>:x))
-- == {- definition of inits -}
-- lastL (foldl ([nil], g) (xs>:x)) where g (ys>:y, z) = (ys>:y) >: (y>:z)
-- == {- definition of foldl -}
-- lastL (g (inits xs, x))
-- == {- definition of g -}
-- lastL (inits xs >: (y>:x)) where y = last (inits xs)
-- == {- induction hypothesis -}
-- lastL (inits xs >: (xs>:x))
-- == {- definition of lastL -}
-- xs>:x
--
-- base case xs == [] {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- listl (listl f) (inits [])
-- == {- definition of inits (foldl) -}
-- listl (listl f) [[]]
-- == {- construction of the snoc list -}
-- listl (listl f) (Snoc ([],[]))
-- == {- definition of listl -}
-- foldl (SNil, g) (Snoc ([],[])) where g (xs, x) = Snoc (xs, f x)
-- == {- definition of foldl -}
-- g ([], [])
-- == {- definition of g -}
-- Snoc ([], listl f [])
-- == {- definition of listl -}
-- Snoc ([], [])
-- == {- snoc construction -}
-- [[]]
--
-- base case xs == [] {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- inits (listl f [])
-- == {- definition of listl -}
-- inits []
-- == {- definition of inits -}
-- [[]]
--
-- inductive case xs == Snoc (xss, xs) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listl (listl f) (inits (xs:x))
-- == {- definition of inits -}
-- listl (listl f) (foldl ([nil], g) (xs>:x)) where g (Snoc (ys, y), z) = Snoc (Snoc (ys, y), Snoc (y, z))
-- == {- definition of foldl -}
-- listl (listl f) (g (inits xs, x))
-- == {- definition of g -}
-- listl (listl f) (inits xs >: (y >: x)) where y = last (inits xs)
-- == {- Lemma: lastL . inits == id -}
-- listl (listl f) (inits xs >: (xs >: x))
-- == {- definition of listl -}
-- listl (listl f) (inits xs) >: (listl f (xs >: x))
-- == {- induction hypothesis and definition of listl -}
-- inits (listl f xs) >: (listl f xs >: f x)
--
-- inductive case xs == Snoc (xss, xs) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- inits (listl f (xs>:x))
-- == {- definition of listl -}
-- inits (foldl (SNil, g) (xs>:x)) where g (xs, x) = xs >: f x
-- == {- definition of foldl -}
-- inits (g (listl f xs, x))
-- == {- definition of g -}
-- inits (listl f xs >: f x)
-- == {- definition of inits -}
-- foldl ([nil], g) (listl f xs >: f x) where g (ys >: y, z) = (ys >: y) >: (y >: z)
-- == {- definition of foldl -}
-- g (inits (listl f xs), f x)
-- == {- definition of g -}
-- inits (listl f xs) >: (y >: f x) where y = last (inits (listl f xs))
-- == {- Lemma: lastL . inits == id -}
-- inits (listl f xs) >: (listl f xs >: f x)
--
-- 3. listr f . reverse == reverse . listr f
--
reverse :: ListR a -> ListR a
reverse = foldr (Nil, append)
where append (a, x) = snocr (x, a)
--
-- base case (xs = []) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listr f (reverse [])
-- ==
-- listr f []
-- ==
-- []
--
-- base case (xs = []) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- reverse (listr f [])
-- ==
-- reverse []
-- ==
-- []
--
-- inductive case (xs = x:xs) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listr f (reverse (x:xs))
-- == {- definition of reverse -}
-- listr f (foldr ([], append) (x:xs))
-- == {- definition of foldr -}
-- listr f (append (x, reverse xs))
-- == {- definition of append (snocr) -}
-- listr f (reverse xs ++ [x])
-- == {- Lemma: map f (xs ++ ys) == map f xs ++ map f ys -}
-- listr f (reverse xs) ++ listr f [x]
-- == {- induction and definition of listr -}
-- reverse (listr f xs) ++ [f x]
--
-- inductive case (xs = x:xs) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- reverse (listr f (x:xs))
-- == {- definition of listr -}
-- reverse (f x:listr f xs)
-- == {- definition of reverse -}
-- foldr ([], append) (f x:listr f xs)
-- == {- definition of foldr -}
-- append (f x, reverse (listr f xs))
-- == {- definition of append (snocr) -}
-- reverse (listr f xs) ++ [f x]
--
-- 4 . listr (cross (f, g)) . zip == zip . cross (listr f, listr g)
--
-- base case (xs = []) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listr (cross (f, g)) (uncurry zip ([], ys))
-- == {- definition of zip -}
-- listr (cross (f, g)) []
-- == {- definition of listr -}
-- []
--
-- base case (xs = []) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- uncurry zip (cross (listr f, listr g) ([], ys))
-- == {- definition of cross -}
-- uncurry zip (listr f [], listr g ys)
-- == {- definition of listr -}
-- uncurry zip ([], listr g ys)
-- == {- definition of zip -}
-- []
--
-- inductive case (xs = x:xs, ys = []) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listr (cross (f, g)) (uncurry zip (x:xs, []))
-- ==
-- listr (cross (f, g)) (zip (x:xs) [])
-- ==
-- listr (cross (f, g)) (foldr (c, h) (x:xs) []) where c ys = [], h (x, f) [] = [], h (x, f) (y:ys) = (x,y):f ys
-- ==
-- listr (cross (f, g)) (h (x, zip xs) [])
-- ==
-- listr (cross (f, g)) []
-- ==
-- []
-- inductive case (xs = x:xs, ys = []) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- uncurry zip (cross (listr f, listr g) (x:xs, []))
-- ==
-- uncurry zip (listr f (x:xs), listr g [])
-- ==
-- uncurry zip (listr f (x:xs), [])
-- ==
-- zip (listr f (x:xs)) []
-- ==
-- zip (f x:listr f xs) []
-- ==
-- foldr (c, h) (f x:listr f xs) [] where c ys = [], h (x, f) [] = [], h (x, f) (y:ys) = (x,y):f ys
-- ==
-- h (f x, zip (listr f xs)) []
-- ==
-- []
--
-- inductive case (xs = x:xs, ys = y:ys) {lhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- listr (cross (f, g)) (uncurry zip (x:xs, y:ys))
-- ==
-- listr (cross (f, g)) (zip (x:xs) (y:ys))
-- ==
-- listr (cross (f, g)) (foldr (c, h) (x:xs) (y:ys)) where c ys = [], h (x, f) [] = [], h (x, f) (y:ys) = (x,y):f ys
-- ==
-- listr (cross (f, g)) (h (x, zip xs) (y:ys))
-- ==
-- listr (cross (f, g)) ((x,y):zip xs ys)
-- ==
-- cross (f, g) (x, y):listr (cross (f, g)) (zip xs ys)
-- ==
-- (f x, g y):listr (cross (f, g)) (zip xs ys)
--
-- inductive case (xs = x:xs, ys = y:ys) {rhs}
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- uncurry zip (cross (listr f, listr g) (x:xs, y:ys))
-- ==
-- uncurry zip (listr f (x:xs), listr g (y:ys))
-- ==
-- uncurry zip (listr f (x:xs), listr g (y:ys))
-- ==
-- zip (listr f (x:xs)) (listr g (y:ys))
-- ==
-- zip (f x:listr f xs) (g y:listr g ys)
-- ==
-- foldr (c, h) (f x:listr f xs) (g y:listr g ys) where c ys = [], h (x, f) [] = [], h (x, f) (y:ys) = (x,y):f ys
-- ==
-- h (f x, zip (listr f xs)) (g y:listr g ys)
-- ==
-- (f x, g y):zip (listr f xs) (listr g ys)
-- ==
-- (f x, g y):uncurry zip (listr f xs, listr g ys)
-- ==
-- (f x, g y):uncurry zip (cross (listr f, listr g) (xs, ys))
-- ==
-- (f x, g y):listr (cross (f, g)) (uncurry zip (xs, ys))
-- ==
-- (f x, g y):listr (cross (f, g)) (zip xs ys)
-- | Ex 1.18
-- foo xs y
-- Tree (A * B) <------------------ ListR A * B A B
-- | | | |
-- | Tree (f, g) | ListR f * g |f |g
-- | | | |
-- v v v v
-- Tree (C * D) <------------------ LIstR C * D C D
-- foo
--
-- tree (cross (f, g)) . foo == foo . cross (listr f, g)
--
-- | Ex 1.19
-- foo
-- ListL A <--------------- GTree A A
-- | | |
-- |ListL f |Gtree f |f
-- | | |
-- v v v
-- ListL B <--------------- GTree B B
-- foo
--
-- listl f . foo == foo . gtree f
--
-- Node
-- GTree a <------------------------------ a * ListL (GTree a) a
-- | | |
-- | u = (|node . (f * id)|) | 1 * listl u |f
-- | | |
-- v Node f * id v v
-- GTree b <---- b * ListL (GTree b) ----- a * ListL (GTree b) b
-- <------------------------------
-- Node . (f * id)
--
gtree :: (a -> b) -> GTree a -> GTree b
gtree f = foldg (Node . cross (f, id))
cross :: (a -> c, b -> d) -> (a, b) -> (c, d)
cross (f, g) (x, y) = (f x, g y)
| cutsea110/aop | src/Chap01.hs | bsd-3-clause | 22,782 | 1 | 12 | 7,226 | 5,152 | 2,943 | 2,209 | 273 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Futhark.Representation.Kernels.Kernel
( Kernel(..)
, KernelBody
, NestedKernelBody
, GenKernelBody(..)
, KernelStm(..)
, KernelSpace(..)
, spaceDimensions
, SpaceStructure(..)
, scopeOfKernelSpace
, GroupStreamLambda(..)
, WhichThreads(..)
, KernelResult(..)
, KernelSize(..)
, chunkedKernelNonconcatOutputs
, typeCheckKernel
, aliasAnalyseKernelStm
-- * Generic traversal
, KernelMapper(..)
, identityKernelMapper
, mapKernelM
)
where
import Control.Applicative
import Control.Monad.Writer
import Control.Monad.Identity
import qualified Data.HashSet as HS
import qualified Data.HashMap.Lazy as HM
import Data.List
import Data.Maybe
import Prelude
import Futhark.Representation.AST
import qualified Futhark.Analysis.Alias as Alias
import qualified Futhark.Analysis.UsageTable as UT
import qualified Futhark.Util.Pretty as PP
import Futhark.Util.Pretty
((</>), (<+>), ppr, comma, commasep, Pretty, parens, text)
import Futhark.Transform.Substitute
import Futhark.Transform.Rename
import Futhark.Optimise.Simplifier.Lore
import Futhark.Representation.Ranges
(Ranges, removeLambdaRanges, removeBodyRanges)
import Futhark.Representation.AST.Attributes.Ranges
import Futhark.Representation.AST.Attributes.Aliases
import Futhark.Representation.Aliases
(Aliases, removeLambdaAliases, removeBodyAliases, Names'(..), removeBindingAliases)
import Futhark.Analysis.Usage
import qualified Futhark.TypeCheck as TC
import Futhark.Analysis.Metrics
import Futhark.Tools (partitionChunkedKernelLambdaParameters)
import qualified Futhark.Analysis.Range as Range
data Kernel lore =
ScanKernel Certificates SubExp
KernelSize
(LambdaT lore)
(LambdaT lore)
[SubExp]
[VName]
| WriteKernel Certificates SubExp
(LambdaT lore)
[VName]
[(SubExp, VName)]
-- See SOAC.hs for what the different WriteKernel arguments mean.
| NumGroups
| GroupSize
| Kernel Certificates
KernelSpace
[Type]
(KernelBody lore)
deriving (Eq, Show, Ord)
data KernelSpace = KernelSpace { spaceGlobalId :: VName
, spaceLocalId :: VName
, spaceGroupId :: VName
, spaceNumThreads :: SubExp
, spaceNumGroups :: SubExp
, spaceGroupSize :: SubExp -- flat group size
, spaceStructure :: SpaceStructure
}
deriving (Eq, Show, Ord)
data SpaceStructure = FlatSpace
[(VName, SubExp)] -- gtids and dim sizes
| NestedSpace
[(VName, -- gtid
SubExp, -- global dim size
VName, -- ltid
SubExp -- local dim sizes
)]
deriving (Eq, Show, Ord)
-- | Global thread IDs and their upper bound.
spaceDimensions :: KernelSpace -> [(VName, SubExp)]
spaceDimensions = structureDimensions . spaceStructure
where structureDimensions (FlatSpace dims) = dims
structureDimensions (NestedSpace dims) =
let (gtids, gdim_sizes, _, _) = unzip4 dims
in zip gtids gdim_sizes
type KernelBody = GenKernelBody KernelResult
type NestedKernelBody = GenKernelBody SubExp
-- | A kernel body parametrised over its result.
data GenKernelBody res lore = KernelBody { kernelBodyStms :: [KernelStm lore]
, kernelBodyResult :: [res]
}
deriving (Eq, Show, Ord)
data KernelResult = ThreadsReturn WhichThreads SubExp
| ConcatReturns
StreamOrd -- Permuted?
SubExp -- The final size.
SubExp -- Per-thread (max) chunk size.
VName -- Chunk by this thread.
deriving (Eq, Show, Ord)
data WhichThreads = AllThreads
| OneThreadPerGroup SubExp -- Which one.
| ThreadsPerGroup [(VName,SubExp)] -- All threads before this one.
| ThreadsInSpace
deriving (Eq, Show, Ord)
data KernelStm lore = SplitArray (VName, [PatElem (LetAttr lore)]) StreamOrd SubExp SubExp [VName]
| Thread WhichThreads (Binding lore)
| Combine (PatElem (LetAttr lore)) [(VName,SubExp)] SubExp
| GroupReduce [PatElem (LetAttr lore)] SubExp
(Lambda lore) [(SubExp,VName)]
| GroupStream [PatElem (LetAttr lore)]
SubExp SubExp
(GroupStreamLambda lore) [SubExp] [VName]
| GroupIf [PatElem (LetAttr lore)] SubExp (NestedKernelBody lore) (NestedKernelBody lore)
deriving instance Annotations lore => Eq (KernelStm lore)
deriving instance Annotations lore => Show (KernelStm lore)
deriving instance Annotations lore => Ord (KernelStm lore)
boundByKernelStm :: KernelStm lore -> Names
boundByKernelStm = HS.fromList . HM.keys . scopeOf
data GroupStreamLambda lore = GroupStreamLambda
{ groupStreamChunkSize :: VName
, groupStreamChunkOffset :: VName
, groupStreamAccParams :: [LParam lore]
, groupStreamArrParams :: [LParam lore]
, groupStreamLambdaBody :: NestedKernelBody lore
}
deriving instance Annotations lore => Eq (GroupStreamLambda lore)
deriving instance Annotations lore => Show (GroupStreamLambda lore)
deriving instance Annotations lore => Ord (GroupStreamLambda lore)
data KernelSize = KernelSize { kernelWorkgroups :: SubExp
, kernelWorkgroupSize :: SubExp
, kernelElementsPerThread :: SubExp
, kernelTotalElements :: SubExp
, kernelThreadOffsetMultiple :: SubExp
, kernelNumThreads :: SubExp
}
deriving (Eq, Ord, Show)
-- | Like 'Mapper', but just for 'Kernel's.
data KernelMapper flore tlore m = KernelMapper {
mapOnKernelSubExp :: SubExp -> m SubExp
, mapOnKernelLambda :: Lambda flore -> m (Lambda tlore)
, mapOnKernelBody :: Body flore -> m (Body tlore)
, mapOnKernelVName :: VName -> m VName
, mapOnKernelCertificates :: Certificates -> m Certificates
, mapOnKernelLParam :: LParam flore -> m (LParam tlore)
, mapOnKernelKernelBody :: KernelBody flore -> m (KernelBody tlore)
}
-- | A mapper that simply returns the 'Kernel' verbatim.
identityKernelMapper :: Monad m => KernelMapper lore lore m
identityKernelMapper = KernelMapper { mapOnKernelSubExp = return
, mapOnKernelLambda = return
, mapOnKernelBody = return
, mapOnKernelVName = return
, mapOnKernelCertificates = return
, mapOnKernelLParam = return
, mapOnKernelKernelBody = return
}
-- | Map a monadic action across the immediate children of a
-- Kernel. The mapping does not descend recursively into subexpressions
-- and is done left-to-right.
mapKernelM :: (Applicative m, Monad m) =>
KernelMapper flore tlore m -> Kernel flore -> m (Kernel tlore)
mapKernelM tv (ScanKernel cs w kernel_size fun fold_fun nes arrs) =
ScanKernel <$>
mapOnKernelCertificates tv cs <*>
mapOnKernelSubExp tv w <*>
mapOnKernelSize tv kernel_size <*>
mapOnKernelLambda tv fun <*>
mapOnKernelLambda tv fold_fun <*>
mapM (mapOnKernelSubExp tv) nes <*>
mapM (mapOnKernelVName tv) arrs
mapKernelM tv (WriteKernel cs len lam ivs as) =
WriteKernel <$>
mapOnKernelCertificates tv cs <*>
mapOnKernelSubExp tv len <*>
mapOnKernelLambda tv lam <*>
mapM (mapOnKernelVName tv) ivs <*>
mapM (\(aw,a) -> (,) <$> mapOnKernelSubExp tv aw <*> mapOnKernelVName tv a) as
mapKernelM _ NumGroups = pure NumGroups
mapKernelM _ GroupSize = pure GroupSize
mapKernelM tv (Kernel cs space ts kernel_body) =
Kernel <$> mapOnKernelCertificates tv cs <*>
mapOnKernelSpace space <*>
mapM (mapOnKernelType tv) ts <*>
mapOnKernelKernelBody tv kernel_body
where mapOnKernelSpace (KernelSpace gtid ltid gid num_threads num_groups group_size structure) =
KernelSpace gtid ltid gid -- all in binding position
<$> mapOnKernelSubExp tv num_threads
<*> mapOnKernelSubExp tv num_groups
<*> mapOnKernelSubExp tv group_size
<*> mapOnKernelStructure structure
mapOnKernelStructure (FlatSpace dims) =
FlatSpace <$> (zip gtids <$> mapM (mapOnKernelSubExp tv) gdim_sizes)
where (gtids, gdim_sizes) = unzip dims
mapOnKernelStructure (NestedSpace dims) =
NestedSpace <$> (zip4 gtids
<$> mapM (mapOnKernelSubExp tv) gdim_sizes
<*> pure ltids
<*> mapM (mapOnKernelSubExp tv) ldim_sizes)
where (gtids, gdim_sizes, ltids, ldim_sizes) = unzip4 dims
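-- A small usage sketch (for illustration; 'renameKernelVNames' is not part
-- of this module): a traversal that rewrites just the VNames exposed by the
-- generic mapper can be built by overriding one field of
-- 'identityKernelMapper' and running the result in Identity:
--
-- > renameKernelVNames :: (VName -> VName) -> Kernel lore -> Kernel lore
-- > renameKernelVNames f =
-- >   runIdentity .
-- >   mapKernelM identityKernelMapper { mapOnKernelVName = return . f }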
mapOnKernelType :: (Monad m, Applicative m, Functor m) =>
KernelMapper flore tlore m -> Type -> m Type
mapOnKernelType _tv (Prim pt) = pure $ Prim pt
mapOnKernelType tv (Array pt shape u) = Array pt <$> f shape <*> pure u
where f (Shape dims) = Shape <$> mapM (mapOnKernelSubExp tv) dims
mapOnKernelType _tv (Mem se s) = pure $ Mem se s
mapOnKernelSize :: (Monad m, Applicative m) =>
KernelMapper flore tlore m -> KernelSize -> m KernelSize
mapOnKernelSize tv (KernelSize num_workgroups workgroup_size
per_thread_elements num_elements offset_multiple num_threads) =
KernelSize <$>
mapOnKernelSubExp tv num_workgroups <*>
mapOnKernelSubExp tv workgroup_size <*>
mapOnKernelSubExp tv per_thread_elements <*>
mapOnKernelSubExp tv num_elements <*>
mapOnKernelSubExp tv offset_multiple <*>
mapOnKernelSubExp tv num_threads
instance FreeIn KernelSize where
freeIn (KernelSize num_workgroups workgroup_size elems_per_thread
num_elems thread_offset num_threads) =
mconcat $ map freeIn [num_workgroups,
workgroup_size,
elems_per_thread,
num_elems,
thread_offset,
num_threads]
instance (Attributes lore, FreeIn (LParamAttr lore)) =>
FreeIn (Kernel lore) where
freeIn e = execWriter $ mapKernelM free e
where walk f x = tell (f x) >> return x
free = KernelMapper { mapOnKernelSubExp = walk freeIn
, mapOnKernelLambda = walk freeInLambda
, mapOnKernelBody = walk freeInBody
, mapOnKernelVName = walk freeIn
, mapOnKernelCertificates = walk freeIn
, mapOnKernelLParam = walk freeIn
, mapOnKernelKernelBody = walk freeIn
}
instance FreeIn KernelResult where
freeIn (ThreadsReturn which what) = freeIn which <> freeIn what
freeIn (ConcatReturns _ w per_thread_elems v) =
freeIn w <> freeIn per_thread_elems <> freeIn v
instance FreeIn WhichThreads where
freeIn AllThreads = mempty
freeIn (OneThreadPerGroup which) = freeIn which
freeIn (ThreadsPerGroup limit) = freeIn limit
freeIn ThreadsInSpace = mempty
instance (Attributes lore, FreeIn res) => FreeIn (GenKernelBody res lore) where
freeIn (KernelBody stms res) =
(free_in_stms <> free_in_res) `HS.difference` bound_in_stms
where free_in_stms = mconcat $ map freeIn stms
free_in_res = freeIn res
bound_in_stms = mconcat $ map boundByKernelStm stms
instance Attributes lore => FreeIn (KernelStm lore) where
freeIn (SplitArray (n,chunks) _ w elems_per_thread vs) =
freeIn n <> freeIn chunks <> freeIn w <> freeIn elems_per_thread <> freeIn vs
freeIn (Thread which bnd) =
freeIn which <> freeInBinding bnd
freeIn (Combine pe cspace v) =
freeIn pe <> freeIn cspace <> freeIn v
freeIn (GroupReduce pes w lam input) =
freeIn pes <> freeIn w <> freeInLambda lam <> freeIn input
freeIn (GroupStream pes w maxchunk lam accs arrs) =
freeIn pes <> freeIn w <> freeIn maxchunk <> freeIn lam <> freeIn accs <> freeIn arrs
freeIn (GroupIf pes cond tb fb) =
freeIn pes <> freeIn cond <> freeIn [tb,fb]
instance Attributes lore => FreeIn (GroupStreamLambda lore) where
freeIn (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
freeIn body `HS.difference` bound_here
where bound_here = HS.fromList $
chunk_offset : chunk_size :
map paramName (acc_params ++ arr_params)
instance (Attributes lore, Substitute res) => Substitute (GenKernelBody res lore) where
substituteNames subst (KernelBody stms res) =
KernelBody (substituteNames subst stms) $ substituteNames subst res
instance Substitute KernelResult where
substituteNames subst (ThreadsReturn who se) =
ThreadsReturn (substituteNames subst who) (substituteNames subst se)
substituteNames subst (ConcatReturns ord w per_thread_elems v) =
ConcatReturns
ord
(substituteNames subst w)
(substituteNames subst per_thread_elems)
(substituteNames subst v)
instance Substitute WhichThreads where
substituteNames _ AllThreads =
AllThreads
substituteNames subst (OneThreadPerGroup which) =
OneThreadPerGroup $ substituteNames subst which
substituteNames subst (ThreadsPerGroup limit) =
ThreadsPerGroup $ substituteNames subst limit
substituteNames _ ThreadsInSpace =
ThreadsInSpace
instance Attributes lore => Substitute (KernelStm lore) where
substituteNames subst (SplitArray (n,arrs) o w elems_per_thread vs) =
SplitArray (n,arrs) o
(substituteNames subst w)
(substituteNames subst elems_per_thread)
(substituteNames subst vs)
substituteNames subst (Thread which bnd) =
Thread
(substituteNames subst which)
(substituteNames subst bnd)
substituteNames subst (Combine pe cspace v) =
Combine (substituteNames subst pe)
(substituteNames subst cspace) (substituteNames subst v)
substituteNames subst (GroupReduce pes w lam input) =
GroupReduce (substituteNames subst pes) (substituteNames subst w)
(substituteNames subst lam) (substituteNames subst input)
substituteNames subst (GroupStream pes w maxchunk lam accs arrs) =
GroupStream (substituteNames subst pes)
(substituteNames subst w) (substituteNames subst maxchunk)
(substituteNames subst lam)
(substituteNames subst accs) (substituteNames subst arrs)
substituteNames subst (GroupIf pes cond tb fb) =
GroupIf
(substituteNames subst pes)
(substituteNames subst cond)
(substituteNames subst tb)
(substituteNames subst fb)
instance Attributes lore => Substitute (GroupStreamLambda lore) where
substituteNames
subst (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
GroupStreamLambda
(substituteNames subst chunk_size)
(substituteNames subst chunk_offset)
(substituteNames subst acc_params)
(substituteNames subst arr_params)
(substituteNames subst body)
instance Substitute KernelSpace where
substituteNames subst (KernelSpace gtid ltid gid num_threads num_groups group_size structure) =
KernelSpace (substituteNames subst gtid)
(substituteNames subst ltid)
(substituteNames subst gid)
(substituteNames subst num_threads)
(substituteNames subst num_groups)
(substituteNames subst group_size)
(substituteNames subst structure)
instance Substitute SpaceStructure where
substituteNames subst (FlatSpace dims) =
FlatSpace (map (substituteNames subst) dims)
substituteNames subst (NestedSpace dims) =
NestedSpace (map (substituteNames subst) dims)
instance Attributes lore => Substitute (Kernel lore) where
substituteNames subst (Kernel cs space ts kbody) =
Kernel
(substituteNames subst cs)
(substituteNames subst space)
(substituteNames subst ts)
(substituteNames subst kbody)
substituteNames subst k = runIdentity $ mapKernelM substitute k
where substitute =
KernelMapper { mapOnKernelSubExp = return . substituteNames subst
, mapOnKernelLambda = return . substituteNames subst
, mapOnKernelBody = return . substituteNames subst
, mapOnKernelVName = return . substituteNames subst
, mapOnKernelCertificates = return . substituteNames subst
, mapOnKernelLParam = return . substituteNames subst
, mapOnKernelKernelBody = return . substituteNames subst
}
instance (Attributes lore, Rename res) => Rename (GenKernelBody res lore) where
rename (KernelBody [] res) =
KernelBody [] <$> rename res
rename (KernelBody (stm:stms) res) =
bindingForRename (HS.toList $ boundByKernelStm stm) $ do
stm' <- rename stm
KernelBody stms' res' <- rename $ KernelBody stms res
return $ KernelBody (stm':stms') res'
instance (Attributes lore, Renameable lore) => Rename (KernelStm lore) where
rename (SplitArray (n,chunks) o w elems_per_thread vs) =
SplitArray <$> ((,) <$> rename n <*> rename chunks)
<*> pure o
<*> rename w
<*> rename elems_per_thread
<*> rename vs
rename (GroupReduce pes w lam input) =
GroupReduce <$> rename pes <*> rename w <*> rename lam <*> rename input
rename (Combine pe cspace v) =
Combine <$> rename pe <*> rename cspace <*> rename v
rename (Thread which bnd) =
Thread <$> rename which <*> rename bnd
rename (GroupStream pes w maxchunk lam accs arrs) =
GroupStream <$> rename pes <*> rename w <*> rename maxchunk <*>
rename lam <*> rename accs <*> rename arrs
rename (GroupIf pes cond tb fb) =
GroupIf <$> rename pes <*> rename cond <*> rename tb <*> rename fb
instance (Attributes lore, Renameable lore) => Rename (GroupStreamLambda lore) where
rename (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
bindingForRename (chunk_size : chunk_offset :
map paramName (acc_params++arr_params)) $
GroupStreamLambda <$>
rename chunk_size <*>
rename chunk_offset <*>
rename acc_params <*>
rename arr_params <*>
rename body
instance Rename KernelResult where
rename = substituteRename
instance Rename WhichThreads where
rename = substituteRename
scopeOfKernelSpace :: KernelSpace -> Scope lore
scopeOfKernelSpace (KernelSpace gtid ltid gid _ _ _ structure) =
HM.fromList $ zip ([gtid, ltid, gid] ++ structure') $ repeat IndexInfo
where structure' = case structure of
FlatSpace dims -> map fst dims
NestedSpace dims ->
let (gtids, _, ltids, _) = unzip4 dims
in gtids ++ ltids
instance LParamAttr lore1 ~ LParamAttr lore2 =>
Scoped lore1 (GroupStreamLambda lore2) where
scopeOf (GroupStreamLambda chunk_size chunk_offset acc_params arr_params _) =
HM.insert chunk_size IndexInfo $
HM.insert chunk_offset IndexInfo $
scopeOfLParams (acc_params ++ arr_params)
instance Scoped lore (KernelStm lore) where
scopeOf (SplitArray (size, chunks) _ _ _ _) =
mconcat (map scopeOf chunks) <>
HM.singleton size IndexInfo
scopeOf (Thread _ bnd) =
scopeOf bnd
scopeOf (Combine pe _ _) = scopeOf pe
scopeOf (GroupReduce pes _ _ _) =
mconcat $ map scopeOf pes
scopeOf (GroupStream pes _ _ _ _ _) =
mconcat $ map scopeOf pes
scopeOf (GroupIf pes _ _ _) =
scopeOf pes
instance Attributes lore => Rename (Kernel lore) where
rename = mapKernelM renamer
where renamer = KernelMapper rename rename rename rename rename rename rename
kernelType :: Kernel lore -> [Type]
kernelType (ScanKernel _ w size lam foldlam nes _) =
let arr_row_tp = drop (length nes) $ lambdaReturnType foldlam
in map (`arrayOfRow` w) (lambdaReturnType lam) ++
map (`arrayOfRow` kernelWorkgroups size) (lambdaReturnType lam) ++
map (`arrayOfRow` kernelTotalElements size) arr_row_tp
kernelType (WriteKernel _ _ lam _ input) =
zipWith arrayOfRow (snd $ splitAt (n `div` 2) lam_ts) ws
where lam_ts = lambdaReturnType lam
n = length lam_ts
ws = map fst input
kernelType (Kernel _ space ts body) =
zipWith resultShape ts $ kernelBodyResult body
where dims = map snd $ spaceDimensions space
num_groups = spaceNumGroups space
num_threads = spaceNumThreads space
resultShape t (ThreadsReturn AllThreads _) =
t `arrayOfRow` num_threads
resultShape t (ThreadsReturn OneThreadPerGroup{} _) =
t `arrayOfRow` num_groups
resultShape t (ThreadsReturn (ThreadsPerGroup limit) _) =
t `arrayOfShape` Shape (map snd limit) `arrayOfRow` num_groups
resultShape t (ThreadsReturn ThreadsInSpace _) =
foldr (flip arrayOfRow) t dims
resultShape t (ConcatReturns _ w _ _) =
t `arrayOfRow` w
kernelType NumGroups =
[Prim int32]
kernelType GroupSize =
[Prim int32]
chunkedKernelNonconcatOutputs :: Lambda lore -> Int
chunkedKernelNonconcatOutputs fun =
length $ takeWhile (not . outerSizeIsChunk) $ lambdaReturnType fun
where outerSizeIsChunk = (==Var (paramName chunk)) . arraySize 0
(_, chunk, _) = partitionChunkedKernelLambdaParameters $ lambdaParams fun
instance TypedOp (Kernel lore) where
opType = pure . staticShapes . kernelType
instance (Attributes lore, Aliased lore) => AliasedOp (Kernel lore) where
opAliases = map (const mempty) . kernelType
consumedInOp (Kernel _ _ _ kbody) =
consumedInKernelBody kbody
consumedInOp _ = mempty
aliasAnalyseKernelBody :: (Attributes lore,
Attributes (Aliases lore),
CanBeAliased (Op lore)) =>
GenKernelBody res lore
-> GenKernelBody res (Aliases lore)
aliasAnalyseKernelBody (KernelBody stms res) =
KernelBody (map aliasAnalyseKernelStm stms) res
aliasAnalyseKernelStm :: (Attributes lore,
Attributes (Aliases lore),
CanBeAliased (Op lore)) =>
KernelStm lore -> KernelStm (Aliases lore)
aliasAnalyseKernelStm (SplitArray (size, chunks) o w elems_per_thread arrs) =
SplitArray (size, chunks') o w elems_per_thread arrs
where chunks' = [ fmap (Names' $ HS.singleton arr,) chunk
| (chunk, arr) <- zip chunks arrs ]
aliasAnalyseKernelStm (Thread which bnd) =
Thread which $ Alias.analyseBinding bnd
aliasAnalyseKernelStm (Combine pe cspace v) =
Combine ((mempty,) <$> pe) cspace v
aliasAnalyseKernelStm (GroupReduce pes w lam input) =
GroupReduce pes' w lam' input
where pes' = map (fmap (mempty,)) pes
lam' = Alias.analyseLambda lam
aliasAnalyseKernelStm (GroupStream pes w maxchunk lam accs arrs) =
GroupStream pes' w maxchunk lam' accs arrs
where pes' = map (fmap (mempty,)) pes
lam' = analyseGroupStreamLambda lam
analyseGroupStreamLambda (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
GroupStreamLambda chunk_size chunk_offset acc_params arr_params $
aliasAnalyseKernelBody body
aliasAnalyseKernelStm (GroupIf pes cond tb fb) =
GroupIf pes' cond tb' fb'
where pes' = map (fmap (mempty,)) pes
tb' = aliasAnalyseKernelBody tb
fb' = aliasAnalyseKernelBody fb
instance (Attributes lore,
Attributes (Aliases lore),
CanBeAliased (Op lore)) => CanBeAliased (Kernel lore) where
type OpWithAliases (Kernel lore) = Kernel (Aliases lore)
addOpAliases = runIdentity . mapKernelM alias
where alias = KernelMapper return (return . Alias.analyseLambda)
(return . Alias.analyseBody) return return return
(return . aliasAnalyseKernelBody)
removeOpAliases = runIdentity . mapKernelM remove
where remove = KernelMapper return (return . removeLambdaAliases)
(return . removeBodyAliases) return return return
(return . removeKernelBodyAliases)
removeKernelBodyAliases :: GenKernelBody res (Aliases lore)
-> GenKernelBody res lore
removeKernelBodyAliases (KernelBody stms res) =
KernelBody (map removeStmAliases stms) res
removeStmAliases (SplitArray (size, chunks) o w elems_per_thread arrs) =
SplitArray (size, chunks') o w elems_per_thread arrs
where chunks' = map (fmap snd) chunks
removeStmAliases (Thread which bnd) =
Thread which $ removeBindingAliases bnd
removeStmAliases (Combine pe cspace v) =
Combine (snd <$> pe) cspace v
removeStmAliases (GroupReduce pes w lam input) =
GroupReduce (map (fmap snd) pes) w (removeLambdaAliases lam) input
removeStmAliases (GroupStream pes w maxchunk lam accs arrs) =
GroupStream (map (fmap snd) pes) w maxchunk (removeGroupStreamLambdaAliases lam) accs arrs
removeStmAliases (GroupIf pes cond tb fb) =
GroupIf (map (fmap snd) pes) cond (removeKernelBodyAliases tb) (removeKernelBodyAliases fb)
removeGroupStreamLambdaAliases (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
GroupStreamLambda chunk_size chunk_offset acc_params arr_params $
removeKernelBodyAliases body
instance Attributes lore => IsOp (Kernel lore) where
safeOp _ = False
instance Ranged inner => RangedOp (Kernel inner) where
opRanges op = replicate (length $ kernelType op) unknownRange
instance (Attributes lore, CanBeRanged (Op lore)) => CanBeRanged (Kernel lore) where
type OpWithRanges (Kernel lore) = Kernel (Ranges lore)
removeOpRanges = runIdentity . mapKernelM remove
where remove = KernelMapper return (return . removeLambdaRanges)
(return . removeBodyRanges) return return return
(return . removeKernelBodyRanges)
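          -- NB: range information is not tracked inside kernel bodies; the
          -- kernel-body traversals ('removeKernelBodyRanges' here and
          -- 'addKernelBodyRanges' below) are left undefined.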
removeKernelBodyRanges = undefined
addOpRanges = Range.runRangeM . mapKernelM add
where add = KernelMapper return Range.analyseLambda
Range.analyseBody return return return addKernelBodyRanges
addKernelBodyRanges = undefined
instance (Attributes lore, CanBeWise (Op lore)) => CanBeWise (Kernel lore) where
type OpWithWisdom (Kernel lore) = Kernel (Wise lore)
removeOpWisdom = runIdentity . mapKernelM remove
where remove = KernelMapper return
(return . removeLambdaWisdom)
(return . removeBodyWisdom)
return return return
(return . removeKernelBodyWisdom)
removeKernelBodyWisdom :: GenKernelBody res (Wise lore)
-> GenKernelBody res lore
removeKernelBodyWisdom (KernelBody stms res) =
KernelBody (map removeKernelStatementWisdom stms) res
removeKernelStatementWisdom (Thread which bnd) =
Thread which $ removeBindingWisdom bnd
removeKernelStatementWisdom (Combine pe cspace v) =
Combine (removePatElemWisdom pe) cspace v
removeKernelStatementWisdom (SplitArray (size,chunks) o w elems_per_thread arrs) =
SplitArray (size, map removePatElemWisdom chunks) o w elems_per_thread arrs
removeKernelStatementWisdom (GroupReduce pes w lam input) =
GroupReduce (map removePatElemWisdom pes) w (removeLambdaWisdom lam) input
removeKernelStatementWisdom (GroupStream pes w maxchunk lam accs arrs) =
GroupStream (map removePatElemWisdom pes) w maxchunk (removeGroupStreamLambdaWisdom lam) accs arrs
removeKernelStatementWisdom (GroupIf pes cond tb fb) =
GroupIf (map removePatElemWisdom pes) cond (removeKernelBodyWisdom tb) (removeKernelBodyWisdom fb)
removeGroupStreamLambdaWisdom (GroupStreamLambda chunk_size chunk_offset acc_params arr_params body) =
GroupStreamLambda chunk_size chunk_offset acc_params arr_params $
removeKernelBodyWisdom body
instance (Attributes lore, Aliased lore, UsageInOp (Op lore)) => UsageInOp (Kernel lore) where
usageInOp (ScanKernel _ _ _ _ foldfun _ arrs) =
usageInLambda foldfun arrs
usageInOp (WriteKernel _ _ _ _ as) =
mconcat $ map (UT.consumedUsage . snd) as
usageInOp (Kernel _ _ _ kbody) =
mconcat $ map UT.consumedUsage $ HS.toList $ consumedInKernelBody kbody
usageInOp NumGroups = mempty
usageInOp GroupSize = mempty
consumedInKernelBody :: (Attributes lore, Aliased lore) =>
GenKernelBody res lore -> Names
consumedInKernelBody (KernelBody stms _) =
  -- We need to figure out what is consumed in stms. We do this by
  -- moving backwards through the stms, closing the consumed set under each
  -- statement's aliases and adding whatever the statement itself consumes.
let consumed = foldr update mempty stms
in consumed `HS.difference` bound_in_stms
where bound_in_stms = HS.fromList $ HM.keys $ scopeOf stms
update stm consumed =
let aliasmap = HM.map nameAndAliases $ scopeOf stm
in aliasClosure aliasmap consumed <> consumedByKernelStm stm
aliasClosure aliasmap names =
names `HS.union` mconcat (map look $ HS.toList names)
where look k = HM.lookupDefault mempty k aliasmap
nameAndAliases (LetInfo attr) = aliasesOf attr
nameAndAliases _ = mempty
consumedByKernelStm :: (Attributes lore, Aliased lore) =>
KernelStm lore -> Names
consumedByKernelStm (Thread _ bnd) = consumedInBinding bnd
consumedByKernelStm Combine{} = mempty
consumedByKernelStm SplitArray{} = mempty
consumedByKernelStm (GroupReduce _ _ _ input) =
HS.fromList $ map snd input
consumedByKernelStm (GroupStream _ _ _ lam _ arrs) =
HS.map consumedArray $ consumedInKernelBody body
where GroupStreamLambda _ _ _ arr_params body = lam
consumedArray v = fromMaybe v $ lookup v params_to_arrs
params_to_arrs = zip (map paramName arr_params) arrs
consumedByKernelStm (GroupIf _ _ tb fb) =
consumedInKernelBody tb <> consumedInKernelBody fb
typeCheckKernel :: TC.Checkable lore => Kernel (Aliases lore) -> TC.TypeM lore ()
typeCheckKernel (ScanKernel cs w kernel_size fun foldfun nes arrs) = do
checkKernelCrud cs w kernel_size
let index_arg = (Prim int32, mempty)
arrargs <- TC.checkSOACArrayArgs w arrs
accargs <- mapM TC.checkArg nes
TC.checkLambda foldfun $ index_arg : index_arg : accargs ++ arrargs
TC.checkLambda fun $ index_arg : index_arg : accargs ++ accargs
let startt = map TC.argType accargs
funret = lambdaReturnType fun
foldret = lambdaReturnType foldfun
(fold_accret, _fold_arrret) = splitAt (length nes) foldret
unless (startt == funret) $
TC.bad $ TC.TypeError $
"Neutral value is of type " ++ prettyTuple startt ++
", but scan function returns type " ++ prettyTuple funret ++ "."
unless (startt == fold_accret) $
TC.bad $ TC.TypeError $
"Neutral value is of type " ++ prettyTuple startt ++
", but scan function returns type " ++ prettyTuple foldret ++ "."
typeCheckKernel (WriteKernel cs w lam _ivs as) = do
-- Requirements:
--
-- 0. @lambdaReturnType@ of @lam@ must be a list
-- [index types..., value types].
--
-- 1. The number of index types must be equal to the number of value types
-- and the number of arrays in @as@.
--
-- 2. Each index type must have the type i32.
--
-- 3. Each array pair in @as@ and the value types must have the same type
-- (though not necessarily the same length).
--
-- 4. Each array in @as@ is consumed. This is not really a check, but more
-- of a requirement, so that e.g. the source is not hoisted out of a
-- loop, which will mean it cannot be consumed.
--
-- Code:
-- First check the certificates and input size.
mapM_ (TC.requireI [Prim Cert]) cs
TC.require [Prim int32] w
-- 0.
let rts = lambdaReturnType lam
rtsLen = length rts `div` 2
rtsI = take rtsLen rts
rtsV = drop rtsLen rts
-- 1.
unless (rtsLen == length as)
$ TC.bad $ TC.TypeError "Write: Uneven number of index types, value types, and I/O arrays."
-- 2.
forM_ rtsI $ \rtI -> unless (Prim int32 == rtI)
$ TC.bad $ TC.TypeError "Write: Index return type must be i32."
forM_ (zip rtsV as) $ \(rtV, (aw, a)) -> do
-- All lengths must have type i32.
TC.require [Prim int32] aw
-- 3.
aType <- lookupType a
case (rtV, rowType aType) of
(Prim pt0, Prim pt1) | pt0 == pt1 ->
return ()
(Array pt0 _ _, Array pt1 _ _) | pt0 == pt1 ->
return ()
_ ->
TC.bad $ TC.TypeError
"Write values and input arrays do not have the same primitive type"
-- 4.
TC.consume =<< TC.lookupAliases a
typeCheckKernel NumGroups = return ()
typeCheckKernel GroupSize = return ()
typeCheckKernel (Kernel cs space kts kbody) = do
mapM_ (TC.requireI [Prim Cert]) cs
checkSpace space
mapM_ TC.checkType kts
mapM_ (TC.require [Prim int32] . snd) $ spaceDimensions space
TC.binding (scopeOfKernelSpace space) $
checkKernelBody kts kbody
where checkSpace (KernelSpace _ _ _ num_threads num_groups group_size structure) = do
mapM_ (TC.require [Prim int32]) [num_threads,num_groups,group_size]
case structure of
FlatSpace dims ->
mapM_ (TC.require [Prim int32] . snd) dims
NestedSpace dims ->
let (_, gdim_sizes, _, ldim_sizes) = unzip4 dims
in mapM_ (TC.require [Prim int32]) $ gdim_sizes ++ ldim_sizes
checkKernelBody ts (KernelBody stms res) =
checkKernelStms stms $ zipWithM_ checkKernelResult res ts
checkNestedKernelBody ts (KernelBody stms res) =
checkKernelStms stms $ zipWithM_ (TC.require . pure) ts res
checkKernelResult (ThreadsReturn which what) t = do
checkWhich which
TC.require [t] what
checkKernelResult (ConcatReturns _ w per_thread_elems v) t = do
TC.require [Prim int32] w
TC.require [Prim int32] per_thread_elems
vt <- lookupType v
unless (vt == t `arrayOfRow` arraySize 0 vt) $
TC.bad $ TC.TypeError $ "Invalid type for ConcatReturns " ++ pretty v
checkWhich AllThreads =
return ()
checkWhich (OneThreadPerGroup which) =
TC.require [Prim int32] which
checkWhich (ThreadsPerGroup limit) = do
mapM_ (TC.requireI [Prim int32] . fst) limit
mapM_ (TC.require [Prim int32] . snd) limit
checkWhich ThreadsInSpace =
return ()
checkKernelStms [] m = m
checkKernelStms (stm:stms') m = do
checkKernelStm stm
TC.binding (scopeOf stm) $ checkKernelStms stms' m
checkKernelStm (Thread which bnd) = do
checkWhich which
TC.checkBinding (bindingPattern bnd) (bindingExp bnd) $ return ()
checkKernelStm (SplitArray (size, chunks) _ w elems_per_thread arrs) = do
TC.require [Prim int32] elems_per_thread
TC.require [Prim int32] w
arrts <- map TC.argType <$> TC.checkSOACArrayArgs w arrs
forM_ (zip arrts chunks) $ \(arrt, chunk) -> do
let chunk_t = arrt `arrayOfRow` Var size
unless (chunk_t == patElemType chunk) $
TC.bad $ TC.TypeError "Invalid type annotation for splitArray chunk."
checkKernelStm (Combine pe cspace v) = do
mapM_ (TC.requireI [Prim int32]) is
mapM_ (TC.require [Prim int32]) ws
v_t <- subExpType v
let res_t = v_t `arrayOfShape` Shape ws
unless (patElemType pe == res_t) $
TC.bad $ TC.TypeError $ "Pattern element " ++ pretty pe
++ " must have type " ++ pretty res_t
where (is, ws) = unzip cspace
checkKernelStm (GroupReduce pes w lam input) = do
TC.require [Prim int32] w
let (nes, arrs) = unzip input
asArg t = (t, mempty)
neargs <- mapM TC.checkArg nes
arrargs <- TC.checkSOACArrayArgs w arrs
TC.checkLambda lam $
map asArg [Prim int32, Prim int32] ++
map TC.noArgAliases (neargs ++ arrargs)
unless (lambdaReturnType lam == map patElemType pes) $
TC.bad $ TC.TypeError
"Invalid type annotation for kernel reduction."
checkKernelStm (GroupStream pes w maxchunk lam accs arrs) = do
TC.require [Prim int32] w
TC.require [Prim int32] maxchunk
acc_args <- mapM (fmap TC.noArgAliases . TC.checkArg) accs
arr_args <- TC.checkSOACArrayArgs w arrs
checkGroupStreamLambda lam acc_args arr_args
unless (map TC.argType acc_args == map patElemType pes) $
TC.bad $ TC.TypeError
"Invalid type annotations for kernel group stream pattern."
checkKernelStm (GroupIf pes cond tb fb) = do
TC.require [Prim Bool] cond
checkNestedKernelBody (map patElemType pes) tb
checkNestedKernelBody (map patElemType pes) fb
checkGroupStreamLambda lam@(GroupStreamLambda block_size _ acc_params arr_params body) acc_args arr_args = do
unless (map TC.argType acc_args == map paramType acc_params) $
TC.bad $ TC.TypeError
"checkGroupStreamLambda: wrong accumulator arguments."
let arr_block_ts =
map ((`arrayOfRow` Var block_size) . TC.argType) arr_args
unless (map paramType arr_params == arr_block_ts) $
TC.bad $ TC.TypeError
"checkGroupStreamLambda: wrong array arguments."
let acc_consumable =
zip (map paramName acc_params) (map TC.argAliases acc_args)
arr_consumable =
zip (map paramName arr_params) (map TC.argAliases arr_args)
consumable = acc_consumable ++ arr_consumable
TC.binding (scopeOf lam) $ TC.consumeOnlyParams consumable $ do
TC.checkLambdaParams acc_params
TC.checkLambdaParams arr_params
checkNestedKernelBody (map TC.argType acc_args) body
checkKernelCrud :: TC.Checkable lore =>
[VName] -> SubExp -> KernelSize -> TC.TypeM lore ()
checkKernelCrud cs w kernel_size = do
mapM_ (TC.requireI [Prim Cert]) cs
TC.require [Prim int32] w
typeCheckKernelSize kernel_size
typeCheckKernelSize :: TC.Checkable lore =>
KernelSize -> TC.TypeM lore ()
typeCheckKernelSize (KernelSize num_groups workgroup_size per_thread_elements
num_elements offset_multiple num_threads) = do
TC.require [Prim int32] num_groups
TC.require [Prim int32] workgroup_size
TC.require [Prim int32] per_thread_elements
TC.require [Prim int32] num_elements
TC.require [Prim int32] offset_multiple
TC.require [Prim int32] num_threads
instance OpMetrics (Op lore) => OpMetrics (Kernel lore) where
opMetrics (ScanKernel _ _ _ lam foldfun _ _) =
inside "ScanKernel" $ lambdaMetrics lam >> lambdaMetrics foldfun
opMetrics (WriteKernel _cs _len lam _ivs _as) =
inside "WriteKernel" $ lambdaMetrics lam
opMetrics (Kernel _ _ _ kbody) =
inside "Kernel" $ kernelBodyMetrics kbody
where kernelBodyMetrics :: GenKernelBody res lore -> MetricsM ()
kernelBodyMetrics = mapM_ kernelStmMetrics . kernelBodyStms
kernelStmMetrics SplitArray{} =
seen "SplitArray"
kernelStmMetrics (Thread _ bnd) =
inside "Thread" $ bindingMetrics bnd
kernelStmMetrics Combine{} =
seen "Combine"
kernelStmMetrics (GroupReduce _ _ lam _) =
inside "GroupReduce" $ lambdaMetrics lam
kernelStmMetrics (GroupStream _ _ _ lam _ _) =
inside "GroupStream" $ groupStreamLambdaMetrics lam
kernelStmMetrics (GroupIf _ _ tb fb) =
inside "GroupIf" $ do
kernelBodyMetrics tb
kernelBodyMetrics fb
groupStreamLambdaMetrics =
kernelBodyMetrics . groupStreamLambdaBody
opMetrics NumGroups = seen "NumGroups"
opMetrics GroupSize = seen "GroupSize"
instance PrettyLore lore => PP.Pretty (Kernel lore) where
ppr (ScanKernel cs w kernel_size fun foldfun nes arrs) =
ppCertificates' cs <> text "scanKernel" <>
parens (ppr w <> comma </>
ppr kernel_size <> comma </>
PP.braces (commasep $ map ppr nes) <> comma </>
commasep (map ppr arrs) <> comma </>
ppr fun <> comma </> ppr foldfun)
ppr (WriteKernel cs len lam ivs as) =
ppCertificates' cs <> text "writeKernel" <>
parens (ppr len <> comma </>
commasep (map ppr ivs) <> comma </>
commasep (map ppr as) <> comma </>
ppr lam)
ppr NumGroups = text "$num_groups()"
ppr GroupSize = text "$group_size()"
ppr (Kernel cs space ts body) =
ppCertificates' cs <>
text "kernel" <>
PP.align (ppr space) <+>
PP.colon <+> ppTuple' ts <+> text "{" </>
PP.indent 2 (ppr body) </>
text "}"
instance Pretty KernelSpace where
ppr (KernelSpace f_gtid f_ltid gid num_threads num_groups group_size structure) =
parens (commasep [text "num groups:" <+> ppr num_groups,
text "group size:" <+> ppr group_size,
text "num threads:" <+> ppr num_threads,
text "global TID ->" <+> ppr f_gtid,
text "local TID ->" <+> ppr f_ltid,
text "group ID ->" <+> ppr gid]) </> structure'
where structure' =
case structure of
FlatSpace space ->
parens (commasep $ do
(i,d) <- space
return $ ppr i <+> "<" <+> ppr d)
NestedSpace space ->
parens (commasep $ do
(gtid,gd,ltid,ld) <- space
return $ ppr (gtid,ltid) <+> "<" <+> ppr (gd,ld))
instance PrettyLore lore => Pretty (KernelBody lore) where
ppr (KernelBody stms res) =
PP.stack (map ppr stms) </>
text "return" <+> PP.braces (PP.commasep $ map ppr res)
instance PrettyLore lore => Pretty (NestedKernelBody lore) where
ppr (KernelBody stms res) =
PP.stack (map ppr stms) </>
PP.braces (PP.commasep $ map ppr res)
instance PrettyLore lore => Pretty (KernelStm lore) where
ppr (SplitArray (n,chunks) o w elems_per_thread arrs) =
PP.annot (mapMaybe ppAnnot chunks) $
text "let" <+> parens (commasep $ ppr n : map ppr chunks) <+> PP.equals <+>
text ("splitArray" <> suff) <> parens (commasep $ ppr w : ppr elems_per_thread : map ppr arrs)
where suff = case o of InOrder -> ""
Disorder -> "Unordered"
ppr (Thread threads bnd@(Let pat attr e)) =
bindingAnnotation bnd $ PP.align $
text "let" <+> threads' <> PP.align (ppr pat) <+>
case (linebreak, ppExpLore attr e) of
(True, Nothing) -> PP.equals </>
PP.indent 2 e'
(_, Just ann) -> PP.equals </>
PP.indent 2 (ann </> e')
(False, Nothing) -> PP.equals <+> PP.align e'
where e' = ppr e
linebreak = case e of
DoLoop{} -> True
Op{} -> True
If{} -> True
PrimOp ArrayLit{} -> False
_ -> False
threads' = case threads of
AllThreads -> mempty
OneThreadPerGroup which -> mempty <+> ppr which
ThreadsPerGroup limit -> text " <" <+> ppr limit
ThreadsInSpace -> text "active "
ppr (Combine pe cspace what) =
PP.annot (mapMaybe ppAnnot [pe]) $
text "let" <+> PP.braces (ppr pe) <+> PP.equals <+>
text "combine" <> PP.apply (map f cspace ++ [ppr what])
where f (i, w) = ppr i <+> text "<" <+> ppr w
ppr (GroupReduce pes w lam input) =
PP.annot (mapMaybe ppAnnot pes) $
text "let" <+> PP.braces (PP.commasep $ map ppr pes) <+> PP.equals </>
PP.indent 2 (text "reduce" <> parens (commasep [ppr w,
ppr lam,
PP.braces (commasep $ map ppr nes),
commasep $ map ppr els]))
where (nes,els) = unzip input
ppr (GroupStream pes w maxchunk lam accs arrs) =
PP.annot (mapMaybe ppAnnot pes) $
text "let" <+> PP.braces (PP.commasep $ map ppr pes) <+> PP.equals </>
PP.indent 2
(text "stream" <>
parens (commasep [ppr w,
ppr maxchunk,
ppr lam,
PP.braces (commasep $ map ppr accs),
commasep $ map ppr arrs]))
ppr (GroupIf pes cond tb fb) =
PP.annot (mapMaybe ppAnnot pes) $
text "let" <+> PP.braces (PP.commasep $ map ppr pes) <+> PP.equals </>
PP.indent 2
(text "if" <+> ppr cond </>
text "then" <+> PP.align (ppr tb) </>
text "else" <+> PP.align (ppr fb))
instance PrettyLore lore => Pretty (GroupStreamLambda lore) where
ppr (GroupStreamLambda block_size block_offset acc_params arr_params body) =
PP.annot (mapMaybe ppAnnot params) $
text "fn" <+>
parens (commasep (block_size' : block_offset' : map ppr params)) <+>
text "=>" </> PP.indent 2 (ppr body)
where params = acc_params ++ arr_params
block_size' = text "int" <+> ppr block_size
block_offset' = text "int" <+> ppr block_offset
instance Pretty KernelResult where
ppr (ThreadsReturn AllThreads what) =
ppr what
ppr (ThreadsReturn (OneThreadPerGroup who) what) =
text "thread" <+> ppr who <+> text "returns" <+> ppr what
ppr (ThreadsReturn (ThreadsPerGroup limit) what) =
text "thread <" <+> ppr limit <+> text "returns" <+> ppr what
ppr (ThreadsReturn ThreadsInSpace what) =
text "thread in space returns" <+> ppr what
ppr (ConcatReturns o w per_thread_elems v) =
text "concat" <> suff <>
parens (commasep [ppr w, ppr per_thread_elems]) <+>
ppr v
where suff = case o of InOrder -> ""
Disorder -> "Permuted"
instance Pretty KernelSize where
ppr (KernelSize
num_chunks workgroup_size per_thread_elements
num_elements offset_multiple num_threads) =
PP.braces $ commasep [ppr num_chunks,
ppr workgroup_size,
ppr per_thread_elements,
ppr num_elements,
ppr offset_multiple,
ppr num_threads
]
| mrakgr/futhark | src/Futhark/Representation/Kernels/Kernel.hs | bsd-3-clause | 47,575 | 0 | 20 | 13,296 | 13,732 | 6,845 | 6,887 | 950 | 14 |
module Language.Iso.Target.Scala where
import Language.Iso.App
import Language.Iso.Fls
import Language.Iso.Ite
import Language.Iso.Lam
import Language.Iso.Tru
import Language.Iso.Var
newtype Scala = Scala { runScala :: String }
instance Show Scala where
show (Scala ps) = ps
instance Var Scala where
var x = Scala x
instance Lam Scala where
lam v b = Scala $ "(" ++ v ++ ") => {" ++ runScala b ++ "}"
instance App Scala where
app f x = Scala $ "(" ++ runScala f ++ ") (" ++ runScala x ++ ")"
instance Tru Scala where
  tru = Scala "true"
instance Fls Scala where
  fls = Scala "false"
instance Ite Scala where
ite b t f = Scala $
"if (" ++ runScala b ++
") {" ++ runScala t ++
"} else {" ++ runScala f ++
"}"
| joneshf/iso | src/Language/Iso/Target/Scala.hs | bsd-3-clause | 798 | 0 | 12 | 224 | 274 | 145 | 129 | 26 | 0 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeFamilies #-}
module HMenu.Search (
createIndex,
search,
Index,
tokenCount,
Indexable(..)
) where
import ClassyPrelude hiding (Index)
import Control.DeepSeq
import Control.Monad.State (State, execState, modify')
import Data.Binary
import qualified Data.HashMap.Strict as HM
import Data.Text (inits)
import Data.BinaryRef
type Token = Text
type Weight = Double
type WeightMap k = HashMap k Weight
type Index_ a = HashMap Token (WeightMap a)
newtype Index a = Index (Index_ a)
deriving (Eq, Show, Generic)
instance NFData a => NFData (Index a)
class (Hashable a, Eq a) => Indexable a where
data IndexableField a
fieldWeight :: IndexableField a -> Weight
fieldValue :: IndexableField a -> a -> Maybe Text
fieldList :: [IndexableField a]
type Indexer a = State (Index_ a) ()
createIndex :: Indexable a => [a] -> Index a
createIndex entries =
let rawIndex = execState (mapM_ indexEntry entries) mempty
tokens = mapToList rawIndex
filtered = mapMaybe weighten tokens
in Index $ mapFromList filtered
where
count = fromIntegral $ length entries
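      -- Weight each token by the fraction of entries it does not occur in;
      -- tokens present in (nearly) all entries carry no signal and are
      -- dropped entirely.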
weighten (t, m) =
let f = (count - fromIntegral (length m)) / count
in if f < 0.1
then Nothing
else Just (t, map (f *) m)
tokenCount :: Indexable a => Index a -> Int
tokenCount (Index i) = length i
search :: Indexable a => Index a -> Text -> [a]
search _ terms | null terms = []
search (Index index) terms =
let tokens = tokenize terms
matches = mapMaybe (`lookup` index) tokens
pairs = unionsWith (+) matches
in map fst $ sortOn (Down . snd) $ mapToList pairs
indexEntry :: Indexable a => a -> Indexer a
indexEntry e =
forM_ fieldList $ \f ->
forM_ (fieldValue f e) $ \v ->
indexField (fieldWeight f) v
where
indexField w t = do
let ts = tokenize t
d = fromIntegral $ length ts
forM_ ts $ indexToken (w / d)
      indexToken w t = modify' $ addToken e w t
addToken :: Indexable a => a -> Weight -> Token -> Index_ a -> Index_ a
addToken e w = alterMap $ \m ->
Just $ case m of
Nothing -> singletonMap e w
Just m' -> insertWith (+) e w m'
tokenize :: Text -> [Text]
tokenize t = concatMap (nGrams 3 8) (words $ toCaseFold t)
nGrams :: Int -> Int -> Text -> [Text]
nGrams a b t = t : drop a (take (b+1) $ inits t)
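-- For illustration, the n-grams generated for a single word are the word
-- itself plus its prefixes of at least 3 and at most 8 characters:
--
-- > nGrams 3 8 "haskell" == ["haskell","has","hask","haske","haskel","haskell"]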
instance (Indexable a, Binary a) => Binary (Index a) where
put (Index i) = putWithRefs go
where
go = do
lift (put $ HM.size i)
oforM_ (HM.toList i) putTokenPair
putTokenPair (t, es) = do
lift $ put t
lift (put $ HM.size es)
oforM_ (HM.toList es) putEntryPair
putEntryPair (e, w) = do
putRef e
lift $ put w
get = do
i <- getWithRefs go
return $ Index i
where
go = do
il <- lift get
ts <- replicateM il getTokenPair
return $ HM.fromList ts
getTokenPair = do
t <- lift (get :: Get Token)
el <- lift get
es <- replicateM el getEntryPair
return (t, HM.fromList es)
getEntryPair = do
e <- getRef
w <- lift get
return (e, w)
| Adirelle/hmenu | src/HMenu/Search.hs | bsd-3-clause | 3,635 | 0 | 16 | 1,316 | 1,286 | 649 | 637 | 97 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module VYPe15.Internal.AssemblyGenerator
( generateAssembly
)
where
import Prelude (Integral, Num, error, fromIntegral, negate)
import Control.Applicative (pure)
import Control.Monad (mapM_, return, (>>), (>>=))
import Control.Monad.State (get, modify, put)
import Control.Monad.Writer (tell)
import Data.Bool (Bool(False, True))
import Data.Char (ord)
import Data.Function (flip, ($), (.))
import Data.Functor ((<$>))
import Data.Int (Int32)
import Data.List (groupBy, reverse)
import qualified Data.Map as M (empty, foldlWithKey)
import Data.Maybe (Maybe(Just, Nothing))
import Data.Monoid ((<>))
import Data.Text (Text, unlines)
import VYPe15.Internal.Util (showText)
import VYPe15.Types.Assembly
( ASM(ADD, ADDIU, ADDU, AND, ANDI, Asciz', B, BEQZ, BGEZ, BGTZ, BLEZ, BLTZ,
BNEZ, Break, Byte', DIV, Data', JAL, JR, LA, LB, LI, LW, Label,
MFHi, MFLo, MOV, MOVZ, MUL, OR, Org',
PrintChar, PrintInt, PrintString, ReadChar, ReadInt, ReadString,
SB, SUB, SW, Text')
, Address(Data, RAM)
, Assembly
, AssemblyState(AssemblyState, functionLabel, functionType, labelCounter,
paramCounter, stringCounter, stringTable, variableCounter, variableTable)
, Register(A0, FP, RA, S0, SP, T0, T1, T2, T3, T7, V0, V1, Zero)
, addParam
, addString
, addVariable
, evalAssembly
, getFunctionLabel
, getFunctionType
, getReturnLabel
, getVarAddr
, lookupVarAddr
, mkLabel
)
import VYPe15.Types.AST
( DataType(DChar, DInt, DString)
, Identifier(getId)
, Param(AnonymousParam, Param)
, getTypeSize
)
import VYPe15.Types.SymbolTable
(Function(functionParams, functionReturn), Variable(Variable, varType))
import VYPe15.Types.TAC (Constant, Label, Operator, TAC)
import qualified VYPe15.Types.TAC as C (Constant(Char, Int, String))
import qualified VYPe15.Types.TAC as TAC
( TAC(Assign, Begin, Call, ChrStr, GetAt, Goto, JmpZ, Label, PopParams,
Print, PushParam, Read, Return, SetAt, Strcat)
)
import qualified VYPe15.Types.TAC as Op
( Operator(Add, And, Const, Div, Eq, GE, GT, LE, LT, MaskByte, Mod, Mul,
Neq, Not, Or, Set, Sub)
)
generateAssembly :: [TAC] -> Text
generateAssembly tac =
let asm = postProcessAssembly . evalAssembly initialState
. mapM_ generateAssembly' . reverse $ functions tac
in unlines $ showText <$> asm
-- intercalate "\n\n" (showText <$> functions tac)
-- showText $ fmap (evalAssembly initialState . generateAssembly') $ functions tac
where
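    -- Split the TAC stream into per-function chunks: the grouping predicate
    -- ignores its first argument and keeps extending a group as long as the
    -- next statement is not a 'TAC.Begin', so every 'TAC.Begin' starts a new
    -- group. (Note that 'isBegin' actually answers "is NOT a Begin".)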
functions = groupBy (\_ b -> isBegin b)
isBegin = \case
TAC.Begin _ _ -> False
_ -> True
initialState = AssemblyState
{ variableTable = M.empty
, stringTable = M.empty
, stringCounter = 0
, paramCounter = 0
, variableCounter = 0
, labelCounter = 0
, functionLabel = "__quit_program__"
, functionType = Nothing
}
generateAssembly' :: [TAC] -> Assembly ()
generateAssembly' tac' = do
state <- get
let (state', asm) = evalAssembly state $ mapM_ handleTAC tac'
put state'
postProcessFunction asm
postProcessAssembly :: (AssemblyState, [ASM]) -> [ASM]
postProcessAssembly (AssemblyState{..}, asm) = asm' <> asm
where
asm' = dataSection <> codeSection
dataSection = Data' : Byte' "__null_string_" [0] :
M.foldlWithKey (\b -> ((: b) .) . flip Asciz') [] stringTable
codeSection =
[ Text'
, Org' 0
, LI SP 0x8000
, MOV FP SP
, ADDIU S0 SP 0x04 -- Small gap between stack and heap just for safety.
, JAL "main"
, Break
]
postProcessFunction :: [ASM] -> Assembly ()
postProcessFunction asm = do
returnL <- getReturnLabel
functionL <- getFunctionLabel
stackSize <- variableCounter <$> get
  -- Intro
tell
[ Label functionL
, SW RA (sp 0)
, SW FP (sp (-4))
, SW S0 (sp (-8))
, ADDIU SP SP (-12)
, MOV FP SP
, ADDIU SP SP stackSize -- stackSize is already negative
]
tell asm
-- Return check
getFunctionType >>= \case
Just DString -> tell [LA V0 $ Data "__null_string_"]
_ -> tell [LI V0 0]
tell [B returnL]
-- Outro
tell
[ Label returnL
, ADDIU SP FP 12
, LW RA (sp 0)
, LW FP (sp (-4))
, LW S0 (sp (-8))
]
getFunctionType >>= \case
Just DString -> do
tell [MOV V1 S0]
copyString' S0 V0
tell [MOV V0 V1]
_ -> pure ()
tell [ JR RA ]
sp :: Int32 -> Address
sp = RAM SP
handleTAC :: TAC -> Assembly ()
handleTAC t = case t of
TAC.Assign var op -> handleAssign var op
TAC.Call mvar l -> handleCall mvar l
TAC.PushParam var -> handlePushParam var
TAC.PopParams n -> tell [ADDIU SP SP $ fromIntegral n]
TAC.Label l -> tell [Label l]
TAC.Begin l fn -> handleBegin l fn
TAC.JmpZ var l -> handleJmpZ var l
TAC.Goto l -> tell [B l]
TAC.Return mvar -> handleReturn mvar
TAC.Print var -> handlePrint var
TAC.Read var -> handleRead var
TAC.GetAt dst src off -> handleGetAt dst src off
TAC.SetAt dst src off char -> handleSetAt dst src off char
TAC.Strcat dst src1 src2 -> handleStrcat dst src1 src2
TAC.ChrStr dst src -> handleChrStr dst src
handleAssign :: Variable -> Operator -> Assembly ()
handleAssign dst = \case
Op.Mul v1 v2 -> binaryOpMFReg MUL MFLo v1 v2
Op.Div v1 v2 -> binaryOpMFReg DIV MFLo v1 v2
Op.Mod v1 v2 -> binaryOpMFReg DIV MFHi v1 v2
Op.Sub v1 v2 -> binaryOp SUB v1 v2
Op.Add v1 v2 -> binaryOp ADD v1 v2
Op.Set v -> loadVar T0 v >> storeVar T0 dst -- TODO: Type casting
Op.And v1 v2 -> binaryOp AND v1 v2
Op.Or v1 v2 -> binaryOp OR v1 v2
Op.Not v -> negateLogic v
Op.Eq v1 v2 -> binaryOpLogic BEQZ v1 v2 "Eq"
Op.Neq v1 v2 -> binaryOpLogic BNEZ v1 v2 "Neq"
Op.LT v1 v2 -> binaryOpLogic BLTZ v1 v2 "LT"
Op.LE v1 v2 -> binaryOpLogic BLEZ v1 v2 "LE"
Op.GT v1 v2 -> binaryOpLogic BGTZ v1 v2 "GT"
Op.GE v1 v2 -> binaryOpLogic BGEZ v1 v2 "GE"
Op.Const c -> loadConstant c
Op.MaskByte v -> do
loadVar T0 v
tell [ ANDI T2 T0 0xff ] -- Mask lowest byte
storeVar T2 dst
where
negateLogic :: Variable -> Assembly ()
negateLogic v = do
loadVar T0 v
tell
[ LI T1 1
, MOVZ T0 T1 T0
]
storeVar T0 dst
binaryOp
:: (Register -> Register -> Register -> ASM)
-> Variable
-> Variable
-> Assembly ()
binaryOp op v1 v2 = do
loadVar T0 v1
loadVar T1 v2
tell [op T2 T0 T1]
storeVar T2 dst
binaryOpMFReg
:: (Register -> Register -> ASM)
-> (Register -> ASM)
-> Variable
-> Variable
-> Assembly ()
binaryOpMFReg op mf v1 v2 = do
loadVar T0 v1
loadVar T1 v2
tell
[ op T0 T1
, mf T2
]
storeVar T2 dst
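    -- Comparisons are lowered via subtraction: tentatively load 1 into T2,
    -- compute v1 - v2 into T3, and branch over the "LI T2 0" when the given
    -- branch instruction (BEQZ/BNEZ/BLTZ/...) accepts the difference, leaving
    -- T2 = 1 for true and T2 = 0 for false.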
binaryOpLogic
:: (Register -> Label -> ASM)
-> Variable
-> Variable
-> Text
-> Assembly ()
binaryOpLogic branch v1 v2 labelName = do
loadVar T0 v1
loadVar T1 v2
l <- mkLabel labelName
tell
[ LI T2 1
, SUB T3 T0 T1
, branch T3 l
, LI T2 0
, Label l
]
storeVar T2 dst
loadConstant :: Constant -> Assembly ()
loadConstant = \case
C.Int n -> loadVal dst n
C.Char n -> loadVal dst $ ord n
C.String s -> loadString s
loadString :: Text -> Assembly ()
loadString s = do -- TODO: Copy string into it's own memory.
addr <- addString s
tell [LA T0 addr]
storeVar T0 dst
loadVal :: (Integral a) => Variable -> a -> Assembly ()
loadVal v n = do
tell [LI T0 $ fromIntegral n]
storeVar T0 v
handleBegin :: Label -> Function -> Assembly ()
handleBegin l fn = do
modify (\s -> s
{ variableTable = M.empty
, functionLabel = l
, functionType = functionReturn fn
, variableCounter = 0
    , paramCounter = 12 -- There is an offset due to the stack frame.
})
mapM_ (addParam . paramToVar) $ functionParams fn
where
paramToVar (Param dt id) = Variable (getId id) dt
paramToVar (AnonymousParam _) = error "BUG: Unexpected anonymous param."
handlePushParam :: Variable -> Assembly ()
handlePushParam v = do
v' <- getVarAddr v
tell
[ lv v A0 v'
, sv v A0 (RAM SP 0)
, ADDIU SP SP $ negate pSize
]
where
pSize = varSize v
handleCall :: Maybe Variable -> Label -> Assembly ()
handleCall mvar l = do
assign <- case mvar of
Just v -> do
v' <- addVariable v
return [sv v V0 v']
Nothing -> pure []
tell $ [JAL l] <> assign
handleReturn :: Maybe Variable -> Assembly ()
handleReturn mvar = do
assign <- case mvar of
Just v -> do
v' <- getVarAddr v
return [lv v V0 v']
Nothing -> pure []
retLabel <- getReturnLabel
tell $ assign <> [B retLabel]
handleJmpZ :: Variable -> Label -> Assembly ()
handleJmpZ v l = do
v' <- getVarAddr v
tell
[ lv v T0 v'
, BEQZ T0 l
]
handlePrint :: Variable -> Assembly ()
handlePrint v@(Variable _ vType) = do
v' <- getVarAddr v
let prtFn = case vType of
DInt -> PrintInt
DChar -> PrintChar
DString -> PrintString
tell
[ lv v T0 v'
, prtFn T0
]
handleRead :: Variable -> Assembly ()
handleRead v@(Variable _ vType) = do
v' <- addVariable v
case vType of
DInt -> readPrimitive v' ReadInt
DChar -> readPrimitive v' ReadChar
DString -> readString v'
where
readPrimitive :: Address -> (Register -> ASM) -> Assembly ()
readPrimitive addr readFn = tell
[ readFn T0
, sv v T0 addr
]
readString :: Address -> Assembly ()
readString addr = tell
[ ReadString S0 T0
, sv v S0 addr
, ADDU S0 S0 T0
]
copyString :: Variable -> Variable -> Assembly ()
copyString dst src = do
loadVar T0 src
tell [MOV V1 S0]
copyString' S0 T0
storeVar V1 dst
copyString' :: Register -> Register -> Assembly ()
copyString' dst src = do
begin <- mkLabel "copy_string"
tell
[ Label begin
, LB T7 (RAM src 0)
, SB T7 (RAM dst 0)
    -- v Strings are stored on the heap, which grows from low to high addresses.
, ADDIU src src 1
, ADDIU dst dst 1
, BNEZ T7 begin
]
handleGetAt :: Variable -> Variable -> Variable -> Assembly ()
handleGetAt dst src off = do
mkLabel "get_at" >>= tell . (:[]) . Label
loadVar T0 src
loadVar T1 off
tell
[ ADDU T2 T0 T1
, LB T3 $ RAM T2 0
]
storeVar T3 dst
handleSetAt :: Variable -> Variable -> Variable -> Variable -> Assembly ()
handleSetAt dst src off char = do
mkLabel "set_at" >>= tell . (:[]) . Label
copyString dst src
loadVar T0 off
loadVar T1 char
tell
[ ADDU T2 V1 T0
, SB T1 $ RAM T2 0
]
handleStrcat :: Variable -> Variable -> Variable -> Assembly ()
handleStrcat dst src1 src2 = do
mkLabel "strcat" >>= tell . (:[]) . Label
tell [MOV V1 S0]
loadVar T0 src1
copyString' S0 T0
tell [ADDIU S0 S0 (-1)] -- Rewind back to terminating character.
loadVar T0 src2
copyString' S0 T0
storeVar V1 dst
handleChrStr :: Variable -> Variable -> Assembly ()
handleChrStr dst src = do
mkLabel "chrStr" >>= tell . (:[]) . Label
loadVar T7 src
tell
[ MOV V1 S0
, SB T7 (RAM S0 0)
, SB Zero (RAM S0 1) -- String terminator
, ADDIU S0 S0 2
]
storeVar V1 dst
loadVar :: Register -> Variable -> Assembly ()
loadVar r v = do
v' <- getVarAddr v
tell [lv v r v']
storeVar :: Register -> Variable -> Assembly ()
storeVar r v = do
v' <- lookupVarAddr v >>= \case
Just addr -> return addr
Nothing -> addVariable v
tell [sv v r v']
lv :: Variable -> Register -> Address -> ASM
lv (Variable _ vType) = case vType of
DInt -> LW
DChar -> LB
DString -> LW
sv :: Variable -> Register -> Address -> ASM
sv (Variable _ vType) = case vType of
DInt -> SW
DChar -> SB
DString -> SW
varSize :: (Num n) => Variable -> n
varSize = fromIntegral . getTypeSize . varType
| Tr1p0d/VYPe15 | src/VYPe15/Internal/AssemblyGenerator.hs | bsd-3-clause | 12,875 | 0 | 17 | 4,095 | 4,467 | 2,318 | 2,149 | 372 | 19 |
{-# LANGUAGE AllowAmbiguousTypes #-}
-- | Different key/value serialization helpers abstracted over
-- 'MonadDB'.
module Pos.DB.Functions
(
-- * Encoded putting/getting
dbGetBi
, dbPutBi
-- * Decoding/encoding primitives and iteration related
, encodeWithKeyPrefix
, processIterEntry
) where
import Universum
import qualified Data.ByteString as BS (drop, isPrefixOf)
import Formatting (sformat, shown, string, (%))
import Pos.Binary.Class (Bi, decodeFull', serialize')
import Pos.DB.Class (DBIteratorClass (..), DBTag, IterType,
MonadDB (..), MonadDBRead (..))
import Pos.DB.Error (DBError (..))
import Pos.Util.Util (maybeThrow)
-- | Read serialized value (with version) associated with given key from pure DB.
dbGetBi
:: forall v m.
(Bi v, MonadDBRead m)
=> DBTag -> ByteString -> m (Maybe v)
dbGetBi tag key =
dbGet tag key >>= traverse (either throwM pure . dbDecodeIgnoreVersion)
-- | Write serializable value to DB for given key. Uses simple versioning.
dbPutBi :: (Bi v, MonadDB m) => DBTag -> ByteString -> v -> m ()
dbPutBi tag k v = dbPut tag k (serialize' v)
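-- | Decode a serialized value, tolerating an optional version prefix: first
-- try to decode a bare @v@; if that fails, decode a @(Word8, v)@ pair and
-- discard the leading version byte.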
dbDecodeIgnoreVersion :: forall v . Bi v => ByteString -> Either DBError v
dbDecodeIgnoreVersion bytes = case decodeFull' @v bytes of
Right val -> Right val
Left _ -> bimap DBMalformed snd $ decodeFull' @(Word8, v) bytes
dbDecodeMaybe :: (Bi v) => ByteString -> Maybe v
dbDecodeMaybe = rightToMaybe . decodeFull'
-- | Decode a key that carries the iterator prefix of @i@: strip the prefix
-- and decode the remainder, or return 'Nothing' if the prefix is absent or
-- decoding fails.
dbDecodeMaybeWP
:: forall i . (DBIteratorClass i, Bi (IterKey i))
=> ByteString -> Maybe (IterKey i)
dbDecodeMaybeWP s
| BS.isPrefixOf (iterKeyPrefix @i) s =
dbDecodeMaybe . BS.drop (length $ iterKeyPrefix @i) $ s
| otherwise = Nothing
-- | Encode iterator key using iterator prefix defined in
-- 'DBIteratorClass'.
encodeWithKeyPrefix
:: forall i . (DBIteratorClass i, Bi (IterKey i))
=> IterKey i -> ByteString
encodeWithKeyPrefix = (iterKeyPrefix @i <>) . serialize'
-- | Given a @(k,v)@ as pair of strings, try to decode both.
processIterEntry ::
forall i m.
(Bi (IterKey i), Bi (IterValue i), MonadThrow m, DBIteratorClass i)
=> (ByteString, ByteString)
-> m (Maybe (IterType i))
processIterEntry (key,val)
| BS.isPrefixOf prefix key = do
k <- maybeThrow (DBMalformed $ fmt key "key invalid")
(dbDecodeMaybeWP @i key)
v <- either throwM pure (dbDecodeIgnoreVersion val)
pure $ Just (k, v)
| otherwise = pure Nothing
where
prefix = iterKeyPrefix @i
fmt k err =
sformat
("Iterator entry with keyPrefix = "%shown%" is malformed: \
\key = "%shown%", err: " %string)
prefix k err
| input-output-hk/pos-haskell-prototype | db/src/Pos/DB/Functions.hs | mit | 2,818 | 0 | 13 | 723 | 806 | 431 | 375 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
module Statistics.Graph
( graphF
, writeGraph
) where
import Universum hiding (unlines)
import Control.Foldl (Fold (..))
import Data.Graph.Inductive.Graph (Graph (mkGraph))
import Data.Graph.Inductive.PatriciaTree (Gr)
import Data.GraphViz (DotGraph)
import qualified Data.GraphViz as G
import qualified Data.GraphViz.Attributes.Complete as A
import Data.GraphViz.Commands.IO (hPutDot)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Set (Set)
import qualified Data.Set as S
import Pos.Util (withTempFile)
import Prelude (unlines)
import System.Exit (ExitCode (ExitSuccess))
import System.IO (hGetContents, hPutStrLn)
import System.Process (readProcessWithExitCode)
import JSONLog (IndexedJLTimedEvent)
import Statistics.Block (BlockHeader (..), blockChain, blockHeadersF)
import Types
graphF :: Fold IndexedJLTimedEvent (DotGraph Int)
graphF = f <$> blockHeadersF
where
f :: Map BlockHash BlockHeader -> DotGraph Int
f m =
let nodes = zip [1..] $ [bh | (_, bh) <- M.toList m]
h2i = M.fromList [(bhHash bh, i) | (i, bh) <- nodes]
edges = do
(i, bh) <- nodes
let h = bhHash bh
h' = bhPrevBlock $ m M.! h
guard $ M.member h' h2i
return (i, h2i M.! h', ())
g = mkGraph nodes edges :: Gr BlockHeader ()
in G.graphToDot G.nonClusteredParams { G.fmtNode = fmt } g
where
chain :: Set BlockHash
chain = blockChain m
fmt :: (Int, BlockHeader) -> G.Attributes
fmt (_, bh) = [ A.Label $ A.StrLabel $ toLText $ unlines labels
, A.FillColor (if S.member (bhHash bh) chain
then [A.WC (A.X11Color G.Yellow) Nothing]
else [A.WC (A.X11Color G.LightGray) Nothing])
, A.Style [A.SItem A.Filled []]
]
where
labels :: [String]
labels = [ '#' : take 6 (toString $ bhHash bh)
, show (bhNode bh)
, show (bhSlot bh)
]
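-- Usage sketch: fold the indexed events with 'graphF' and hand the result to
-- 'writeGraph' (here @L.fold@ is Control.Foldl's 'fold', and the file name and
-- @events@ list are just placeholders):
--
-- > writeGraph "chain.png" (L.fold graphF events)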
writeGraph :: FilePath -> DotGraph Int -> IO Bool
writeGraph f g = withTempFile "." "graph.dot" $ \_ h -> do
hPutDot h g
b <- G.isGraphvizInstalled
    if b
        then do
            input <- hGetContents h
            ex <- view _1 <$> readProcessWithExitCode "dot" ["-Tpng", "-o" ++ f] input
            case ex of
                ExitSuccess -> return True
                _ -> hPutStrLn stderr ("Creating the graph failed, " ++ show ex) >> return False
        else hPutStrLn stderr "Cannot produce graph without dot. Please install graphviz." >> return False
| input-output-hk/pos-haskell-prototype | tools/post-mortem/src/Statistics/Graph.hs | mit | 2,946 | 0 | 21 | 1,037 | 888 | 482 | 406 | 62 | 3 |
module Language.Entry (
main,
) where
import Language.Repl
main :: IO ()
main = repl
| airtial/hellolisp.scala | src/Language/Entry.hs | mit | 89 | 0 | 6 | 19 | 32 | 19 | 13 | 5 | 1 |
module Test where
import Test.QuickCheck
import Debug.Trace
import Arbitrary
import Data.Map (Map)
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec
import Syntax
import Parser
import Printer
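-- | Generate finite multisets ("bags"), represented as maps from element to a
-- strictly positive count.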
bags :: Gen (Map Int Int)
bags = do
let positivePairs = arbitrary :: Gen (Int, Positive Int)
list <- listOf positivePairs
let listNum = map (\(x, Positive y) -> (x, y)) list
return (Map.fromList listNum)
prop_printParse_ident :: [Term] -> Bool
prop_printParse_ident t =
let str = showTerms t in
case parse Parser.term "<quickcheck>" str of
        Left _ -> False
Right t' -> t == t'
| jff/TeLLer | tests/Test.hs | gpl-3.0 | 623 | 0 | 14 | 124 | 225 | 119 | 106 | 22 | 2 |
{-# LANGUAGE RankNTypes #-}
{-| Implementation of the scheduler for the job queue.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.JQScheduler
( JQStatus
, jqLivelock
, emptyJQStatus
, selectJobsToRun
, scheduleSomeJobs
, initJQScheduler
, enqueueNewJobs
, dequeueJob
, setJobPriority
, cleanupIfDead
, updateStatusAndScheduleSomeJobs
, configChangeNeedsRescheduling
) where
import Control.Applicative (liftA2)
import Control.Arrow
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Function (on)
import Data.IORef (IORef, atomicModifyIORef,
atomicModifyIORef', newIORef, readIORef)
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import Data.Ord (comparing)
import Data.Set (Set)
import qualified Data.Set as S
import System.INotify
import Ganeti.BasicTypes
import Ganeti.Compat
import Ganeti.Constants as C
import Ganeti.Errors
import Ganeti.JQScheduler.Filtering (applyingFilter, jobFiltering)
import Ganeti.JQScheduler.Types
import Ganeti.JQScheduler.ReasonRateLimiting (reasonRateLimit)
import Ganeti.JQueue as JQ
import Ganeti.JSON (fromContainer)
import Ganeti.Lens hiding (chosen)
import Ganeti.Logging
import Ganeti.Objects
import Ganeti.Path
import Ganeti.Types
import Ganeti.Utils
import Ganeti.Utils.Livelock
import Ganeti.Utils.MVarLock
{-| Representation of the job queue
We keep two lists of jobs (together with information about the last
fstat result observed): the jobs that are enqueued, but not yet handed
over for execution, and the jobs already handed over for execution. They
are kept together in a single IORef, so that we can atomically update
both, in particular when scheduling jobs to be handed over for execution.
-}
data JQStatus = JQStatus
{ jqJobs :: IORef Queue
, jqConfig :: IORef (Result ConfigData)
, jqLivelock :: Livelock
, jqForkLock :: Lock
}
emptyJQStatus :: IORef (Result ConfigData) -> IO JQStatus
emptyJQStatus config = do
jqJ <- newIORef Queue { qEnqueued = [], qRunning = [], qManipulated = [] }
(_, livelock) <- mkLivelockFile C.luxiLivelockPrefix
forkLock <- newLock
return JQStatus { jqJobs = jqJ, jqConfig = config, jqLivelock = livelock
, jqForkLock = forkLock }
-- When updating the job lists, force the elements to WHNF, otherwise it is
-- easy to leak the resources held onto by the lazily parsed job file.
-- This can happen, e.g., if updateJob is called, but the resulting QueuedJob
-- isn't used by the scheduler, for example when the inotify watcher or the
-- polling loop re-reads a job with a new message appended to it.
-- | Apply a function on the running jobs.
onRunningJobs :: ([JobWithStat] -> [JobWithStat]) -> Queue -> Queue
onRunningJobs f q@Queue { qRunning = qr } =
let qr' = (foldr seq () qr) `seq` f qr -- force list els to WHNF
in q { qRunning = qr' }
-- | Apply a function on the queued jobs.
onQueuedJobs :: ([JobWithStat] -> [JobWithStat]) -> Queue -> Queue
onQueuedJobs f q@Queue { qEnqueued = qe } =
let qe' = (foldr seq () qe) `seq` f qe -- force list els to WHNF
in q { qEnqueued = qe' }
-- | Obtain a JobWithStat from a QueuedJob.
unreadJob :: QueuedJob -> JobWithStat
unreadJob job = JobWithStat {jJob=job, jStat=nullFStat, jINotify=Nothing}
-- | Reload interval for polling the running jobs for updates in microseconds.
watchInterval :: Int
watchInterval = C.luxidJobqueuePollInterval * 1000000
-- | Read a cluster parameter from the configuration, using a default if the
-- configuration is not available.
getConfigValue :: (Cluster -> a) -> a -> JQStatus -> IO a
getConfigValue param defaultvalue =
liftM (genericResult (const defaultvalue) (param . configCluster))
. readIORef . jqConfig
-- | Get the maximual number of jobs to be run simultaneously from the
-- configuration. If the configuration is not available, be conservative
-- and use the smallest possible value, i.e., 1.
getMaxRunningJobs :: JQStatus -> IO Int
getMaxRunningJobs = getConfigValue clusterMaxRunningJobs 1
-- | Get the maximual number of jobs to be tracked simultaneously from the
-- configuration. If the configuration is not available, be conservative
-- and use the smallest possible value, i.e., 1.
getMaxTrackedJobs :: JQStatus -> IO Int
getMaxTrackedJobs = getConfigValue clusterMaxTrackedJobs 1
-- | Get the number of jobs currently running.
getRQL :: JQStatus -> IO Int
getRQL = liftM (length . qRunning) . readIORef . jqJobs
-- | Wrapper function to atomically update the jobs in the queue status.
modifyJobs :: JQStatus -> (Queue -> Queue) -> IO ()
modifyJobs qstat f = atomicModifyIORef' (jqJobs qstat) (flip (,) () . f)
-- | Reread a job from disk, if the file has changed.
readJobStatus :: JobWithStat -> IO (Maybe JobWithStat)
readJobStatus jWS@(JobWithStat {jStat=fstat, jJob=job}) = do
let jid = qjId job
qdir <- queueDir
let fpath = liveJobFile qdir jid
logDebug $ "Checking if " ++ fpath ++ " changed on disk."
changedResult <- try $ needsReload fstat fpath
:: IO (Either IOError (Maybe FStat))
let changed = either (const $ Just nullFStat) id changedResult
case changed of
Nothing -> do
logDebug $ "File " ++ fpath ++ " not changed on disk."
return Nothing
Just fstat' -> do
let jids = show $ fromJobId jid
logDebug $ "Rereading job " ++ jids
readResult <- loadJobFromDisk qdir True jid
case readResult of
Bad s -> do
logWarning $ "Failed to read job " ++ jids ++ ": " ++ s
return Nothing
Ok (job', _) -> do
logDebug $ "Read job " ++ jids ++ ", status is "
++ show (calcJobStatus job')
return . Just $ jWS {jStat=fstat', jJob=job'}
-- jINotify unchanged
-- | Update a job in the job queue, if it is still there. This is the
-- pure function for inserting a previously read change into the queue.
-- As the change contains its time stamp, we don't have to worry about a
-- later read change overwriting a newer read state. If this happens, the
-- fstat value will be outdated, so the next poller run will fix this.
updateJobStatus :: JobWithStat -> [JobWithStat] -> [JobWithStat]
updateJobStatus job' =
let jid = qjId $ jJob job' in
map (\job -> if qjId (jJob job) == jid then job' else job)
-- | Update a single job by reading it from disk, if necessary.
updateJob :: JQStatus -> JobWithStat -> IO ()
updateJob state jb = do
jb' <- readJobStatus jb
maybe (return ()) (modifyJobs state . onRunningJobs . updateJobStatus) jb'
when (maybe True (jobFinalized . jJob) jb') . (>> return ()) . forkIO $ do
logDebug "Scheduler noticed a job to have finished."
cleanupFinishedJobs state
scheduleSomeJobs state
-- | Move a job from one part of the queue to another.
-- Return the job that was moved, or 'Nothing' if it wasn't found in
-- the queue.
moveJob :: Lens' Queue [JobWithStat] -- ^ from queue
-> Lens' Queue [JobWithStat] -- ^ to queue
-> JobId
-> Queue
-> (Queue, Maybe JobWithStat)
moveJob fromQ toQ jid queue =
-- traverse over the @(,) [JobWithStats]@ functor to extract the job
case traverseOf fromQ (partition ((== jid) . qjId . jJob)) queue of
(job : _, queue') -> (over toQ (++ [job]) queue', Just job)
_ -> (queue, Nothing)
-- | Atomically move a job from one part of the queue to another.
-- Return the job that was moved, or 'Nothing' if it wasn't found in
-- the queue.
moveJobAtomic :: Lens' Queue [JobWithStat] -- ^ from queue
-> Lens' Queue [JobWithStat] -- ^ to queue
-> JobId
-> JQStatus
-> IO (Maybe JobWithStat)
moveJobAtomic fromQ toQ jid qstat =
atomicModifyIORef (jqJobs qstat) (moveJob fromQ toQ jid)
-- | Manipulate a running job by atomically moving it from 'qRunning'
-- into 'qManipulated', running a given IO action and then atomically
-- returning it back.
--
-- Returns the result of the IO action, or 'Nothing', if the job wasn't found
-- in the queue.
manipulateRunningJob :: JQStatus -> JobId -> IO a -> IO (Maybe a)
manipulateRunningJob qstat jid k = do
jobOpt <- moveJobAtomic qRunningL qManipulatedL jid qstat
case jobOpt of
Nothing -> return Nothing
Just _ -> (Just `liftM` k)
`finally` moveJobAtomic qManipulatedL qRunningL jid qstat
-- | Sort out the finished jobs from the monitored part of the queue.
-- This is the pure part, splitting the queue into a remaining queue
-- and the jobs that were removed.
sortoutFinishedJobs :: Queue -> (Queue, [JobWithStat])
sortoutFinishedJobs queue =
let (fin, run') = partition (jobFinalized . jJob) . qRunning $ queue
in (queue {qRunning=run'}, fin)
-- | Actually clean up the finished jobs. This is the IO wrapper around
-- the pure `sortoutFinishedJobs`.
cleanupFinishedJobs :: JQStatus -> IO ()
cleanupFinishedJobs qstate = do
finished <- atomicModifyIORef (jqJobs qstate) sortoutFinishedJobs
let showJob = show . ((fromJobId . qjId) &&& calcJobStatus) . jJob
jlist = commaJoin $ map showJob finished
unless (null finished)
. logInfo $ "Finished jobs: " ++ jlist
mapM_ (maybe (return ()) killINotify . jINotify) finished
-- | Watcher task for a job, to update it on file changes. It also
-- reinstantiates itself upon receiving an Ignored event.
jobWatcher :: JQStatus -> JobWithStat -> Event -> IO ()
jobWatcher state jWS e = do
let jid = qjId $ jJob jWS
jids = show $ fromJobId jid
logInfo $ "Scheduler notified of change of job " ++ jids
logDebug $ "Scheduler notify event for " ++ jids ++ ": " ++ show e
let inotify = jINotify jWS
when (e == Ignored && isJust inotify) $ do
qdir <- queueDir
let fpath = toInotifyPath $ liveJobFile qdir jid
_ <- addWatch (fromJust inotify) [Modify, Delete] fpath
(jobWatcher state jWS)
return ()
updateJob state jWS
-- | Attach the job watcher to a running job.
attachWatcher :: JQStatus -> JobWithStat -> IO ()
attachWatcher state jWS = when (isNothing $ jINotify jWS) $ do
max_watch <- getMaxTrackedJobs state
rql <- getRQL state
if rql < max_watch
then do
inotify <- initINotify
qdir <- queueDir
let fpath = liveJobFile qdir . qjId $ jJob jWS
jWS' = jWS { jINotify=Just inotify }
logDebug $ "Attaching queue watcher for " ++ fpath
_ <- addWatch inotify [Modify, Delete] (toInotifyPath fpath)
$ jobWatcher state jWS'
modifyJobs state . onRunningJobs $ updateJobStatus jWS'
else logDebug $ "Not attaching watcher for job "
++ (show . fromJobId . qjId $ jJob jWS)
++ ", run queue length is " ++ show rql
-- | For a queued job, determine whether it is eligible to run, i.e.,
-- if no jobs it depends on are either enqueued or running.
jobEligible :: Queue -> JobWithStat -> Bool
jobEligible queue jWS =
let jdeps = getJobDependencies $ jJob jWS
blocks = flip elem jdeps . qjId . jJob
in not . any blocks . liftA2 (++) qRunning qEnqueued $ queue
-- | Decide on which jobs to schedule next for execution. This is the
-- pure function doing the scheduling.
selectJobsToRun :: Int -- ^ How many jobs are allowed to run at the
-- same time.
-> Set FilterRule -- ^ Filter rules to respect for scheduling
-> Queue
-> (Queue, [JobWithStat])
selectJobsToRun count filters queue =
let n = count - length (qRunning queue) - length (qManipulated queue)
chosen = take n
. jobFiltering queue filters
. reasonRateLimit queue
. sortBy (comparing (calcJobPriority . jJob))
. filter (jobEligible queue)
$ qEnqueued queue
remain = deleteFirstsBy ((==) `on` (qjId . jJob)) (qEnqueued queue) chosen
in (queue {qEnqueued=remain, qRunning=qRunning queue ++ chosen}, chosen)
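-- Illustrative helper (not part of the original module): the number of free
-- execution slots that 'selectJobsToRun' effectively fills, i.e. the configured
-- maximum minus the jobs already running or currently being manipulated.
freeJobSlots :: Int -> Queue -> Int
freeJobSlots count queue =
  count - length (qRunning queue) - length (qManipulated queue)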
-- | Logs errors of failed jobs and returns the set of job IDs.
logFailedJobs :: (MonadLog m)
=> [(JobWithStat, GanetiException)] -> m (S.Set JobId)
logFailedJobs [] = return S.empty
logFailedJobs jobs = do
let jids = S.fromList . map (qjId . jJob . fst) $ jobs
jidsString = commaJoin . map (show . fromJobId) . S.toList $ jids
logWarning $ "Starting jobs " ++ jidsString ++ " failed: "
++ show (map snd jobs)
return jids
-- | Fail jobs that were previously selected for execution
-- but couldn't be started.
failJobs :: ConfigData -> JQStatus -> [(JobWithStat, GanetiException)]
-> IO ()
failJobs cfg qstate jobs = do
qdir <- queueDir
now <- currentTimestamp
jids <- logFailedJobs jobs
let sjobs = intercalate "." . map (show . fromJobId) $ S.toList jids
let rmJobs = filter ((`S.notMember` jids) . qjId . jJob)
logWarning $ "Failing jobs " ++ sjobs
modifyJobs qstate $ onRunningJobs rmJobs
let trySaveJob :: JobWithStat -> ResultT String IO ()
trySaveJob = (() <$) . writeAndReplicateJob cfg qdir . jJob
reason jid msg =
( "gnt:daemon:luxid:startjobs"
, "job " ++ show (fromJobId jid) ++ " failed to start: " ++ msg
, reasonTrailTimestamp now )
failJob err job = failQueuedJob (reason (qjId job) (show err)) now job
failAndSaveJobWithStat (jws, err) =
trySaveJob . over jJobL (failJob err) $ jws
mapM_ (runResultT . failAndSaveJobWithStat) jobs
logDebug $ "Failed jobs " ++ sjobs
-- | Checks if any jobs match a REJECT filter rule, and cancels them.
cancelRejectedJobs :: JQStatus -> ConfigData -> Set FilterRule -> IO ()
cancelRejectedJobs qstate cfg filters = do
enqueuedJobs <- map jJob . qEnqueued <$> readIORef (jqJobs qstate)
-- Determine which jobs are rejected.
let jobsToCancel =
[ (job, fr) | job <- enqueuedJobs
, Just fr <- [applyingFilter filters job]
, frAction fr == Reject ]
-- Cancel them.
qDir <- queueDir
forM_ jobsToCancel $ \(job, fr) -> do
let jid = qjId job
logDebug $ "Cancelling job " ++ show (fromJobId jid)
++ " because it was REJECTed by filter rule " ++ uuidOf fr
-- First dequeue, then cancel.
dequeueResult <- dequeueJob qstate jid
case dequeueResult of
Ok True -> do
now <- currentTimestamp
r <- runResultT
$ writeAndReplicateJob cfg qDir (cancelQueuedJob now job)
case r of
Ok _ -> return ()
Bad err -> logError $
"Failed to write config when cancelling job: " ++ err
Ok False -> do
logDebug $ "Job " ++ show (fromJobId jid)
++ " not queued; trying to cancel directly"
_ <- cancelJob False (jqLivelock qstate) jid -- sigTERM-kill only
return ()
Bad s -> logError s -- passing a nonexistent job ID is an error here
-- | Schedule jobs to be run. This is the IO wrapper around the
-- pure `selectJobsToRun`.
scheduleSomeJobs :: JQStatus -> IO ()
scheduleSomeJobs qstate = do
cfgR <- readIORef (jqConfig qstate)
case cfgR of
Bad err -> do
let msg = "Configuration unavailable: " ++ err
logError msg
Ok cfg -> do
let filters = S.fromList . Map.elems . fromContainer $ configFilters cfg
-- Check if jobs are rejected by a REJECT filter, and cancel them.
cancelRejectedJobs qstate cfg filters
-- Select the jobs to run.
count <- getMaxRunningJobs qstate
chosen <- atomicModifyIORef (jqJobs qstate)
(selectJobsToRun count filters)
let jobs = map jJob chosen
unless (null chosen) . logInfo . (++) "Starting jobs: " . commaJoin
$ map (show . fromJobId . qjId) jobs
-- Attach the watcher.
mapM_ (attachWatcher qstate) chosen
-- Start the jobs.
result <- JQ.startJobs (jqLivelock qstate) (jqForkLock qstate) jobs
let badWith (x, Bad y) = Just (x, y)
badWith _ = Nothing
let failed = mapMaybe badWith $ zip chosen result
unless (null failed) $ failJobs cfg qstate failed
-- | Format the job queue status in a compact, human readable way.
showQueue :: Queue -> String
showQueue (Queue {qEnqueued=waiting, qRunning=running}) =
let showids = show . map (fromJobId . qjId . jJob)
in "Waiting jobs: " ++ showids waiting
++ "; running jobs: " ++ showids running
-- | Check if a job died, and clean up if so. Return True, if
-- the job was found dead.
checkForDeath :: JQStatus -> JobWithStat -> IO Bool
checkForDeath state jobWS = do
let job = jJob jobWS
jid = qjId job
sjid = show $ fromJobId jid
livelock = qjLivelock job
logDebug $ "Livelock of job " ++ sjid ++ " is " ++ show livelock
died <- maybe (return False) isDead
. mfilter (/= jqLivelock state)
$ livelock
logDebug $ "Death of " ++ sjid ++ ": " ++ show died
when died $ do
logInfo $ "Detected death of job " ++ sjid
-- if we manage to remove the job from the queue, we own the job file
-- and can manipulate it.
void . manipulateRunningJob state jid . runResultT $ do
jobWS' <- mkResultT $ readJobFromDisk jid :: ResultG JobWithStat
unless (jobFinalized . jJob $ jobWS') . void $ do
-- If the job isn't finalized, but dead, add a corresponding
-- failed status.
now <- liftIO currentTimestamp
qDir <- liftIO queueDir
let reason = ( "gnt:daemon:luxid:deathdetection"
, "detected death of job " ++ sjid
, reasonTrailTimestamp now )
failedJob = failQueuedJob reason now $ jJob jobWS'
cfg <- mkResultT . readIORef $ jqConfig state
writeAndReplicateJob cfg qDir failedJob
return died
-- | Trigger job detection for the job with the given job id.
-- Return True, if the job is dead.
cleanupIfDead :: JQStatus -> JobId -> IO Bool
cleanupIfDead state jid = do
logDebug $ "Extra job-death detection for " ++ show (fromJobId jid)
jobs <- readIORef (jqJobs state)
let jobWS = find ((==) jid . qjId . jJob) $ qRunning jobs
maybe (return True) (checkForDeath state) jobWS
-- | Force the queue to check the state of all jobs.
updateStatusAndScheduleSomeJobs :: JQStatus -> IO ()
updateStatusAndScheduleSomeJobs qstate = do
jobs <- readIORef (jqJobs qstate)
mapM_ (checkForDeath qstate) $ qRunning jobs
jobs' <- readIORef (jqJobs qstate)
mapM_ (updateJob qstate) $ qRunning jobs'
cleanupFinishedJobs qstate
jobs'' <- readIORef (jqJobs qstate)
logInfo $ showQueue jobs''
scheduleSomeJobs qstate
-- | Time-based watcher for updating the job queue.
onTimeWatcher :: JQStatus -> IO ()
onTimeWatcher qstate = forever $ do
threadDelay watchInterval
logDebug "Job queue watcher timer fired"
updateStatusAndScheduleSomeJobs qstate
logDebug "Job queue watcher cycle finished"
-- | Read a single, non-archived, job, specified by its id, from disk.
readJobFromDisk :: JobId -> IO (Result JobWithStat)
readJobFromDisk jid = do
qdir <- queueDir
let fpath = liveJobFile qdir jid
logDebug $ "Reading " ++ fpath
tryFstat <- try $ getFStat fpath :: IO (Either IOError FStat)
let fstat = either (const nullFStat) id tryFstat
loadResult <- JQ.loadJobFromDisk qdir False jid
return $ liftM (JobWithStat Nothing fstat . fst) loadResult
-- | Read all non-finalized jobs from disk.
readJobsFromDisk :: IO [JobWithStat]
readJobsFromDisk = do
logInfo "Loading job queue"
qdir <- queueDir
eitherJids <- JQ.getJobIDs [qdir]
let jids = genericResult (const []) JQ.sortJobIDs eitherJids
jidsstring = commaJoin $ map (show . fromJobId) jids
logInfo $ "Non-archived jobs on disk: " ++ jidsstring
jobs <- mapM readJobFromDisk jids
return $ justOk jobs
-- | Set up the job scheduler. This will also start the monitoring
-- of changes to the running jobs.
initJQScheduler :: JQStatus -> IO ()
initJQScheduler qstate = do
alljobs <- readJobsFromDisk
let jobs = filter (not . jobFinalized . jJob) alljobs
(running, queued) = partition (jobStarted . jJob) jobs
modifyJobs qstate (onQueuedJobs (++ queued) . onRunningJobs (++ running))
jqjobs <- readIORef (jqJobs qstate)
logInfo $ showQueue jqjobs
scheduleSomeJobs qstate
logInfo "Starting time-based job queue watcher"
_ <- forkIO $ onTimeWatcher qstate
return ()
-- | Enqueue new jobs. This will guarantee that the jobs will be executed
-- eventually.
enqueueNewJobs :: JQStatus -> [QueuedJob] -> IO ()
enqueueNewJobs state jobs = do
logInfo . (++) "New jobs enqueued: " . commaJoin
$ map (show . fromJobId . qjId) jobs
let jobs' = map unreadJob jobs
insertFn = insertBy (compare `on` fromJobId . qjId . jJob)
addJobs oldjobs = foldl (flip insertFn) oldjobs jobs'
modifyJobs state (onQueuedJobs addJobs)
scheduleSomeJobs state
-- | Pure function for removing a queued job from the job queue by
-- atomicModifyIORef. The answer is Just the job if the job could be removed
-- before being handed over to execution, Nothing if it already was started
-- and a Bad result if the job is not found in the queue.
rmJob :: JobId -> Queue -> (Queue, Result (Maybe QueuedJob))
rmJob jid q =
let isJid = (jid ==) . qjId . jJob
(found, queued') = partition isJid $ qEnqueued q
isRunning = any isJid $ qRunning q
sJid = (++) "Job " . show $ fromJobId jid
in case (found, isRunning) of
([job], _) -> (q {qEnqueued = queued'}, Ok . Just $ jJob job)
(_:_, _) -> (q, Bad $ "Queue in inconsistent state."
++ sJid ++ " queued multiple times")
(_, True) -> (q, Ok Nothing)
_ -> (q, Bad $ sJid ++ " not found in queue")
-- | Try to remove a queued job from the job queue. Return True, if
-- the job could be removed from the queue before being handed over
-- to execution, False if the job already started, and a Bad result
-- if the job is unknown.
dequeueJob :: JQStatus -> JobId -> IO (Result Bool)
dequeueJob state jid = do
result <- atomicModifyIORef (jqJobs state) $ rmJob jid
let result' = fmap isJust result
logDebug $ "Result of dequeing job " ++ show (fromJobId jid)
++ " is " ++ show result'
return result'
-- | Change the priority of a queued job (once the job is handed over
-- to execution, the job itself needs to be informed). To avoid the
-- job being started unmodified, it is temporarily unqueued during the
-- change. Return the modified job, if the job's priority was successfully
-- modified, Nothing, if the job already started, and a Bad value, if the job
-- is unknown.
setJobPriority :: JQStatus -> JobId -> Int -> IO (Result (Maybe QueuedJob))
setJobPriority state jid prio = runResultT $ do
maybeJob <- mkResultT . atomicModifyIORef (jqJobs state) $ rmJob jid
case maybeJob of
Nothing -> return Nothing
Just job -> do
let job' = changeJobPriority prio job
qDir <- liftIO queueDir
mkResultT $ writeJobToDisk qDir job'
liftIO $ enqueueNewJobs state [job']
return $ Just job'
-- | Given old and new configs, determines if the changes between them should
-- trigger the scheduler to run.
configChangeNeedsRescheduling :: ConfigData -> ConfigData -> Bool
configChangeNeedsRescheduling old new =
-- Trigger rescheduling if any of the following change:
(((/=) `on` configFilters) old new || -- filters
((/=) `on` clusterMaxRunningJobs . configCluster) old new -- run queue length
)
| mbakke/ganeti | src/Ganeti/JQScheduler.hs | bsd-2-clause | 24,689 | 0 | 20 | 5,658 | 5,915 | 2,982 | 2,933 | -1 | -1 |
module Cauterize.Dynamic.Meta.Types
( MetaType(..)
, MetaHeader(..)
) where
import Cauterize.Dynamic.Types
import Data.Word
data MetaHeader =
MetaHeader { metaLength :: Integer
, metaTag :: [Word8]
}
deriving (Show, Eq, Ord)
data MetaType =
MetaType { unMetaType :: CautType }
deriving (Show, Eq, Ord)
| cauterize-tools/cauterize | src/Cauterize/Dynamic/Meta/Types.hs | bsd-3-clause | 345 | 0 | 9 | 85 | 104 | 64 | 40 | 12 | 0 |
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1998
--
-- Type - public interface
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Main functions for manipulating types and type-related things
module Type (
-- Note some of this is just re-exports from TyCon..
-- * Main data types representing Types
-- $type_classification
-- $representation_types
TyThing(..), Type, KindOrType, PredType, ThetaType,
Var, TyVar, isTyVar,
-- ** Constructing and deconstructing types
mkTyVarTy, mkTyVarTys, getTyVar, getTyVar_maybe,
mkAppTy, mkAppTys, splitAppTy, splitAppTys,
splitAppTy_maybe, repSplitAppTy_maybe,
mkFunTy, mkFunTys, splitFunTy, splitFunTy_maybe,
splitFunTys, splitFunTysN,
funResultTy, funArgTy, zipFunTys,
mkTyConApp, mkTyConTy,
tyConAppTyCon_maybe, tyConAppArgs_maybe, tyConAppTyCon, tyConAppArgs,
splitTyConApp_maybe, splitTyConApp, tyConAppArgN, nextRole,
mkForAllTy, mkForAllTys, splitForAllTy_maybe, splitForAllTys,
mkPiKinds, mkPiType, mkPiTypes,
applyTy, applyTys, applyTysD, applyTysX, dropForAlls,
mkNumLitTy, isNumLitTy,
mkStrLitTy, isStrLitTy,
coAxNthLHS,
-- (Newtypes)
newTyConInstRhs,
-- Pred types
mkFamilyTyConApp,
isDictLikeTy,
mkEqPred, mkCoerciblePred, mkPrimEqPred, mkReprPrimEqPred,
mkClassPred,
isClassPred, isEqPred,
isIPPred, isIPPred_maybe, isIPTyCon, isIPClass,
isCTupleClass,
-- Deconstructing predicate types
PredTree(..), EqRel(..), eqRelRole, classifyPredType,
getClassPredTys, getClassPredTys_maybe,
getEqPredTys, getEqPredTys_maybe, getEqPredRole,
predTypeEqRel,
-- ** Common type constructors
funTyCon,
-- ** Predicates on types
isTypeVar, isKindVar, allDistinctTyVars, isForAllTy,
isTyVarTy, isFunTy, isDictTy, isPredTy, isVoidTy,
-- (Lifting and boxity)
isUnLiftedType, isUnboxedTupleType, isAlgType, isClosedAlgType,
isPrimitiveType, isStrictType,
-- * Main data types representing Kinds
-- $kind_subtyping
Kind, SimpleKind, MetaKindVar,
-- ** Finding the kind of a type
typeKind,
-- ** Common Kinds and SuperKinds
anyKind, liftedTypeKind, unliftedTypeKind, openTypeKind,
constraintKind, superKind,
-- ** Common Kind type constructors
liftedTypeKindTyCon, openTypeKindTyCon, unliftedTypeKindTyCon,
constraintKindTyCon, anyKindTyCon,
-- * Type free variables
tyVarsOfType, tyVarsOfTypes, closeOverKinds,
expandTypeSynonyms,
typeSize, varSetElemsKvsFirst,
-- * Type comparison
eqType, eqTypeX, eqTypes, cmpType, cmpTypes,
eqPred, eqPredX, cmpPred, eqKind, eqTyVarBndrs,
-- * Forcing evaluation of types
seqType, seqTypes,
-- * Other views onto Types
coreView, tcView,
UnaryType, RepType(..), flattenRepType, repType,
tyConsOfType,
-- * Type representation for the code generator
typePrimRep, typeRepArity,
-- * Main type substitution data types
TvSubstEnv, -- Representation widely visible
TvSubst(..), -- Representation visible to a few friends
-- ** Manipulating type substitutions
emptyTvSubstEnv, emptyTvSubst,
mkTvSubst, mkOpenTvSubst, zipOpenTvSubst, zipTopTvSubst, mkTopTvSubst, notElemTvSubst,
getTvSubstEnv, setTvSubstEnv,
zapTvSubstEnv, getTvInScope,
extendTvInScope, extendTvInScopeList,
extendTvSubst, extendTvSubstList,
isInScope, composeTvSubst, zipTyEnv,
isEmptyTvSubst, unionTvSubst,
-- ** Performing substitution on types and kinds
substTy, substTys, substTyWith, substTysWith, substTheta,
substTyVar, substTyVars, substTyVarBndr,
cloneTyVarBndr, deShadowTy, lookupTyVar,
substKiWith, substKisWith,
-- * Pretty-printing
pprType, pprParendType, pprTypeApp, pprTyThingCategory, pprTyThing,
pprTvBndr, pprTvBndrs, pprForAll, pprUserForAll, pprSigmaType,
pprTheta, pprThetaArrowTy, pprClassPred,
pprKind, pprParendKind, pprSourceTyCon,
TyPrec(..), maybeParen,
-- * Tidying type related things up for printing
tidyType, tidyTypes,
tidyOpenType, tidyOpenTypes,
tidyOpenKind,
tidyTyVarBndr, tidyTyVarBndrs, tidyFreeTyVars,
tidyOpenTyVar, tidyOpenTyVars,
tidyTyVarOcc,
tidyTopType,
tidyKind,
) where
#include "HsVersions.h"
-- We import the representation and primitive functions from TypeRep.
-- Many things are reexported, but not the representation!
import Kind
import TypeRep
-- friends:
import Var
import VarEnv
import VarSet
import NameEnv
import Class
import TyCon
import TysPrim
import {-# SOURCE #-} TysWiredIn ( eqTyCon, coercibleTyCon, typeNatKind, typeSymbolKind )
import PrelNames ( eqTyConKey, coercibleTyConKey,
ipClassNameKey, openTypeKindTyConKey,
constraintKindTyConKey, liftedTypeKindTyConKey )
import CoAxiom
-- others
import Unique ( Unique, hasKey )
import BasicTypes ( Arity, RepArity )
import Util
import ListSetOps ( getNth )
import Outputable
import FastString
import Maybes ( orElse )
import Data.Maybe ( isJust )
import Control.Monad ( guard )
infixr 3 `mkFunTy` -- Associates to the right
-- $type_classification
-- #type_classification#
--
-- Types are one of:
--
-- [Unboxed] Iff its representation is other than a pointer
-- Unboxed types are also unlifted.
--
-- [Lifted] Iff it has bottom as an element.
-- Closures always have lifted types: i.e. any
-- let-bound identifier in Core must have a lifted
-- type. Operationally, a lifted object is one that
-- can be entered.
-- Only lifted types may be unified with a type variable.
--
-- [Algebraic] Iff it is a type with one or more constructors, whether
-- declared with @data@ or @newtype@.
-- An algebraic type is one that can be deconstructed
-- with a case expression. This is /not/ the same as
-- lifted types, because we also include unboxed
-- tuples in this classification.
--
-- [Data] Iff it is a type declared with @data@, or a boxed tuple.
--
-- [Primitive] Iff it is a built-in type that can't be expressed in Haskell.
--
-- Currently, all primitive types are unlifted, but that's not necessarily
-- the case: for example, @Int@ could be primitive.
--
-- Some primitive types are unboxed, such as @Int#@, whereas some are boxed
-- but unlifted (such as @ByteArray#@). The only primitive types that we
-- classify as algebraic are the unboxed tuples.
--
-- Some examples of type classifications that may make this a bit clearer are:
--
-- @
-- Type primitive boxed lifted algebraic
-- -----------------------------------------------------------------------------
-- Int# Yes No No No
-- ByteArray# Yes Yes No No
-- (\# a, b \#) Yes No No Yes
-- ( a, b ) No Yes Yes Yes
-- [a] No Yes Yes Yes
-- @
-- $representation_types
-- A /source type/ is a type that is a separate type as far as the type checker is
-- concerned, but which has a more low-level representation as far as Core-to-Core
-- passes and the rest of the back end is concerned.
--
-- You don't normally have to worry about this, as the utility functions in
-- this module will automatically convert a source into a representation type
-- if they are spotted, to the best of its abilities. If you don't want this
-- to happen, use the equivalent functions from the "TcType" module.
{-
************************************************************************
* *
Type representation
* *
************************************************************************
-}
{-# INLINE coreView #-}
coreView :: Type -> Maybe Type
-- ^ This function strips off the /top layer only/ of a type synonym
-- application (if any) to give its underlying representation type.
-- Returns Nothing if there is nothing to look through.
--
-- By being non-recursive and inlined, this case analysis gets efficiently
-- joined onto the case analysis that the caller is already doing
coreView (TyConApp tc tys) | Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
= Just (mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys')
               -- It's important to use mkAppTys, rather than (foldl AppTy),
-- because the function part might well return a
-- partially-applied type constructor; indeed, usually will!
coreView _ = Nothing
-----------------------------------------------
{-# INLINE tcView #-}
tcView :: Type -> Maybe Type
-- ^ Historical only; 'tcView' and 'coreView' used to differ, but don't any more
tcView = coreView
-- ToDo: get rid of tcView altogether
-- You might think that tcView belongs in TcType rather than Type, but unfortunately
-- it is needed by Unify, which is in turn imported by Coercion (for MatchEnv and matchList).
-- So we will leave it here to avoid module loops.
-----------------------------------------------
expandTypeSynonyms :: Type -> Type
-- ^ Expand out all type synonyms. Actually, it'd suffice to expand out
-- just the ones that discard type variables (e.g. type Funny a = Int)
-- But we don't know which those are currently, so we just expand all.
expandTypeSynonyms ty
= go ty
where
go (TyConApp tc tys)
| Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
= go (mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys')
| otherwise
= TyConApp tc (map go tys)
go (LitTy l) = LitTy l
go (TyVarTy tv) = TyVarTy tv
go (AppTy t1 t2) = mkAppTy (go t1) (go t2)
go (FunTy t1 t2) = FunTy (go t1) (go t2)
go (ForAllTy tv t) = ForAllTy tv (go t)
{-
************************************************************************
* *
\subsection{Constructor-specific functions}
* *
************************************************************************
---------------------------------------------------------------------
TyVarTy
~~~~~~~
-}
-- | Attempts to obtain the type variable underlying a 'Type', and panics with the
-- given message if this is not a type variable type. See also 'getTyVar_maybe'
getTyVar :: String -> Type -> TyVar
getTyVar msg ty = case getTyVar_maybe ty of
Just tv -> tv
Nothing -> panic ("getTyVar: " ++ msg)
isTyVarTy :: Type -> Bool
isTyVarTy ty = isJust (getTyVar_maybe ty)
-- | Attempts to obtain the type variable underlying a 'Type'
getTyVar_maybe :: Type -> Maybe TyVar
getTyVar_maybe ty | Just ty' <- coreView ty = getTyVar_maybe ty'
getTyVar_maybe (TyVarTy tv) = Just tv
getTyVar_maybe _ = Nothing
allDistinctTyVars :: [KindOrType] -> Bool
allDistinctTyVars tkvs = go emptyVarSet tkvs
where
go _ [] = True
go so_far (ty : tys)
= case getTyVar_maybe ty of
Nothing -> False
Just tv | tv `elemVarSet` so_far -> False
| otherwise -> go (so_far `extendVarSet` tv) tys
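-- For example (illustrative note, not in the original source):
--
-- > allDistinctTyVars [a, b]   ==  True
-- > allDistinctTyVars [a, a]   ==  False
-- > allDistinctTyVars [a, Int] ==  False   -- a non-variable type also fails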
{-
---------------------------------------------------------------------
AppTy
~~~~~
We need to be pretty careful with AppTy to make sure we obey the
invariant that a TyConApp is always visibly so. mkAppTy maintains the
invariant: use it.
Note [Decomposing fat arrow c=>t]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Can we unify (a b) with (Eq a => ty)? If we do so, we end up with
a partial application like ((=>) Eq a) which doesn't make sense in
source Haskell. In contrast, we *can* unify (a b) with (t1 -> t2).
Here's an example (Trac #9858) of how you might do it:
i :: (Typeable a, Typeable b) => Proxy (a b) -> TypeRep
i p = typeRep p
j = i (Proxy :: Proxy (Eq Int => Int))
The type (Proxy (Eq Int => Int)) is only accepted with -XImpredicativeTypes,
but suppose we want that. But then in the call to 'i', we end
up decomposing (Eq Int => Int), and we definitely don't want that.
This really only applies to the type checker; in Core, '=>' and '->'
are the same, as are 'Constraint' and '*'. But for now I've put
the test in repSplitAppTy_maybe, which applies throughout, because
the other calls to splitAppTy are in Unify, which is also used by
the type checker (e.g. when matching type-function equations).
-}
-- | Applies a type to another, as in e.g. @k a@
mkAppTy :: Type -> Type -> Type
mkAppTy (TyConApp tc tys) ty2 = mkTyConApp tc (tys ++ [ty2])
mkAppTy ty1 ty2 = AppTy ty1 ty2
-- Note that the TyConApp could be an
-- under-saturated type synonym. GHC allows that; e.g.
-- type Foo k = k a -> k a
-- type Id x = x
-- foo :: Foo Id -> Foo Id
--
-- Here Id is partially applied in the type sig for Foo,
-- but once the type synonyms are expanded all is well
mkAppTys :: Type -> [Type] -> Type
mkAppTys ty1 [] = ty1
mkAppTys (TyConApp tc tys1) tys2 = mkTyConApp tc (tys1 ++ tys2)
mkAppTys ty1 tys2 = foldl AppTy ty1 tys2
-------------
splitAppTy_maybe :: Type -> Maybe (Type, Type)
-- ^ Attempt to take a type application apart, whether it is a
-- function, type constructor, or plain type application. Note
-- that type family applications are NEVER unsaturated by this!
splitAppTy_maybe ty | Just ty' <- coreView ty
= splitAppTy_maybe ty'
splitAppTy_maybe ty = repSplitAppTy_maybe ty
-------------
repSplitAppTy_maybe :: Type -> Maybe (Type,Type)
-- ^ Does the AppTy split as in 'splitAppTy_maybe', but assumes that
-- any Core view stuff is already done
repSplitAppTy_maybe (FunTy ty1 ty2)
| isConstraintKind (typeKind ty1) = Nothing -- See Note [Decomposing fat arrow c=>t]
| otherwise = Just (TyConApp funTyCon [ty1], ty2)
repSplitAppTy_maybe (AppTy ty1 ty2) = Just (ty1, ty2)
repSplitAppTy_maybe (TyConApp tc tys)
| mightBeUnsaturatedTyCon tc || tys `lengthExceeds` tyConArity tc
, Just (tys', ty') <- snocView tys
= Just (TyConApp tc tys', ty') -- Never create unsaturated type family apps!
repSplitAppTy_maybe _other = Nothing
-------------
splitAppTy :: Type -> (Type, Type)
-- ^ Attempts to take a type application apart, as in 'splitAppTy_maybe',
-- and panics if this is not possible
splitAppTy ty = case splitAppTy_maybe ty of
Just pr -> pr
Nothing -> panic "splitAppTy"
-------------
splitAppTys :: Type -> (Type, [Type])
-- ^ Recursively splits a type as far as is possible, leaving a residual
-- type being applied to and the type arguments applied to it. Never fails,
-- even if that means returning an empty list of type applications.
splitAppTys ty = split ty ty []
where
split orig_ty ty args | Just ty' <- coreView ty = split orig_ty ty' args
split _ (AppTy ty arg) args = split ty ty (arg:args)
split _ (TyConApp tc tc_args) args
= let -- keep type families saturated
n | mightBeUnsaturatedTyCon tc = 0
| otherwise = tyConArity tc
(tc_args1, tc_args2) = splitAt n tc_args
in
(TyConApp tc tc_args1, tc_args2 ++ args)
split _ (FunTy ty1 ty2) args = ASSERT( null args )
(TyConApp funTyCon [], [ty1,ty2])
split orig_ty _ args = (orig_ty, args)
{-
LitTy
~~~~~
-}
mkNumLitTy :: Integer -> Type
mkNumLitTy n = LitTy (NumTyLit n)
-- | Is this a numeric literal. We also look through type synonyms.
isNumLitTy :: Type -> Maybe Integer
isNumLitTy ty | Just ty1 <- tcView ty = isNumLitTy ty1
isNumLitTy (LitTy (NumTyLit n)) = Just n
isNumLitTy _ = Nothing
mkStrLitTy :: FastString -> Type
mkStrLitTy s = LitTy (StrTyLit s)
-- | Is this a symbol literal. We also look through type synonyms.
isStrLitTy :: Type -> Maybe FastString
isStrLitTy ty | Just ty1 <- tcView ty = isStrLitTy ty1
isStrLitTy (LitTy (StrTyLit s)) = Just s
isStrLitTy _ = Nothing
{-
---------------------------------------------------------------------
FunTy
~~~~~
-}
mkFunTy :: Type -> Type -> Type
-- ^ Creates a function type from the given argument and result type
mkFunTy arg res = FunTy arg res
mkFunTys :: [Type] -> Type -> Type
mkFunTys tys ty = foldr mkFunTy ty tys
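-- For example (illustrative note, not in the original source):
--
-- > mkFunTys [t1, t2] res  ==  t1 -> t2 -> res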
isFunTy :: Type -> Bool
isFunTy ty = isJust (splitFunTy_maybe ty)
splitFunTy :: Type -> (Type, Type)
-- ^ Attempts to extract the argument and result types from a type, and
-- panics if that is not possible. See also 'splitFunTy_maybe'
splitFunTy ty | Just ty' <- coreView ty = splitFunTy ty'
splitFunTy (FunTy arg res) = (arg, res)
splitFunTy other = pprPanic "splitFunTy" (ppr other)
splitFunTy_maybe :: Type -> Maybe (Type, Type)
-- ^ Attempts to extract the argument and result types from a type
splitFunTy_maybe ty | Just ty' <- coreView ty = splitFunTy_maybe ty'
splitFunTy_maybe (FunTy arg res) = Just (arg, res)
splitFunTy_maybe _ = Nothing
splitFunTys :: Type -> ([Type], Type)
splitFunTys ty = split [] ty ty
where
split args orig_ty ty | Just ty' <- coreView ty = split args orig_ty ty'
split args _ (FunTy arg res) = split (arg:args) res res
split args orig_ty _ = (reverse args, orig_ty)
splitFunTysN :: Int -> Type -> ([Type], Type)
-- ^ Split off exactly the given number argument types, and panics if that is not possible
splitFunTysN 0 ty = ([], ty)
splitFunTysN n ty = ASSERT2( isFunTy ty, int n <+> ppr ty )
case splitFunTy ty of { (arg, res) ->
case splitFunTysN (n-1) res of { (args, res) ->
(arg:args, res) }}
-- | Splits off argument types from the given type and associating
-- them with the things in the input list from left to right. The
-- final result type is returned, along with the resulting pairs of
-- objects and types, in the same left-to-right order as the input list.
-- Panics if there are not enough argument types for the input list.
zipFunTys :: Outputable a => [a] -> Type -> ([(a, Type)], Type)
zipFunTys orig_xs orig_ty = split [] orig_xs orig_ty orig_ty
where
split acc [] nty _ = (reverse acc, nty)
split acc xs nty ty
| Just ty' <- coreView ty = split acc xs nty ty'
split acc (x:xs) _ (FunTy arg res) = split ((x,arg):acc) xs res res
split _ _ _ _ = pprPanic "zipFunTys" (ppr orig_xs <+> ppr orig_ty)
funResultTy :: Type -> Type
-- ^ Extract the function result type and panic if that is not possible
funResultTy ty | Just ty' <- coreView ty = funResultTy ty'
funResultTy (FunTy _arg res) = res
funResultTy ty = pprPanic "funResultTy" (ppr ty)
funArgTy :: Type -> Type
-- ^ Extract the function argument type and panic if that is not possible
funArgTy ty | Just ty' <- coreView ty = funArgTy ty'
funArgTy (FunTy arg _res) = arg
funArgTy ty = pprPanic "funArgTy" (ppr ty)
{-
---------------------------------------------------------------------
TyConApp
~~~~~~~~
-}
-- | A key function: builds a 'TyConApp' or 'FunTy' as appropriate to
-- its arguments. Applies its arguments to the constructor from left to right.
mkTyConApp :: TyCon -> [Type] -> Type
mkTyConApp tycon tys
| isFunTyCon tycon, [ty1,ty2] <- tys
= FunTy ty1 ty2
| otherwise
= TyConApp tycon tys
-- splitTyConApp "looks through" synonyms, because they don't
-- mean a distinct type, but all other type-constructor applications
-- including functions are returned as Just ..
-- | The same as @fst . splitTyConApp@
tyConAppTyCon_maybe :: Type -> Maybe TyCon
tyConAppTyCon_maybe ty | Just ty' <- coreView ty = tyConAppTyCon_maybe ty'
tyConAppTyCon_maybe (TyConApp tc _) = Just tc
tyConAppTyCon_maybe (FunTy {}) = Just funTyCon
tyConAppTyCon_maybe _ = Nothing
tyConAppTyCon :: Type -> TyCon
tyConAppTyCon ty = tyConAppTyCon_maybe ty `orElse` pprPanic "tyConAppTyCon" (ppr ty)
-- | The same as @snd . splitTyConApp@
tyConAppArgs_maybe :: Type -> Maybe [Type]
tyConAppArgs_maybe ty | Just ty' <- coreView ty = tyConAppArgs_maybe ty'
tyConAppArgs_maybe (TyConApp _ tys) = Just tys
tyConAppArgs_maybe (FunTy arg res) = Just [arg,res]
tyConAppArgs_maybe _ = Nothing
tyConAppArgs :: Type -> [Type]
tyConAppArgs ty = tyConAppArgs_maybe ty `orElse` pprPanic "tyConAppArgs" (ppr ty)
tyConAppArgN :: Int -> Type -> Type
-- Executing Nth
tyConAppArgN n ty
= case tyConAppArgs_maybe ty of
Just tys -> ASSERT2( n < length tys, ppr n <+> ppr tys ) tys !! n
Nothing -> pprPanic "tyConAppArgN" (ppr n <+> ppr ty)
-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor. Panics if that is not possible.
-- See also 'splitTyConApp_maybe'
splitTyConApp :: Type -> (TyCon, [Type])
splitTyConApp ty = case splitTyConApp_maybe ty of
Just stuff -> stuff
Nothing -> pprPanic "splitTyConApp" (ppr ty)
-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor
splitTyConApp_maybe :: Type -> Maybe (TyCon, [Type])
splitTyConApp_maybe ty | Just ty' <- coreView ty = splitTyConApp_maybe ty'
splitTyConApp_maybe (TyConApp tc tys) = Just (tc, tys)
splitTyConApp_maybe (FunTy arg res) = Just (funTyCon, [arg,res])
splitTyConApp_maybe _ = Nothing
-- | What is the role assigned to the next parameter of this type? Usually,
-- this will be 'Nominal', but if the type is a 'TyConApp', we may be able to
-- do better. The type does *not* have to be well-kinded when applied for this
-- to work!
nextRole :: Type -> Role
nextRole ty
| Just (tc, tys) <- splitTyConApp_maybe ty
, let num_tys = length tys
, num_tys < tyConArity tc
= tyConRoles tc `getNth` num_tys
| otherwise
= Nominal
newTyConInstRhs :: TyCon -> [Type] -> Type
-- ^ Unwrap one 'layer' of newtype on a type constructor and its
-- arguments, using an eta-reduced version of the @newtype@ if possible.
-- This requires tys to have at least @newTyConInstArity tycon@ elements.
newTyConInstRhs tycon tys
= ASSERT2( tvs `leLength` tys, ppr tycon $$ ppr tys $$ ppr tvs )
applyTysX tvs rhs tys
where
(tvs, rhs) = newTyConEtadRhs tycon
{-
---------------------------------------------------------------------
SynTy
~~~~~
Notes on type synonyms
~~~~~~~~~~~~~~~~~~~~~~
The various "split" functions (splitFunTy, splitRhoTy, splitForAllTy) try
to return type synonyms wherever possible. Thus
type Foo a = a -> a
we want
splitFunTys (a -> Foo a) = ([a], Foo a)
not ([a], a -> a)
The reason is that we then get better (shorter) type signatures in
interfaces. Notably this plays a role in tcTySigs in TcBinds.hs.
Representation types
~~~~~~~~~~~~~~~~~~~~
Note [Nullary unboxed tuple]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We represent the nullary unboxed tuple as the unary (but void) type
Void#. The reason for this is that the ReprArity is never
less than the Arity (as it would otherwise be for a function type like
(# #) -> Int).
As a result, ReprArity is always strictly positive if Arity is. This
is important because it allows us to distinguish at runtime between a
thunk and a function that takes a nullary unboxed tuple as an argument!
-}
type UnaryType = Type
data RepType = UbxTupleRep [UnaryType] -- INVARIANT: never an empty list (see Note [Nullary unboxed tuple])
| UnaryRep UnaryType
flattenRepType :: RepType -> [UnaryType]
flattenRepType (UbxTupleRep tys) = tys
flattenRepType (UnaryRep ty) = [ty]
-- | Looks through:
--
-- 1. For-alls
-- 2. Synonyms
-- 3. Predicates
-- 4. All newtypes, including recursive ones, but not newtype families
--
-- It's useful in the back end of the compiler.
repType :: Type -> RepType
repType ty
= go initRecTc ty
where
go :: RecTcChecker -> Type -> RepType
go rec_nts ty -- Expand predicates and synonyms
| Just ty' <- coreView ty
= go rec_nts ty'
go rec_nts (ForAllTy _ ty) -- Drop foralls
= go rec_nts ty
go rec_nts (TyConApp tc tys) -- Expand newtypes
| isNewTyCon tc
, tys `lengthAtLeast` tyConArity tc
, Just rec_nts' <- checkRecTc rec_nts tc -- See Note [Expanding newtypes] in TyCon
= go rec_nts' (newTyConInstRhs tc tys)
| isUnboxedTupleTyCon tc
= if null tys
then UnaryRep voidPrimTy -- See Note [Nullary unboxed tuple]
else UbxTupleRep (concatMap (flattenRepType . go rec_nts) tys)
go _ ty = UnaryRep ty
-- | All type constructors occurring in the type; looking through type
-- synonyms, but not newtypes.
-- When it finds a Class, it returns the class TyCon.
tyConsOfType :: Type -> NameEnv TyCon
tyConsOfType ty
= go ty
where
go :: Type -> NameEnv TyCon -- The NameEnv does duplicate elim
go ty | Just ty' <- tcView ty = go ty'
go (TyVarTy {}) = emptyNameEnv
go (LitTy {}) = emptyNameEnv
go (TyConApp tc tys) = go_tc tc tys
go (AppTy a b) = go a `plusNameEnv` go b
go (FunTy a b) = go a `plusNameEnv` go b
go (ForAllTy _ ty) = go ty
go_tc tc tys = extendNameEnv (go_s tys) (tyConName tc) tc
go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys
-- ToDo: this could be moved to the code generator, using splitTyConApp instead
-- of inspecting the type directly.
-- | Discovers the primitive representation of a more abstract 'UnaryType'
typePrimRep :: UnaryType -> PrimRep
typePrimRep ty
= case repType ty of
UbxTupleRep _ -> pprPanic "typePrimRep: UbxTupleRep" (ppr ty)
UnaryRep rep -> case rep of
TyConApp tc _ -> tyConPrimRep tc
FunTy _ _ -> PtrRep
AppTy _ _ -> PtrRep -- See Note [AppTy rep]
TyVarTy _ -> PtrRep
_ -> pprPanic "typePrimRep: UnaryRep" (ppr ty)
typeRepArity :: Arity -> Type -> RepArity
typeRepArity 0 _ = 0
typeRepArity n ty = case repType ty of
UnaryRep (FunTy ty1 ty2) -> length (flattenRepType (repType ty1)) + typeRepArity (n - 1) ty2
_ -> pprPanic "typeRepArity: arity greater than type can handle" (ppr (n, ty))
isVoidTy :: Type -> Bool
-- True if the type has zero width
isVoidTy ty = case repType ty of
UnaryRep (TyConApp tc _) -> isVoidRep (tyConPrimRep tc)
_ -> False
{-
Note [AppTy rep]
~~~~~~~~~~~~~~~~
Types of the form 'f a' must be of kind *, not #, so we are guaranteed
that they are represented by pointers. The reason is that f must have
kind (kk -> kk) and kk cannot be unlifted; see Note [The kind invariant]
in TypeRep.
---------------------------------------------------------------------
ForAllTy
~~~~~~~~
-}
mkForAllTy :: TyVar -> Type -> Type
mkForAllTy tyvar ty
= ForAllTy tyvar ty
-- | Wraps foralls over the type using the provided 'TyVar's from left to right
mkForAllTys :: [TyVar] -> Type -> Type
mkForAllTys tyvars ty = foldr ForAllTy ty tyvars
mkPiKinds :: [TyVar] -> Kind -> Kind
-- mkPiKinds [k1, k2, (a:k1 -> *)] k2
-- returns forall k1 k2. (k1 -> *) -> k2
mkPiKinds [] res = res
mkPiKinds (tv:tvs) res
| isKindVar tv = ForAllTy tv (mkPiKinds tvs res)
| otherwise = FunTy (tyVarKind tv) (mkPiKinds tvs res)
mkPiType :: Var -> Type -> Type
-- ^ Makes a @(->)@ type or a forall type, depending
-- on whether it is given a type variable or a term variable.
mkPiTypes :: [Var] -> Type -> Type
-- ^ 'mkPiType' for multiple type or value arguments
mkPiType v ty
| isId v = mkFunTy (varType v) ty
| otherwise = mkForAllTy v ty
mkPiTypes vs ty = foldr mkPiType ty vs
isForAllTy :: Type -> Bool
isForAllTy (ForAllTy _ _) = True
isForAllTy _ = False
-- | Attempts to take a forall type apart, returning the bound type variable
-- and the remainder of the type
splitForAllTy_maybe :: Type -> Maybe (TyVar, Type)
splitForAllTy_maybe ty = splitFAT_m ty
where
splitFAT_m ty | Just ty' <- coreView ty = splitFAT_m ty'
splitFAT_m (ForAllTy tyvar ty) = Just(tyvar, ty)
splitFAT_m _ = Nothing
-- | Attempts to take a forall type apart, returning all the immediate such bound
-- type variables and the remainder of the type. Always succeeds, even if that means
-- returning an empty list of 'TyVar's
splitForAllTys :: Type -> ([TyVar], Type)
splitForAllTys ty = split ty ty []
where
split orig_ty ty tvs | Just ty' <- coreView ty = split orig_ty ty' tvs
split _ (ForAllTy tv ty) tvs = split ty ty (tv:tvs)
split orig_ty _ tvs = (reverse tvs, orig_ty)
-- | Equivalent to @snd . splitForAllTys@
dropForAlls :: Type -> Type
dropForAlls ty = snd (splitForAllTys ty)
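-- For example (illustrative note, not in the original source):
--
-- > splitForAllTys (forall a b. a -> b)  ==  ([a, b], a -> b)
-- > dropForAlls    (forall a b. a -> b)  ==  a -> b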
{-
-- (mkPiType now in CoreUtils)
applyTy, applyTys
~~~~~~~~~~~~~~~~~
-}
-- | Instantiate a forall type with one or more type arguments.
-- Used when we have a polymorphic function applied to type args:
--
-- > f t1 t2
--
-- We use @applyTys type-of-f [t1,t2]@ to compute the type of the expression.
-- Panics if no application is possible.
applyTy :: Type -> KindOrType -> Type
applyTy ty arg | Just ty' <- coreView ty = applyTy ty' arg
applyTy (ForAllTy tv ty) arg = substTyWith [tv] [arg] ty
applyTy _ _ = panic "applyTy"
applyTys :: Type -> [KindOrType] -> Type
-- ^ This function is interesting because:
--
-- 1. The function may have more for-alls than there are args
--
-- 2. Less obviously, it may have fewer for-alls
--
-- For case 2. think of:
--
-- > applyTys (forall a.a) [forall b.b, Int]
--
-- This really can happen, but only (I think) in situations involving
-- undefined. For example:
-- undefined :: forall a. a
-- Term: undefined @(forall b. b->b) @Int
-- This term should have type (Int -> Int), but notice that
-- there are more type args than foralls in 'undefined's type.
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
applyTys ty args = applyTysD empty ty args
applyTysD :: SDoc -> Type -> [Type] -> Type -- Debug version
applyTysD _ orig_fun_ty [] = orig_fun_ty
applyTysD doc orig_fun_ty arg_tys
| n_tvs == n_args -- The vastly common case
= substTyWith tvs arg_tys rho_ty
| n_tvs > n_args -- Too many for-alls
= substTyWith (take n_args tvs) arg_tys
(mkForAllTys (drop n_args tvs) rho_ty)
| otherwise -- Too many type args
= ASSERT2( n_tvs > 0, doc $$ ppr orig_fun_ty $$ ppr arg_tys ) -- Zero case gives infinite loop!
applyTysD doc (substTyWith tvs (take n_tvs arg_tys) rho_ty)
(drop n_tvs arg_tys)
where
(tvs, rho_ty) = splitForAllTys orig_fun_ty
n_tvs = length tvs
n_args = length arg_tys
applyTysX :: [TyVar] -> Type -> [Type] -> Type
-- applyTysX beta-reduces (/\tvs. body_ty) arg_tys
applyTysX tvs body_ty arg_tys
= ASSERT2( length arg_tys >= n_tvs, ppr tvs $$ ppr body_ty $$ ppr arg_tys )
mkAppTys (substTyWith tvs (take n_tvs arg_tys) body_ty)
(drop n_tvs arg_tys)
where
n_tvs = length tvs
{-
************************************************************************
* *
Pred
* *
************************************************************************
Predicates on PredType
-}
isPredTy :: Type -> Bool
-- NB: isPredTy is used when printing types, which can happen in debug printing
-- during type checking of not-fully-zonked types. So it's not cool to say
-- isConstraintKind (typeKind ty) because absent zonking the type might
-- be ill-kinded, and typeKind crashes
-- Hence the rather tiresome story here
isPredTy ty = go ty []
where
go :: Type -> [KindOrType] -> Bool
go (AppTy ty1 ty2) args = go ty1 (ty2 : args)
go (TyConApp tc tys) args = go_k (tyConKind tc) (tys ++ args)
go (TyVarTy tv) args = go_k (tyVarKind tv) args
go _ _ = False
go_k :: Kind -> [KindOrType] -> Bool
-- True <=> kind is k1 -> .. -> kn -> Constraint
go_k k [] = isConstraintKind k
go_k (FunTy _ k1) (_ :args) = go_k k1 args
go_k (ForAllTy kv k1) (k2:args) = go_k (substKiWith [kv] [k2] k1) args
go_k _ _ = False -- Typeable * Int :: Constraint
isClassPred, isEqPred, isIPPred :: PredType -> Bool
isClassPred ty = case tyConAppTyCon_maybe ty of
Just tyCon | isClassTyCon tyCon -> True
_ -> False
isEqPred ty = case tyConAppTyCon_maybe ty of
Just tyCon -> tyCon `hasKey` eqTyConKey
_ -> False
isIPPred ty = case tyConAppTyCon_maybe ty of
Just tc -> isIPTyCon tc
_ -> False
isIPTyCon :: TyCon -> Bool
isIPTyCon tc = tc `hasKey` ipClassNameKey
isIPClass :: Class -> Bool
isIPClass cls = cls `hasKey` ipClassNameKey
  -- Class and its corresponding TyCon have the same Unique
isCTupleClass :: Class -> Bool
isCTupleClass cls = isTupleTyCon (classTyCon cls)
isIPPred_maybe :: Type -> Maybe (FastString, Type)
isIPPred_maybe ty =
do (tc,[t1,t2]) <- splitTyConApp_maybe ty
guard (isIPTyCon tc)
x <- isStrLitTy t1
return (x,t2)
{-
Make PredTypes
--------------------- Equality types ---------------------------------
-}
-- | Creates a type equality predicate
mkEqPred :: Type -> Type -> PredType
mkEqPred ty1 ty2
= WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 $$ ppr k $$ ppr (typeKind ty2) )
TyConApp eqTyCon [k, ty1, ty2]
where
k = typeKind ty1
mkCoerciblePred :: Type -> Type -> PredType
mkCoerciblePred ty1 ty2
= WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 $$ ppr k $$ ppr (typeKind ty2) )
TyConApp coercibleTyCon [k, ty1, ty2]
where
k = typeKind ty1
mkPrimEqPred :: Type -> Type -> Type
mkPrimEqPred ty1 ty2
= WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 )
TyConApp eqPrimTyCon [k, ty1, ty2]
where
k = typeKind ty1
mkReprPrimEqPred :: Type -> Type -> Type
mkReprPrimEqPred ty1 ty2
= WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 )
TyConApp eqReprPrimTyCon [k, ty1, ty2]
where
k = typeKind ty1
-- --------------------- Dictionary types ---------------------------------
mkClassPred :: Class -> [Type] -> PredType
mkClassPred clas tys = TyConApp (classTyCon clas) tys
isDictTy :: Type -> Bool
isDictTy = isClassPred
isDictLikeTy :: Type -> Bool
-- Note [Dictionary-like types]
isDictLikeTy ty | Just ty' <- coreView ty = isDictLikeTy ty'
isDictLikeTy ty = case splitTyConApp_maybe ty of
Just (tc, tys) | isClassTyCon tc -> True
| isTupleTyCon tc -> all isDictLikeTy tys
_other -> False
{-
Note [Dictionary-like types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Being "dictionary-like" means either a dictionary type or a tuple thereof.
In GHC 6.10 we build implication constraints which construct such tuples,
and if we land up with a binding
t :: (C [a], Eq [a])
t = blah
then we want to treat t as cheap under "-fdicts-cheap" for example.
(Implication constraints are normally inlined, but sadly not if the
occurrence is itself inside an INLINE function! Until we revise the
handling of implication constraints, that is.) This turned out to
be important in getting good arities in DPH code. Example:
class C a
class D a where { foo :: a -> a }
instance C a => D (Maybe a) where { foo x = x }
bar :: (C a, C b) => a -> b -> (Maybe a, Maybe b)
{-# INLINE bar #-}
bar x y = (foo (Just x), foo (Just y))
Then 'bar' should jolly well have arity 4 (two dicts, two args), but
we ended up with something like
bar = __inline_me__ (\d1,d2. let t :: (D (Maybe a), D (Maybe b)) = ...
in \x,y. <blah>)
This is all a bit ad-hoc; eg it relies on knowing that implication
constraints build tuples.
Decomposing PredType
-}
-- | A choice of equality relation. This is separate from the type 'Role'
-- because 'Phantom' does not define a (non-trivial) equality relation.
data EqRel = NomEq | ReprEq
deriving (Eq, Ord)
instance Outputable EqRel where
ppr NomEq = text "nominal equality"
ppr ReprEq = text "representational equality"
eqRelRole :: EqRel -> Role
eqRelRole NomEq = Nominal
eqRelRole ReprEq = Representational
data PredTree = ClassPred Class [Type]
| EqPred EqRel Type Type
| IrredPred PredType
classifyPredType :: PredType -> PredTree
classifyPredType ev_ty = case splitTyConApp_maybe ev_ty of
Just (tc, tys) | tc `hasKey` coercibleTyConKey
, let [_, ty1, ty2] = tys
-> EqPred ReprEq ty1 ty2
Just (tc, tys) | tc `hasKey` eqTyConKey
, let [_, ty1, ty2] = tys
-> EqPred NomEq ty1 ty2
-- NB: Coercible is also a class, so this check must come *after*
-- the Coercible check
Just (tc, tys) | Just clas <- tyConClass_maybe tc
-> ClassPred clas tys
_ -> IrredPred ev_ty
getClassPredTys :: PredType -> (Class, [Type])
getClassPredTys ty = case getClassPredTys_maybe ty of
Just (clas, tys) -> (clas, tys)
Nothing -> pprPanic "getClassPredTys" (ppr ty)
getClassPredTys_maybe :: PredType -> Maybe (Class, [Type])
getClassPredTys_maybe ty = case splitTyConApp_maybe ty of
Just (tc, tys) | Just clas <- tyConClass_maybe tc -> Just (clas, tys)
_ -> Nothing
getEqPredTys :: PredType -> (Type, Type)
getEqPredTys ty
= case splitTyConApp_maybe ty of
Just (tc, (_ : ty1 : ty2 : tys)) ->
ASSERT( null tys && (tc `hasKey` eqTyConKey
|| tc `hasKey` coercibleTyConKey) )
(ty1, ty2)
_ -> pprPanic "getEqPredTys" (ppr ty)
getEqPredTys_maybe :: PredType -> Maybe (Role, Type, Type)
getEqPredTys_maybe ty
= case splitTyConApp_maybe ty of
Just (tc, [_, ty1, ty2])
| tc `hasKey` eqTyConKey -> Just (Nominal, ty1, ty2)
| tc `hasKey` coercibleTyConKey -> Just (Representational, ty1, ty2)
_ -> Nothing
getEqPredRole :: PredType -> Role
getEqPredRole ty
= case splitTyConApp_maybe ty of
Just (tc, [_, _, _])
| tc `hasKey` eqTyConKey -> Nominal
| tc `hasKey` coercibleTyConKey -> Representational
_ -> pprPanic "getEqPredRole" (ppr ty)
-- | Get the equality relation relevant for a pred type.
predTypeEqRel :: PredType -> EqRel
predTypeEqRel ty
| Just (tc, _) <- splitTyConApp_maybe ty
, tc `hasKey` coercibleTyConKey
= ReprEq
| otherwise
= NomEq
{-
%************************************************************************
%* *
Size
* *
************************************************************************
-}
typeSize :: Type -> Int
typeSize (LitTy {}) = 1
typeSize (TyVarTy {}) = 1
typeSize (AppTy t1 t2) = typeSize t1 + typeSize t2
typeSize (FunTy t1 t2) = typeSize t1 + typeSize t2
typeSize (ForAllTy _ t) = 1 + typeSize t
typeSize (TyConApp _ ts) = 1 + sum (map typeSize ts)
{-
************************************************************************
* *
\subsection{Type families}
* *
************************************************************************
-}
mkFamilyTyConApp :: TyCon -> [Type] -> Type
-- ^ Given a family instance TyCon and its arg types, return the
-- corresponding family type. E.g:
--
-- > data family T a
-- > data instance T (Maybe b) = MkT b
--
-- Where the instance tycon is :RTL, so:
--
-- > mkFamilyTyConApp :RTL Int = T (Maybe Int)
mkFamilyTyConApp tc tys
| Just (fam_tc, fam_tys) <- tyConFamInst_maybe tc
, let tvs = tyConTyVars tc
fam_subst = ASSERT2( length tvs == length tys, ppr tc <+> ppr tys )
zipTopTvSubst tvs tys
= mkTyConApp fam_tc (substTys fam_subst fam_tys)
| otherwise
= mkTyConApp tc tys
-- | Get the type on the LHS of a coercion induced by a type/data
-- family instance.
coAxNthLHS :: CoAxiom br -> Int -> Type
coAxNthLHS ax ind =
mkTyConApp (coAxiomTyCon ax) (coAxBranchLHS (coAxiomNthBranch ax ind))
-- | Pretty prints a 'TyCon', using the family instance in case of a
-- representation tycon. For example:
--
-- > data T [a] = ...
--
-- In that case we want to print @T [a]@, where @T@ is the family 'TyCon'
pprSourceTyCon :: TyCon -> SDoc
pprSourceTyCon tycon
| Just (fam_tc, tys) <- tyConFamInst_maybe tycon
= ppr $ fam_tc `TyConApp` tys -- can't be FunTyCon
| otherwise
= ppr tycon
{-
************************************************************************
* *
\subsection{Liftedness}
* *
************************************************************************
-}
-- | See "Type#type_classification" for what an unlifted type is
isUnLiftedType :: Type -> Bool
-- isUnLiftedType returns True for forall'd unlifted types:
-- x :: forall a. Int#
-- I found bindings like these were getting floated to the top level.
-- They are pretty bogus types, mind you. It would be better never to
-- construct them
isUnLiftedType ty | Just ty' <- coreView ty = isUnLiftedType ty'
isUnLiftedType (ForAllTy _ ty) = isUnLiftedType ty
isUnLiftedType (TyConApp tc _) = isUnLiftedTyCon tc
isUnLiftedType _ = False
isUnboxedTupleType :: Type -> Bool
isUnboxedTupleType ty = case tyConAppTyCon_maybe ty of
Just tc -> isUnboxedTupleTyCon tc
_ -> False
-- | See "Type#type_classification" for what an algebraic type is.
-- Should only be applied to /types/, as opposed to e.g. partially
-- saturated type constructors
isAlgType :: Type -> Bool
isAlgType ty
= case splitTyConApp_maybe ty of
Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
isAlgTyCon tc
_other -> False
-- | See "Type#type_classification" for what an algebraic type is.
-- Should only be applied to /types/, as opposed to e.g. partially
-- saturated type constructors. Closed type constructors are those
-- with a fixed right hand side, as opposed to e.g. associated types
isClosedAlgType :: Type -> Bool
isClosedAlgType ty
= case splitTyConApp_maybe ty of
Just (tc, ty_args) | isAlgTyCon tc && not (isFamilyTyCon tc)
-> ASSERT2( ty_args `lengthIs` tyConArity tc, ppr ty ) True
_other -> False
-- | Computes whether an argument (or let right hand side) should
-- be computed strictly or lazily, based only on its type.
-- Currently, it's just 'isUnLiftedType'.
isStrictType :: Type -> Bool
isStrictType = isUnLiftedType
isPrimitiveType :: Type -> Bool
-- ^ Returns true of types that are opaque to Haskell.
isPrimitiveType ty = case splitTyConApp_maybe ty of
Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
isPrimTyCon tc
_ -> False
{-
************************************************************************
* *
\subsection{Sequencing on types}
* *
************************************************************************
-}
seqType :: Type -> ()
seqType (LitTy n) = n `seq` ()
seqType (TyVarTy tv) = tv `seq` ()
seqType (AppTy t1 t2) = seqType t1 `seq` seqType t2
seqType (FunTy t1 t2) = seqType t1 `seq` seqType t2
seqType (TyConApp tc tys) = tc `seq` seqTypes tys
seqType (ForAllTy tv ty) = seqType (tyVarKind tv) `seq` seqType ty
seqTypes :: [Type] -> ()
seqTypes [] = ()
seqTypes (ty:tys) = seqType ty `seq` seqTypes tys
{-
************************************************************************
* *
Comparison for types
(We don't use instances so that we know where it happens)
* *
************************************************************************
-}
eqKind :: Kind -> Kind -> Bool
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
eqKind = eqType
eqType :: Type -> Type -> Bool
-- ^ Type equality on source types. Does not look through @newtypes@ or
-- 'PredType's, but it does look through type synonyms.
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
eqType t1 t2 = isEqual $ cmpType t1 t2
instance Eq Type where
(==) = eqType
eqTypeX :: RnEnv2 -> Type -> Type -> Bool
eqTypeX env t1 t2 = isEqual $ cmpTypeX env t1 t2
eqTypes :: [Type] -> [Type] -> Bool
eqTypes tys1 tys2 = isEqual $ cmpTypes tys1 tys2
eqPred :: PredType -> PredType -> Bool
eqPred = eqType
eqPredX :: RnEnv2 -> PredType -> PredType -> Bool
eqPredX env p1 p2 = isEqual $ cmpTypeX env p1 p2
eqTyVarBndrs :: RnEnv2 -> [TyVar] -> [TyVar] -> Maybe RnEnv2
-- Check that the tyvar lists are the same length
-- and have matching kinds; if so, extend the RnEnv2
-- Returns Nothing if they don't match
eqTyVarBndrs env [] []
= Just env
eqTyVarBndrs env (tv1:tvs1) (tv2:tvs2)
| eqTypeX env (tyVarKind tv1) (tyVarKind tv2)
= eqTyVarBndrs (rnBndr2 env tv1 tv2) tvs1 tvs2
eqTyVarBndrs _ _ _ = Nothing
-- Now here comes the real worker
cmpType :: Type -> Type -> Ordering
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
cmpType t1 t2 = cmpTypeX rn_env t1 t2
where
rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType t1 `unionVarSet` tyVarsOfType t2))
cmpTypes :: [Type] -> [Type] -> Ordering
cmpTypes ts1 ts2 = cmpTypesX rn_env ts1 ts2
where
rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfTypes ts1 `unionVarSet` tyVarsOfTypes ts2))
cmpPred :: PredType -> PredType -> Ordering
cmpPred p1 p2 = cmpTypeX rn_env p1 p2
where
rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType p1 `unionVarSet` tyVarsOfType p2))
cmpTypeX :: RnEnv2 -> Type -> Type -> Ordering -- Main workhorse
cmpTypeX env t1 t2 | Just t1' <- coreView t1 = cmpTypeX env t1' t2
| Just t2' <- coreView t2 = cmpTypeX env t1 t2'
-- We expand predicate types, because in Core-land we have
-- lots of definitions like
-- fOrdBool :: Ord Bool
-- fOrdBool = D:Ord .. .. ..
-- So the RHS has a data type
cmpTypeX env (TyVarTy tv1) (TyVarTy tv2) = rnOccL env tv1 `compare` rnOccR env tv2
cmpTypeX env (ForAllTy tv1 t1) (ForAllTy tv2 t2) = cmpTypeX env (tyVarKind tv1) (tyVarKind tv2)
`thenCmp` cmpTypeX (rnBndr2 env tv1 tv2) t1 t2
cmpTypeX env (AppTy s1 t1) (AppTy s2 t2) = cmpTypeX env s1 s2 `thenCmp` cmpTypeX env t1 t2
cmpTypeX env (FunTy s1 t1) (FunTy s2 t2) = cmpTypeX env s1 s2 `thenCmp` cmpTypeX env t1 t2
cmpTypeX env (TyConApp tc1 tys1) (TyConApp tc2 tys2) = (tc1 `cmpTc` tc2) `thenCmp` cmpTypesX env tys1 tys2
cmpTypeX _ (LitTy l1) (LitTy l2) = compare l1 l2
-- Deal with the rest: TyVarTy < AppTy < FunTy < LitTy < TyConApp < ForAllTy < PredTy
cmpTypeX _ (AppTy _ _) (TyVarTy _) = GT
cmpTypeX _ (FunTy _ _) (TyVarTy _) = GT
cmpTypeX _ (FunTy _ _) (AppTy _ _) = GT
cmpTypeX _ (LitTy _) (TyVarTy _) = GT
cmpTypeX _ (LitTy _) (AppTy _ _) = GT
cmpTypeX _ (LitTy _) (FunTy _ _) = GT
cmpTypeX _ (TyConApp _ _) (TyVarTy _) = GT
cmpTypeX _ (TyConApp _ _) (AppTy _ _) = GT
cmpTypeX _ (TyConApp _ _) (FunTy _ _) = GT
cmpTypeX _ (TyConApp _ _) (LitTy _) = GT
cmpTypeX _ (ForAllTy _ _) (TyVarTy _) = GT
cmpTypeX _ (ForAllTy _ _) (AppTy _ _) = GT
cmpTypeX _ (ForAllTy _ _) (FunTy _ _) = GT
cmpTypeX _ (ForAllTy _ _) (LitTy _) = GT
cmpTypeX _ (ForAllTy _ _) (TyConApp _ _) = GT
cmpTypeX _ _ _ = LT
-------------
cmpTypesX :: RnEnv2 -> [Type] -> [Type] -> Ordering
cmpTypesX _ [] [] = EQ
cmpTypesX env (t1:tys1) (t2:tys2) = cmpTypeX env t1 t2 `thenCmp` cmpTypesX env tys1 tys2
cmpTypesX _ [] _ = LT
cmpTypesX _ _ [] = GT
-------------
cmpTc :: TyCon -> TyCon -> Ordering
-- Here we treat * and Constraint as equal
-- See Note [Kind Constraint and kind *] in Kinds.hs
--
-- Also we treat OpenTypeKind as equal to either * or #
-- See Note [Comparison with OpenTypeKind]
cmpTc tc1 tc2
| u1 == openTypeKindTyConKey, isSubOpenTypeKindKey u2 = EQ
| u2 == openTypeKindTyConKey, isSubOpenTypeKindKey u1 = EQ
| otherwise = nu1 `compare` nu2
where
u1 = tyConUnique tc1
nu1 = if u1==constraintKindTyConKey then liftedTypeKindTyConKey else u1
u2 = tyConUnique tc2
nu2 = if u2==constraintKindTyConKey then liftedTypeKindTyConKey else u2
{-
Note [Comparison with OpenTypeKind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In PrimOpWrappers we have things like
PrimOpWrappers.mkWeak# = /\ a b c. Prim.mkWeak# a b c
where
Prim.mkWeak# :: forall (a:Open) b c. a -> b -> c
-> State# RealWorld -> (# State# RealWorld, Weak# b #)
Now, eta reduction will turn the definition into
PrimOpWrappers.mkWeak# = Prim.mkWeak#
which is kind-of OK, but now the types aren't really equal. So HACK HACK
we pretend (in Core) that Open is equal to * or #. I hate this.
Note [cmpTypeX]
~~~~~~~~~~~~~~~
When we compare foralls, we should look at the kinds. But if we do so,
we get a corelint error like the following (in
libraries/ghc-prim/GHC/PrimopWrappers.hs):
Binder's type: forall (o_abY :: *).
o_abY
-> GHC.Prim.State# GHC.Prim.RealWorld
-> GHC.Prim.State# GHC.Prim.RealWorld
Rhs type: forall (a_12 :: ?).
a_12
-> GHC.Prim.State# GHC.Prim.RealWorld
-> GHC.Prim.State# GHC.Prim.RealWorld
This is why we don't look at the kind. Maybe we should look if the
kinds are compatible.
-- cmpTypeX env (ForAllTy tv1 t1) (ForAllTy tv2 t2)
-- = cmpTypeX env (tyVarKind tv1) (tyVarKind tv2) `thenCmp`
-- cmpTypeX (rnBndr2 env tv1 tv2) t1 t2
************************************************************************
* *
Type substitutions
* *
************************************************************************
-}
emptyTvSubstEnv :: TvSubstEnv
emptyTvSubstEnv = emptyVarEnv
composeTvSubst :: InScopeSet -> TvSubstEnv -> TvSubstEnv -> TvSubstEnv
-- ^ @(compose env1 env2)(x)@ is @env1(env2(x))@; i.e. apply @env2@ then @env1@.
-- It assumes that both are idempotent.
-- Typically, @env1@ is the refinement to a base substitution @env2@
composeTvSubst in_scope env1 env2
= env1 `plusVarEnv` mapVarEnv (substTy subst1) env2
-- First apply env1 to the range of env2
-- Then combine the two, making sure that env1 loses if
-- both bind the same variable; that's why env1 is the
-- *left* argument to plusVarEnv, because the right arg wins
where
subst1 = TvSubst in_scope env1
emptyTvSubst :: TvSubst
emptyTvSubst = TvSubst emptyInScopeSet emptyTvSubstEnv
isEmptyTvSubst :: TvSubst -> Bool
-- See Note [Extending the TvSubstEnv] in TypeRep
isEmptyTvSubst (TvSubst _ tenv) = isEmptyVarEnv tenv
mkTvSubst :: InScopeSet -> TvSubstEnv -> TvSubst
mkTvSubst = TvSubst
getTvSubstEnv :: TvSubst -> TvSubstEnv
getTvSubstEnv (TvSubst _ env) = env
getTvInScope :: TvSubst -> InScopeSet
getTvInScope (TvSubst in_scope _) = in_scope
isInScope :: Var -> TvSubst -> Bool
isInScope v (TvSubst in_scope _) = v `elemInScopeSet` in_scope
notElemTvSubst :: CoVar -> TvSubst -> Bool
notElemTvSubst v (TvSubst _ tenv) = not (v `elemVarEnv` tenv)
setTvSubstEnv :: TvSubst -> TvSubstEnv -> TvSubst
setTvSubstEnv (TvSubst in_scope _) tenv = TvSubst in_scope tenv
zapTvSubstEnv :: TvSubst -> TvSubst
zapTvSubstEnv (TvSubst in_scope _) = TvSubst in_scope emptyVarEnv
extendTvInScope :: TvSubst -> Var -> TvSubst
extendTvInScope (TvSubst in_scope tenv) var = TvSubst (extendInScopeSet in_scope var) tenv
extendTvInScopeList :: TvSubst -> [Var] -> TvSubst
extendTvInScopeList (TvSubst in_scope tenv) vars = TvSubst (extendInScopeSetList in_scope vars) tenv
extendTvSubst :: TvSubst -> TyVar -> Type -> TvSubst
extendTvSubst (TvSubst in_scope tenv) tv ty = TvSubst in_scope (extendVarEnv tenv tv ty)
extendTvSubstList :: TvSubst -> [TyVar] -> [Type] -> TvSubst
extendTvSubstList (TvSubst in_scope tenv) tvs tys
= TvSubst in_scope (extendVarEnvList tenv (tvs `zip` tys))
unionTvSubst :: TvSubst -> TvSubst -> TvSubst
-- Works when the ranges are disjoint
unionTvSubst (TvSubst in_scope1 tenv1) (TvSubst in_scope2 tenv2)
= ASSERT( not (tenv1 `intersectsVarEnv` tenv2) )
TvSubst (in_scope1 `unionInScope` in_scope2)
(tenv1 `plusVarEnv` tenv2)
-- mkOpenTvSubst and zipOpenTvSubst generate the in-scope set from
-- the types given; but it's just a thunk so with a bit of luck
-- it'll never be evaluated
-- Note [Generating the in-scope set for a substitution]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If we want to substitute [a -> ty1, b -> ty2] I used to
-- think it was enough to generate an in-scope set that includes
-- fv(ty1,ty2). But that's not enough; we really should also take the
-- free vars of the type we are substituting into! Example:
-- (forall b. (a,b,x)) [a -> List b]
-- Then if we use the in-scope set {b}, there is a danger we will rename
-- the forall'd variable to 'x' by mistake, getting this:
-- (forall x. (List b, x, x))
-- Urk! This means looking at all the calls to mkOpenTvSubst....
-- | Generates the in-scope set for the 'TvSubst' from the types in the incoming
-- environment, hence "open"
mkOpenTvSubst :: TvSubstEnv -> TvSubst
mkOpenTvSubst tenv = TvSubst (mkInScopeSet (tyVarsOfTypes (varEnvElts tenv))) tenv
-- | Generates the in-scope set for the 'TvSubst' from the types in the incoming
-- environment, hence "open"
zipOpenTvSubst :: [TyVar] -> [Type] -> TvSubst
zipOpenTvSubst tyvars tys
| debugIsOn && (length tyvars /= length tys)
= pprTrace "zipOpenTvSubst" (ppr tyvars $$ ppr tys) emptyTvSubst
| otherwise
= TvSubst (mkInScopeSet (tyVarsOfTypes tys)) (zipTyEnv tyvars tys)
-- | Called when doing top-level substitutions. Here we expect that the
-- free vars of the range of the substitution will be empty.
mkTopTvSubst :: [(TyVar, Type)] -> TvSubst
mkTopTvSubst prs = TvSubst emptyInScopeSet (mkVarEnv prs)
zipTopTvSubst :: [TyVar] -> [Type] -> TvSubst
zipTopTvSubst tyvars tys
| debugIsOn && (length tyvars /= length tys)
= pprTrace "zipTopTvSubst" (ppr tyvars $$ ppr tys) emptyTvSubst
| otherwise
= TvSubst emptyInScopeSet (zipTyEnv tyvars tys)
zipTyEnv :: [TyVar] -> [Type] -> TvSubstEnv
zipTyEnv tyvars tys
| debugIsOn && (length tyvars /= length tys)
= pprTrace "zipTyEnv" (ppr tyvars $$ ppr tys) emptyVarEnv
| otherwise
= zip_ty_env tyvars tys emptyVarEnv
-- Later substitutions in the list over-ride earlier ones,
-- but there should be no loops
zip_ty_env :: [TyVar] -> [Type] -> TvSubstEnv -> TvSubstEnv
zip_ty_env [] [] env = env
zip_ty_env (tv:tvs) (ty:tys) env = zip_ty_env tvs tys (extendVarEnv env tv ty)
-- There used to be a special case for when
-- ty == TyVarTy tv
-- (a not-uncommon case) in which case the substitution was dropped.
-- But the type-tidier changes the print-name of a type variable without
-- changing the unique, and that led to a bug. Why? Pre-tidying, we had
-- a type {Foo t}, where Foo is a one-method class. So Foo is really a newtype.
-- And it happened that t was the type variable of the class. Post-tidying,
-- it got turned into {Foo t2}. The ext-core printer expanded this using
-- sourceTypeRep, but that said "Oh, t == t2" because they have the same unique,
-- and so generated a rep type mentioning t not t2.
--
-- Simplest fix is to nuke the "optimisation"
zip_ty_env tvs tys env = pprTrace "Var/Type length mismatch: " (ppr tvs $$ ppr tys) env
-- zip_ty_env _ _ env = env
instance Outputable TvSubst where
ppr (TvSubst ins tenv)
= brackets $ sep[ ptext (sLit "TvSubst"),
nest 2 (ptext (sLit "In scope:") <+> ppr ins),
nest 2 (ptext (sLit "Type env:") <+> ppr tenv) ]
{-
************************************************************************
* *
Performing type or kind substitutions
* *
************************************************************************
-}
-- | Type substitution making use of an 'TvSubst' that
-- is assumed to be open, see 'zipOpenTvSubst'
substTyWith :: [TyVar] -> [Type] -> Type -> Type
substTyWith tvs tys = ASSERT( length tvs == length tys )
substTy (zipOpenTvSubst tvs tys)
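-- A rough illustration of the intended behaviour (informal sketch only; the
-- real arguments are 'TyVar's and 'Type's, and 'intTy', 'boolTy', 'mkFunTy'
-- name the usual wired-in helpers):
--
--   substTyWith [a] [intTy] (mkFunTy (mkTyVarTy a) boolTy)
--     == mkFunTy intTy boolTy     -- i.e.  a -> Bool  becomes  Int -> Bool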
substKiWith :: [KindVar] -> [Kind] -> Kind -> Kind
substKiWith = substTyWith
-- | Type substitution making use of an 'TvSubst' that
-- is assumed to be open, see 'zipOpenTvSubst'
substTysWith :: [TyVar] -> [Type] -> [Type] -> [Type]
substTysWith tvs tys = ASSERT( length tvs == length tys )
substTys (zipOpenTvSubst tvs tys)
substKisWith :: [KindVar] -> [Kind] -> [Kind] -> [Kind]
substKisWith = substTysWith
-- | Substitute within a 'Type'
substTy :: TvSubst -> Type -> Type
substTy subst ty | isEmptyTvSubst subst = ty
| otherwise = subst_ty subst ty
-- | Substitute within several 'Type's
substTys :: TvSubst -> [Type] -> [Type]
substTys subst tys | isEmptyTvSubst subst = tys
| otherwise = map (subst_ty subst) tys
-- | Substitute within a 'ThetaType'
substTheta :: TvSubst -> ThetaType -> ThetaType
substTheta subst theta
| isEmptyTvSubst subst = theta
| otherwise = map (substTy subst) theta
-- | Remove any nested binders mentioning the 'TyVar's in the 'TyVarSet'
deShadowTy :: TyVarSet -> Type -> Type
deShadowTy tvs ty
= subst_ty (mkTvSubst in_scope emptyTvSubstEnv) ty
where
in_scope = mkInScopeSet tvs
subst_ty :: TvSubst -> Type -> Type
-- subst_ty is the main workhorse for type substitution
--
-- Note that the in_scope set is poked only if we hit a forall
-- so it may often never be fully computed
subst_ty subst ty
= go ty
where
go (LitTy n) = n `seq` LitTy n
go (TyVarTy tv) = substTyVar subst tv
go (TyConApp tc tys) = let args = map go tys
in args `seqList` TyConApp tc args
go (FunTy arg res) = (FunTy $! (go arg)) $! (go res)
go (AppTy fun arg) = mkAppTy (go fun) $! (go arg)
-- The mkAppTy smart constructor is important
-- we might be replacing (a Int), represented with App
-- by [Int], represented with TyConApp
go (ForAllTy tv ty) = case substTyVarBndr subst tv of
(subst', tv') ->
ForAllTy tv' $! (subst_ty subst' ty)
substTyVar :: TvSubst -> TyVar -> Type
substTyVar (TvSubst _ tenv) tv
| Just ty <- lookupVarEnv tenv tv = ty -- See Note [Apply Once]
| otherwise = ASSERT( isTyVar tv ) TyVarTy tv -- in TypeRep
-- We do not require that the tyvar is in scope
-- Reason: we do quite a bit of (substTyWith [tv] [ty] tau)
-- and it's a nuisance to bring all the free vars of tau into
-- scope --- and then force that thunk at every tyvar
-- Instead we have an ASSERT in substTyVarBndr to check for capture
substTyVars :: TvSubst -> [TyVar] -> [Type]
substTyVars subst tvs = map (substTyVar subst) tvs
lookupTyVar :: TvSubst -> TyVar -> Maybe Type
-- See Note [Extending the TvSubst] in TypeRep
lookupTyVar (TvSubst _ tenv) tv = lookupVarEnv tenv tv
substTyVarBndr :: TvSubst -> TyVar -> (TvSubst, TyVar)
substTyVarBndr subst@(TvSubst in_scope tenv) old_var
= ASSERT2( _no_capture, ppr old_var $$ ppr subst )
(TvSubst (in_scope `extendInScopeSet` new_var) new_env, new_var)
where
new_env | no_change = delVarEnv tenv old_var
| otherwise = extendVarEnv tenv old_var (TyVarTy new_var)
_no_capture = not (new_var `elemVarSet` tyVarsOfTypes (varEnvElts tenv))
-- Assertion check that we are not capturing something in the substitution
old_ki = tyVarKind old_var
no_kind_change = isEmptyVarSet (tyVarsOfType old_ki) -- verify that kind is closed
no_change = no_kind_change && (new_var == old_var)
-- no_change means that the new_var is identical in
-- all respects to the old_var (same unique, same kind)
-- See Note [Extending the TvSubst] in TypeRep
--
-- In that case we don't need to extend the substitution
-- to map old to new. But instead we must zap any
-- current substitution for the variable. For example:
-- (\x.e) with id_subst = [x |-> e']
-- Here we must simply zap the substitution for x
new_var | no_kind_change = uniqAway in_scope old_var
| otherwise = uniqAway in_scope $ updateTyVarKind (substTy subst) old_var
-- The uniqAway part makes sure the new variable is not already in scope
cloneTyVarBndr :: TvSubst -> TyVar -> Unique -> (TvSubst, TyVar)
cloneTyVarBndr (TvSubst in_scope tv_env) tv uniq
= (TvSubst (extendInScopeSet in_scope tv')
(extendVarEnv tv_env tv (mkTyVarTy tv')), tv')
where
tv' = setVarUnique tv uniq -- Simply set the unique; the kind
-- has no type variables to worry about
{-
----------------------------------------------------
-- Kind Stuff
Kinds
~~~~~
For the description of subkinding in GHC, see
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/TypeType#Kinds
-}
type MetaKindVar = TyVar -- invariant: MetaKindVar will always be a
-- TcTyVar with details MetaTv (TauTv ...) ...
-- meta kind var constructors and functions are in TcType
type SimpleKind = Kind
{-
************************************************************************
* *
The kind of a type
* *
************************************************************************
-}
typeKind :: Type -> Kind
typeKind orig_ty = go orig_ty
where
go ty@(TyConApp tc tys)
| isPromotedTyCon tc
= ASSERT( tyConArity tc == length tys ) superKind
| otherwise
= kindAppResult (ptext (sLit "typeKind 1") <+> ppr ty $$ ppr orig_ty)
(tyConKind tc) tys
go ty@(AppTy fun arg) = kindAppResult (ptext (sLit "typeKind 2") <+> ppr ty $$ ppr orig_ty)
(go fun) [arg]
go (LitTy l) = typeLiteralKind l
go (ForAllTy _ ty) = go ty
go (TyVarTy tyvar) = tyVarKind tyvar
go _ty@(FunTy _arg res)
-- Hack alert. The kind of (Int -> Int#) is liftedTypeKind (*),
-- not unliftedTypeKind (#)
-- The only things that can be after a function arrow are
-- (a) types (of kind openTypeKind or its sub-kinds)
-- (b) kinds (of super-kind TY) (e.g. * -> (* -> *))
| isSuperKind k = k
| otherwise = ASSERT2( isSubOpenTypeKind k, ppr _ty $$ ppr k ) liftedTypeKind
where
k = go res
typeLiteralKind :: TyLit -> Kind
typeLiteralKind l =
case l of
NumTyLit _ -> typeNatKind
StrTyLit _ -> typeSymbolKind
{-
Kind inference
~~~~~~~~~~~~~~
During kind inference, a kind variable unifies only with
a "simple kind", sk
sk ::= * | sk1 -> sk2
For example
data T a = MkT a (T Int#)
fails. We give T the kind (k -> *), and the kind variable k won't unify
with # (the kind of Int#).
Type inference
~~~~~~~~~~~~~~
When creating a fresh internal type variable, we give it a kind to express
constraints on it. E.g. in (\x->e) we make up a fresh type variable for x,
with kind ??.
During unification we only bind an internal type variable to a type
whose kind is lower in the sub-kind hierarchy than the kind of the tyvar.
When unifying two internal type variables, we collect their kind constraints by
finding the GLB of the two. Since the partial order is a tree, they only
have a glb if one is a sub-kind of the other. In that case, we bind the
less-informative one to the more informative one. Neat, eh?
-}
| TomMD/ghc | compiler/types/Type.hs | bsd-3-clause | 66,827 | 0 | 14 | 18,112 | 13,086 | 6,880 | 6,206 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Foldable
-- Copyright : Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : ross@soi.city.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- Class of data structures that can be folded to a summary value.
--
-- Many of these functions generalize "Prelude", "Control.Monad" and
-- "Data.List" functions of the same names from lists to any 'Foldable'
-- functor. To avoid ambiguity, either import those modules hiding
-- these names or qualify uses of these function names with an alias
-- for this module.
module Data.Foldable (
-- * Folds
Foldable(..),
-- ** Special biased folds
foldr',
foldl',
foldrM,
foldlM,
-- ** Folding actions
-- *** Applicative actions
traverse_,
for_,
sequenceA_,
asum,
-- *** Monadic actions
mapM_,
forM_,
sequence_,
msum,
-- ** Specialized folds
toList,
concat,
concatMap,
and,
or,
any,
all,
sum,
product,
maximum,
maximumBy,
minimum,
minimumBy,
-- ** Searches
elem,
notElem,
find
) where
import Prelude hiding (foldl, foldr, foldl1, foldr1, mapM_, sequence_,
elem, notElem, concat, concatMap, and, or, any, all,
sum, product, maximum, minimum)
import qualified Prelude (foldl, foldr, foldl1, foldr1)
import Control.Applicative
import Control.Monad (MonadPlus(..))
import Data.Maybe (fromMaybe, listToMaybe)
import Data.Monoid
import Data.Array
#ifdef __NHC__
import Control.Arrow (ArrowZero(..)) -- work around nhc98 typechecker problem
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.Exts (build)
#endif
-- | Data structures that can be folded.
--
-- Minimal complete definition: 'foldMap' or 'foldr'.
--
-- For example, given a data type
--
-- > data Tree a = Empty | Leaf a | Node (Tree a) a (Tree a)
--
-- a suitable instance would be
--
-- > instance Foldable Tree where
-- > foldMap f Empty = mempty
-- > foldMap f (Leaf x) = f x
-- > foldMap f (Node l k r) = foldMap f l `mappend` f k `mappend` foldMap f r
--
-- This is suitable even for abstract types, as the monoid is assumed
-- to satisfy the monoid laws.
--
class Foldable t where
-- | Combine the elements of a structure using a monoid.
fold :: Monoid m => t m -> m
fold = foldMap id
-- | Map each element of the structure to a monoid,
-- and combine the results.
foldMap :: Monoid m => (a -> m) -> t a -> m
foldMap f = foldr (mappend . f) mempty
-- | Right-associative fold of a structure.
--
-- @'foldr' f z = 'Prelude.foldr' f z . 'toList'@
foldr :: (a -> b -> b) -> b -> t a -> b
foldr f z t = appEndo (foldMap (Endo . f) t) z
-- | Left-associative fold of a structure.
--
-- @'foldl' f z = 'Prelude.foldl' f z . 'toList'@
foldl :: (a -> b -> a) -> a -> t b -> a
foldl f z t = appEndo (getDual (foldMap (Dual . Endo . flip f) t)) z
-- | A variant of 'foldr' that has no base case,
-- and thus may only be applied to non-empty structures.
--
-- @'foldr1' f = 'Prelude.foldr1' f . 'toList'@
foldr1 :: (a -> a -> a) -> t a -> a
foldr1 f xs = fromMaybe (error "foldr1: empty structure")
(foldr mf Nothing xs)
where mf x Nothing = Just x
mf x (Just y) = Just (f x y)
-- | A variant of 'foldl' that has no base case,
-- and thus may only be applied to non-empty structures.
--
-- @'foldl1' f = 'Prelude.foldl1' f . 'toList'@
foldl1 :: (a -> a -> a) -> t a -> a
foldl1 f xs = fromMaybe (error "foldl1: empty structure")
(foldl mf Nothing xs)
where mf Nothing y = Just y
mf (Just x) y = Just (f x y)
-- instances for Prelude types
instance Foldable Maybe where
foldr f z Nothing = z
foldr f z (Just x) = f x z
foldl f z Nothing = z
foldl f z (Just x) = f z x
instance Foldable [] where
foldr = Prelude.foldr
foldl = Prelude.foldl
foldr1 = Prelude.foldr1
foldl1 = Prelude.foldl1
instance Ix i => Foldable (Array i) where
foldr f z = Prelude.foldr f z . elems
-- | Fold over the elements of a structure,
-- associating to the right, but strictly.
foldr' :: Foldable t => (a -> b -> b) -> b -> t a -> b
foldr' f z xs = foldl f' id xs z
where f' k x z = k $! f x z
-- | Monadic fold over the elements of a structure,
-- associating to the right, i.e. from right to left.
foldrM :: (Foldable t, Monad m) => (a -> b -> m b) -> b -> t a -> m b
foldrM f z xs = foldl f' return xs z
where f' k x z = f x z >>= k
-- | Fold over the elements of a structure,
-- associating to the left, but strictly.
foldl' :: Foldable t => (a -> b -> a) -> a -> t b -> a
foldl' f z xs = foldr f' id xs z
where f' x k z = k $! f z x
-- | Monadic fold over the elements of a structure,
-- associating to the left, i.e. from left to right.
foldlM :: (Foldable t, Monad m) => (a -> b -> m a) -> a -> t b -> m a
foldlM f z xs = foldr f' return xs z
where f' x k z = f z x >>= k
-- | Map each element of a structure to an action, evaluate
-- these actions from left to right, and ignore the results.
traverse_ :: (Foldable t, Applicative f) => (a -> f b) -> t a -> f ()
traverse_ f = foldr ((*>) . f) (pure ())
-- | 'for_' is 'traverse_' with its arguments flipped.
for_ :: (Foldable t, Applicative f) => t a -> (a -> f b) -> f ()
{-# INLINE for_ #-}
for_ = flip traverse_
-- | Map each element of a structure to a monadic action, evaluate
-- these actions from left to right, and ignore the results.
mapM_ :: (Foldable t, Monad m) => (a -> m b) -> t a -> m ()
mapM_ f = foldr ((>>) . f) (return ())
-- | 'forM_' is 'mapM_' with its arguments flipped.
forM_ :: (Foldable t, Monad m) => t a -> (a -> m b) -> m ()
{-# INLINE forM_ #-}
forM_ = flip mapM_
-- | Evaluate each action in the structure from left to right,
-- and ignore the results.
sequenceA_ :: (Foldable t, Applicative f) => t (f a) -> f ()
sequenceA_ = foldr (*>) (pure ())
-- | Evaluate each monadic action in the structure from left to right,
-- and ignore the results.
sequence_ :: (Foldable t, Monad m) => t (m a) -> m ()
sequence_ = foldr (>>) (return ())
-- | The sum of a collection of actions, generalizing 'concat'.
asum :: (Foldable t, Alternative f) => t (f a) -> f a
{-# INLINE asum #-}
asum = foldr (<|>) empty
-- | The sum of a collection of actions, generalizing 'concat'.
msum :: (Foldable t, MonadPlus m) => t (m a) -> m a
{-# INLINE msum #-}
msum = foldr mplus mzero
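-- For example (a small sketch, relying only on the standard 'Alternative'
-- and 'MonadPlus' instances for 'Maybe' and lists):
--
-- > asum [Nothing, Just 3, Just 5]  ==  Just 3
-- > msum [[], [1,2], [3]]           ==  [1,2,3]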
-- These use foldr rather than foldMap to avoid repeated concatenation.
-- | List of elements of a structure.
toList :: Foldable t => t a -> [a]
#ifdef __GLASGOW_HASKELL__
toList t = build (\ c n -> foldr c n t)
#else
toList = foldr (:) []
#endif
-- | The concatenation of all the elements of a container of lists.
concat :: Foldable t => t [a] -> [a]
concat = fold
-- | Map a function over all the elements of a container and concatenate
-- the resulting lists.
concatMap :: Foldable t => (a -> [b]) -> t a -> [b]
concatMap = foldMap
-- | 'and' returns the conjunction of a container of Bools. For the
-- result to be 'True', the container must be finite; 'False', however,
-- results from a 'False' value finitely far from the left end.
and :: Foldable t => t Bool -> Bool
and = getAll . foldMap All
-- | 'or' returns the disjunction of a container of Bools. For the
-- result to be 'False', the container must be finite; 'True', however,
-- results from a 'True' value finitely far from the left end.
or :: Foldable t => t Bool -> Bool
or = getAny . foldMap Any
-- | Determines whether any element of the structure satisfies the predicate.
any :: Foldable t => (a -> Bool) -> t a -> Bool
any p = getAny . foldMap (Any . p)
-- | Determines whether all elements of the structure satisfy the predicate.
all :: Foldable t => (a -> Bool) -> t a -> Bool
all p = getAll . foldMap (All . p)
-- | The 'sum' function computes the sum of the numbers of a structure.
sum :: (Foldable t, Num a) => t a -> a
sum = getSum . foldMap Sum
-- | The 'product' function computes the product of the numbers of a structure.
product :: (Foldable t, Num a) => t a -> a
product = getProduct . foldMap Product
-- | The largest element of a non-empty structure.
maximum :: (Foldable t, Ord a) => t a -> a
maximum = foldr1 max
-- | The largest element of a non-empty structure with respect to the
-- given comparison function.
maximumBy :: Foldable t => (a -> a -> Ordering) -> t a -> a
maximumBy cmp = foldr1 max'
where max' x y = case cmp x y of
GT -> x
_ -> y
-- | The least element of a non-empty structure.
minimum :: (Foldable t, Ord a) => t a -> a
minimum = foldr1 min
-- | The least element of a non-empty structure with respect to the
-- given comparison function.
minimumBy :: Foldable t => (a -> a -> Ordering) -> t a -> a
minimumBy cmp = foldr1 min'
where min' x y = case cmp x y of
GT -> y
_ -> x
-- | Does the element occur in the structure?
elem :: (Foldable t, Eq a) => a -> t a -> Bool
elem = any . (==)
-- | 'notElem' is the negation of 'elem'.
notElem :: (Foldable t, Eq a) => a -> t a -> Bool
notElem x = not . elem x
-- | The 'find' function takes a predicate and a structure and returns
-- the leftmost element of the structure matching the predicate, or
-- 'Nothing' if there is no such element.
find :: Foldable t => (a -> Bool) -> t a -> Maybe a
find p = listToMaybe . concatMap (\ x -> if p x then [x] else [])
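-- For instance (a sketch using the list instance):
--
-- > find even [1,3,4,6 :: Int]  ==  Just 4
-- > find even [1,3,5 :: Int]    ==  Nothing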
| alekar/hugs | packages/base/Data/Foldable.hs | bsd-3-clause | 9,271 | 69 | 15 | 2,050 | 2,590 | 1,394 | 1,196 | 140 | 2 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Control/Concurrent/STM/TMVar.hs" #-}
{-# LANGUAGE CPP, DeriveDataTypeable, MagicHash, UnboxedTuples #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.TMVar
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- TMVar: Transactional MVars, for use in the STM monad
-- (GHC only)
--
-----------------------------------------------------------------------------
module Control.Concurrent.STM.TMVar (
-- * TMVars
TMVar,
newTMVar,
newEmptyTMVar,
newTMVarIO,
newEmptyTMVarIO,
takeTMVar,
putTMVar,
readTMVar,
tryReadTMVar,
swapTMVar,
tryTakeTMVar,
tryPutTMVar,
isEmptyTMVar,
mkWeakTMVar
) where
import GHC.Base
import GHC.Conc
import GHC.Weak
import Data.Typeable (Typeable)
newtype TMVar a = TMVar (TVar (Maybe a)) deriving (Eq, Typeable)
{- ^
A 'TMVar' is a synchronising variable, used
for communication between concurrent threads. It can be thought of
as a box, which may be empty or full.
-}
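-- A minimal usage sketch (assumes 'atomically' from "GHC.Conc", which this
-- module already imports):
--
-- > do box <- atomically (newTMVar (42 :: Int))
-- >    x   <- atomically (takeTMVar box)    -- the TMVar is now empty
-- >    atomically (putTMVar box (x + 1))    -- and it is full again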
-- |Create a 'TMVar' which contains the supplied value.
newTMVar :: a -> STM (TMVar a)
newTMVar a = do
t <- newTVar (Just a)
return (TMVar t)
-- |@IO@ version of 'newTMVar'. This is useful for creating top-level
-- 'TMVar's using 'System.IO.Unsafe.unsafePerformIO', because using
-- 'atomically' inside 'System.IO.Unsafe.unsafePerformIO' isn't
-- possible.
newTMVarIO :: a -> IO (TMVar a)
newTMVarIO a = do
t <- newTVarIO (Just a)
return (TMVar t)
-- |Create a 'TMVar' which is initially empty.
newEmptyTMVar :: STM (TMVar a)
newEmptyTMVar = do
t <- newTVar Nothing
return (TMVar t)
-- |@IO@ version of 'newEmptyTMVar'. This is useful for creating top-level
-- 'TMVar's using 'System.IO.Unsafe.unsafePerformIO', because using
-- 'atomically' inside 'System.IO.Unsafe.unsafePerformIO' isn't
-- possible.
newEmptyTMVarIO :: IO (TMVar a)
newEmptyTMVarIO = do
t <- newTVarIO Nothing
return (TMVar t)
-- |Return the contents of the 'TMVar'. If the 'TMVar' is currently
-- empty, the transaction will 'retry'. After a 'takeTMVar',
-- the 'TMVar' is left empty.
takeTMVar :: TMVar a -> STM a
takeTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> retry
Just a -> do writeTVar t Nothing; return a
-- | A version of 'takeTMVar' that does not 'retry'. The 'tryTakeTMVar'
-- function returns 'Nothing' if the 'TMVar' was empty, or @'Just' a@ if
-- the 'TMVar' was full with contents @a@. After 'tryTakeTMVar', the
-- 'TMVar' is left empty.
tryTakeTMVar :: TMVar a -> STM (Maybe a)
tryTakeTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> return Nothing
Just a -> do writeTVar t Nothing; return (Just a)
-- |Put a value into a 'TMVar'. If the 'TMVar' is currently full,
-- 'putTMVar' will 'retry'.
putTMVar :: TMVar a -> a -> STM ()
putTMVar (TMVar t) a = do
m <- readTVar t
case m of
Nothing -> do writeTVar t (Just a); return ()
Just _ -> retry
-- | A version of 'putTMVar' that does not 'retry'. The 'tryPutTMVar'
-- function attempts to put the value @a@ into the 'TMVar', returning
-- 'True' if it was successful, or 'False' otherwise.
tryPutTMVar :: TMVar a -> a -> STM Bool
tryPutTMVar (TMVar t) a = do
m <- readTVar t
case m of
Nothing -> do writeTVar t (Just a); return True
Just _ -> return False
-- | This is a combination of 'takeTMVar' and 'putTMVar'; ie. it
-- takes the value from the 'TMVar', puts it back, and also returns
-- it.
readTMVar :: TMVar a -> STM a
readTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> retry
Just a -> return a
-- | A version of 'readTMVar' which does not retry. Instead it
-- returns @Nothing@ if no value is available.
tryReadTMVar :: TMVar a -> STM (Maybe a)
tryReadTMVar (TMVar t) = readTVar t
-- |Swap the contents of a 'TMVar' for a new value.
swapTMVar :: TMVar a -> a -> STM a
swapTMVar (TMVar t) new = do
m <- readTVar t
case m of
Nothing -> retry
Just old -> do writeTVar t (Just new); return old
-- |Check whether a given 'TMVar' is empty.
isEmptyTMVar :: TMVar a -> STM Bool
isEmptyTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> return True
Just _ -> return False
-- | Make a 'Weak' pointer to a 'TMVar', using the second argument as
-- a finalizer to run when the 'TMVar' is garbage-collected.
--
-- @since 2.4.4
mkWeakTMVar :: TMVar a -> IO () -> IO (Weak (TMVar a))
mkWeakTMVar tmv@(TMVar (TVar t#)) (IO finalizer) = IO $ \s ->
case mkWeak# t# tmv finalizer s of (# s1, w #) -> (# s1, Weak w #)
| phischu/fragnix | tests/packages/scotty/Control.Concurrent.STM.TMVar.hs | bsd-3-clause | 4,914 | 0 | 14 | 1,100 | 1,067 | 538 | 529 | 87 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
-- | Some helpers for parsing data out of a raw WAI 'Request'.
module Network.Wai.Parse
( parseHttpAccept
, parseRequestBody
, RequestBodyType (..)
, getRequestBodyType
, sinkRequestBody
, BackEnd
, lbsBackEnd
, tempFileBackEnd
, tempFileBackEndOpts
, Param
, File
, FileInfo (..)
, parseContentType
#if TEST
, Bound (..)
, findBound
, sinkTillBound
, killCR
, killCRLF
, takeLine
#endif
) where
import qualified Data.ByteString.Search as Search
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Char8 as S8
import Data.Word (Word8)
import Data.Maybe (fromMaybe)
import Data.List (sortBy)
import Data.Function (on, fix)
import System.Directory (removeFile, getTemporaryDirectory)
import System.IO (hClose, openBinaryTempFile)
import Network.Wai
import qualified Network.HTTP.Types as H
import Control.Monad (when, unless)
import Control.Monad.Trans.Resource (allocate, release, register, InternalState, runInternalState)
import Data.IORef
import Network.HTTP.Types (hContentType)
import Data.CaseInsensitive (mk)
breakDiscard :: Word8 -> S.ByteString -> (S.ByteString, S.ByteString)
breakDiscard w s =
let (x, y) = S.break (== w) s
in (x, S.drop 1 y)
-- | Parse the HTTP accept string to determine supported content types.
parseHttpAccept :: S.ByteString -> [S.ByteString]
parseHttpAccept = map fst
. sortBy (rcompare `on` snd)
. map (addSpecificity . grabQ)
. S.split 44 -- comma
where
rcompare :: (Double,Int) -> (Double,Int) -> Ordering
rcompare = flip compare
addSpecificity (s, q) =
-- Prefer higher-specificity types
let semicolons = S.count 0x3B s
stars = S.count 0x2A s
in (s, (q, semicolons - stars))
grabQ s =
-- Stripping all spaces may be too harsh.
-- Maybe just strip either side of semicolon?
let (s', q) = S.breakSubstring ";q=" (S.filter (/=0x20) s) -- 0x20 is space
q' = S.takeWhile (/=0x3B) (S.drop 3 q) -- 0x3B is semicolon
in (s', readQ q')
readQ s = case reads $ S8.unpack s of
(x, _):_ -> x
_ -> 1.0
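-- For example, 'parseHttpAccept' yields the types in preference order
-- (an informal sketch):
--
-- > parseHttpAccept "text/html;q=0.9, application/json"
-- >     == ["application/json", "text/html"]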
-- | Store uploaded files in memory
lbsBackEnd :: Monad m => ignored1 -> ignored2 -> m S.ByteString -> m L.ByteString
lbsBackEnd _ _ popper =
loop id
where
loop front = do
bs <- popper
if S.null bs
then return $ L.fromChunks $ front []
else loop $ front . (bs:)
-- | Save uploaded files on disk as temporary files
--
-- Note: starting with version 2.0, removal of temp files is registered with
-- the provided @InternalState@. It is the responsibility of the caller to
-- ensure that this @InternalState@ gets cleaned up.
tempFileBackEnd :: InternalState -> ignored1 -> ignored2 -> IO S.ByteString -> IO FilePath
tempFileBackEnd = tempFileBackEndOpts getTemporaryDirectory "webenc.buf"
-- | Same as 'tempFileBackEnd', but use configurable temp folders and patterns.
tempFileBackEndOpts :: IO FilePath -- ^ get temporary directory
-> String -- ^ filename pattern
-> InternalState
-> ignored1
-> ignored2
-> IO S.ByteString
-> IO FilePath
tempFileBackEndOpts getTmpDir pattern internalState _ _ popper = do
(key, (fp, h)) <- flip runInternalState internalState $ allocate (do
tempDir <- getTmpDir
openBinaryTempFile tempDir pattern) (\(_, h) -> hClose h)
_ <- runInternalState (register $ removeFile fp) internalState
fix $ \loop -> do
bs <- popper
unless (S.null bs) $ do
S.hPut h bs
loop
release key
return fp
-- | Information on an uploaded file.
data FileInfo c = FileInfo
{ fileName :: S.ByteString
, fileContentType :: S.ByteString
, fileContent :: c
}
deriving (Eq, Show)
-- | Post parameter name and value.
type Param = (S.ByteString, S.ByteString)
-- | Post parameter name and associated file information.
type File y = (S.ByteString, FileInfo y)
-- | A file uploading backend. Takes the parameter name, file name, and a
-- stream of data.
type BackEnd a = S.ByteString -- ^ parameter name
-> FileInfo ()
-> IO S.ByteString
-> IO a
data RequestBodyType = UrlEncoded | Multipart S.ByteString
getRequestBodyType :: Request -> Maybe RequestBodyType
getRequestBodyType req = do
ctype' <- lookup hContentType $ requestHeaders req
let (ctype, attrs) = parseContentType ctype'
case ctype of
"application/x-www-form-urlencoded" -> return UrlEncoded
"multipart/form-data" | Just bound <- lookup "boundary" attrs -> return $ Multipart bound
_ -> Nothing
-- | Parse a content type value, turning a single @ByteString@ into the actual
-- content type and a list of pairs of attributes.
--
-- Since 1.3.2
parseContentType :: S.ByteString -> (S.ByteString, [(S.ByteString, S.ByteString)])
parseContentType a = do
let (ctype, b) = S.break (== semicolon) a
attrs = goAttrs id $ S.drop 1 b
in (ctype, attrs)
where
semicolon = 59
equals = 61
space = 32
goAttrs front bs
| S.null bs = front []
| otherwise =
let (x, rest) = S.break (== semicolon) bs
in goAttrs (front . (goAttr x:)) $ S.drop 1 rest
goAttr bs =
let (k, v') = S.break (== equals) bs
v = S.drop 1 v'
in (strip k, strip v)
strip = S.dropWhile (== space) . fst . S.breakEnd (/= space)
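-- As an informal sketch, 'parseContentType' splits a header value like this:
--
-- > parseContentType "text/html; charset=utf-8"
-- >     == ("text/html", [("charset", "utf-8")])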
parseRequestBody :: BackEnd y
-> Request
-> IO ([Param], [File y])
parseRequestBody s r =
case getRequestBodyType r of
Nothing -> return ([], [])
Just rbt -> sinkRequestBody s rbt (requestBody r)
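-- A typical call site looks something like this (sketch only; @req@ stands
-- for an incoming WAI 'Request'):
--
-- > (params, files) <- parseRequestBody lbsBackEnd req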
sinkRequestBody :: BackEnd y
-> RequestBodyType
-> IO S.ByteString
-> IO ([Param], [File y])
sinkRequestBody s r body = do
ref <- newIORef (id, id)
let add x = atomicModifyIORef ref $ \(y, z) ->
case x of
Left y' -> ((y . (y':), z), ())
Right z' -> ((y, z . (z':)), ())
conduitRequestBody s r body add
(x, y) <- readIORef ref
return (x [], y [])
conduitRequestBody :: BackEnd y
-> RequestBodyType
-> IO S.ByteString
-> (Either Param (File y) -> IO ())
-> IO ()
conduitRequestBody _ UrlEncoded rbody add = do
-- NOTE: in general, url-encoded data will be in a single chunk.
-- Therefore, I'm optimizing for the usual case by sticking with
-- strict byte strings here.
let loop front = do
bs <- rbody
if S.null bs
then return $ S.concat $ front []
else loop $ front . (bs:)
bs <- loop id
mapM_ (add . Left) $ H.parseSimpleQuery bs
conduitRequestBody backend (Multipart bound) rbody add =
parsePieces backend (S8.pack "--" `S.append` bound) rbody add
takeLine :: Source -> IO (Maybe S.ByteString)
takeLine src =
go id
where
go front = do
bs <- readSource src
if S.null bs
then close front
else push front bs
close front = leftover src (front S.empty) >> return Nothing
push front bs = do
let (x, y) = S.break (== 10) $ front bs -- LF
in if S.null y
then go $ S.append x
else do
when (S.length y > 1) $ leftover src $ S.drop 1 y
return $ Just $ killCR x
takeLines :: Source -> IO [S.ByteString]
takeLines src = do
res <- takeLine src
case res of
Nothing -> return []
Just l
| S.null l -> return []
| otherwise -> do
ls <- takeLines src
return $ l : ls
data Source = Source (IO S.ByteString) (IORef S.ByteString)
mkSource :: IO S.ByteString -> IO Source
mkSource f = do
ref <- newIORef S.empty
return $ Source f ref
readSource :: Source -> IO S.ByteString
readSource (Source f ref) = do
bs <- atomicModifyIORef ref $ \bs -> (S.empty, bs)
if S.null bs
then f
else return bs
leftover :: Source -> S.ByteString -> IO ()
leftover (Source _ ref) bs = writeIORef ref bs
parsePieces :: BackEnd y
-> S.ByteString
-> IO S.ByteString
-> (Either Param (File y) -> IO ())
-> IO ()
parsePieces sink bound rbody add =
mkSource rbody >>= loop
where
loop src = do
_boundLine <- takeLine src
res' <- takeLines src
unless (null res') $ do
let ls' = map parsePair res'
let x = do
cd <- lookup contDisp ls'
let ct = lookup contType ls'
let attrs = parseAttrs cd
name <- lookup "name" attrs
return (ct, name, lookup "filename" attrs)
case x of
Just (mct, name, Just filename) -> do
let ct = fromMaybe "application/octet-stream" mct
fi0 = FileInfo filename ct ()
(wasFound, y) <- sinkTillBound' bound name fi0 sink src
add $ Right (name, fi0 { fileContent = y })
when wasFound (loop src)
Just (_ct, name, Nothing) -> do
let seed = id
let iter front bs = return $ front . (:) bs
(wasFound, front) <- sinkTillBound bound iter seed src
let bs = S.concat $ front []
let x' = (name, bs)
add $ Left x'
when wasFound (loop src)
_ -> do
-- ignore this part
let seed = ()
iter () _ = return ()
(wasFound, ()) <- sinkTillBound bound iter seed src
when wasFound (loop src)
where
contDisp = mk $ S8.pack "Content-Disposition"
contType = mk $ S8.pack "Content-Type"
parsePair s =
let (x, y) = breakDiscard 58 s -- colon
in (mk $ x, S.dropWhile (== 32) y) -- space
data Bound = FoundBound S.ByteString S.ByteString
| NoBound
| PartialBound
deriving (Eq, Show)
findBound :: S.ByteString -> S.ByteString -> Bound
findBound b bs = handleBreak $ Search.breakOn b bs
where
handleBreak (h, t)
| S.null t = go [lowBound..S.length bs - 1]
| otherwise = FoundBound h $ S.drop (S.length b) t
lowBound = max 0 $ S.length bs - S.length b
go [] = NoBound
go (i:is)
| mismatch [0..S.length b - 1] [i..S.length bs - 1] = go is
| otherwise =
let endI = i + S.length b
in if endI > S.length bs
then PartialBound
else FoundBound (S.take i bs) (S.drop endI bs)
mismatch [] _ = False
mismatch _ [] = False
mismatch (x:xs) (y:ys)
| S.index b x == S.index bs y = mismatch xs ys
| otherwise = True
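-- A couple of illustrative cases for 'findBound' (informal sketch):
--
-- > findBound "--bnd" "some text--bndrest"  ==  FoundBound "some text" "rest"
-- > findBound "--bnd" "some text--b"        ==  PartialBound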
sinkTillBound' :: S.ByteString
-> S.ByteString
-> FileInfo ()
-> BackEnd y
-> Source
-> IO (Bool, y)
sinkTillBound' bound name fi sink src = do
(next, final) <- wrapTillBound bound src
y <- sink name fi next
b <- final
return (b, y)
data WTB = WTBWorking (S.ByteString -> S.ByteString)
| WTBDone Bool
wrapTillBound :: S.ByteString -- ^ bound
-> Source
-> IO (IO S.ByteString, IO Bool) -- ^ Bool indicates if the bound was found
wrapTillBound bound src = do
ref <- newIORef $ WTBWorking id
return (go ref, final ref)
where
final ref = do
x <- readIORef ref
case x of
WTBWorking _ -> error "wrapTillBound did not finish"
WTBDone y -> return y
go ref = do
state <- readIORef ref
case state of
WTBDone _ -> return S.empty
WTBWorking front -> do
bs <- readSource src
if S.null bs
then do
writeIORef ref $ WTBDone False
return $ front bs
else push $ front bs
where
push bs =
case findBound bound bs of
FoundBound before after -> do
let before' = killCRLF before
leftover src after
writeIORef ref $ WTBDone True
return before'
NoBound -> do
-- don't emit newlines, in case it's part of a bound
let (toEmit, front') =
if not (S8.null bs) && S8.last bs `elem` ['\r','\n']
then let (x, y) = S.splitAt (S.length bs - 2) bs
in (x, S.append y)
else (bs, id)
writeIORef ref $ WTBWorking front'
if S.null toEmit
then go ref
else return toEmit
PartialBound -> do
writeIORef ref $ WTBWorking $ S.append bs
go ref
sinkTillBound :: S.ByteString
-> (x -> S.ByteString -> IO x)
-> x
-> Source
-> IO (Bool, x)
sinkTillBound bound iter seed0 src = do
(next, final) <- wrapTillBound bound src
let loop seed = do
bs <- next
if S.null bs
then return seed
else iter seed bs >>= loop
seed <- loop seed0
b <- final
return (b, seed)
parseAttrs :: S.ByteString -> [(S.ByteString, S.ByteString)]
parseAttrs = map go . S.split 59 -- semicolon
where
tw = S.dropWhile (== 32) -- space
dq s = if S.length s > 2 && S.head s == 34 && S.last s == 34 -- quote
then S.tail $ S.init s
else s
go s =
let (x, y) = breakDiscard 61 s -- equals sign
in (tw x, dq $ tw y)
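-- For instance (informal sketch):
--
-- > parseAttrs "name=\"foo\"; filename=\"bar.txt\""
-- >     == [("name", "foo"), ("filename", "bar.txt")]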
killCRLF :: S.ByteString -> S.ByteString
killCRLF bs
| S.null bs || S.last bs /= 10 = bs -- line feed
| otherwise = killCR $ S.init bs
killCR :: S.ByteString -> S.ByteString
killCR bs
| S.null bs || S.last bs /= 13 = bs -- carriage return
| otherwise = S.init bs
| frontrowed/wai | wai-extra/Network/Wai/Parse.hs | mit | 14,689 | 0 | 26 | 5,224 | 4,683 | 2,356 | 2,327 | 352 | 8 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Passive Scan Rules - Alpha | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/pscanrulesAlpha/resources/help_sr_CS/helpset_sr_CS.hs | apache-2.0 | 988 | 89 | 29 | 163 | 405 | 216 | 189 | -1 | -1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module System.IO (module M) where
import "base" System.IO as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/System/IO.hs | apache-2.0 | 733 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
#ifdef FAY
import Prelude
#else
this is invalid code
#endif
#ifdef FAY
main :: Fay ()
#endif
#ifndef FAY
more invalid code
#if FAY
this should not be used
#endif
#else
#if FAY
main = print True
#else
invalid and nested
#endif
#endif
| fpco/fay | tests/CPP.hs | bsd-3-clause | 316 | 1 | 6 | 78 | 52 | 28 | 24 | -1 | -1 |
import Control.Concurrent.STM
main = do
x <- atomically $ do
t <- newTVar 1
writeTVar t 2
((readTVar t >> retry) `orElse` return ()) `orElse` return ()
readTVar t
print x
| gridaphobe/packages-stm | tests/stm064.hs | bsd-3-clause | 211 | 1 | 16 | 71 | 94 | 43 | 51 | 8 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.C
-- Copyright : (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Bundles the C specific FFI library functionality
--
-----------------------------------------------------------------------------
module Foreign.C
( module Foreign.C.Types
, module Foreign.C.String
, module Foreign.C.Error
) where
import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error
| alexander-at-github/eta | libraries/base/Foreign/C.hs | bsd-3-clause | 716 | 0 | 5 | 130 | 62 | 47 | 15 | 9 | 0 |
module Foo where
import Prelude hiding (putStr, putStrLn)
import qualified System.IO (putStr, putStrLn)
import Data.Maybe
import qualified ValidHoleFits
ps :: String -> IO ()
ps = putStrLn
a :: Int -> IO Int
a _ = return 1
b :: Int -> IO Int
b _ = return 2
c :: Int -> IO Int
c _ = do { x <- a 0
; y <- _ x
; return y }
test :: [Maybe a] -> [a]
test = _
test2 :: Integer -> ValidHoleFits.Moo
test2 = _
k :: Maybe Integer
k = _ 2
f :: String
f = show _
h :: String
h = show (_ (_ :: Bool))
-- Built-in Syntax
myCons :: a -> [a] -> [a]
myCons = _
main :: IO ()
main = _ "hello, world"
| sdiehl/ghc | testsuite/tests/typecheck/should_compile/valid_hole_fits.hs | bsd-3-clause | 612 | 0 | 8 | 166 | 290 | 157 | 133 | 29 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.TopHandler
-- Copyright : (c) The University of Glasgow, 2001-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Support for catching exceptions raised during top-level computations
-- (e.g. @Main.main@, 'Control.Concurrent.forkIO', and foreign exports)
--
-----------------------------------------------------------------------------
module GHC.TopHandler (
runMainIO, runIO, runIOFastExit, runNonIO,
topHandler, topHandlerFastExit,
reportStackOverflow, reportError,
flushStdHandles
) where
#include "HsBaseConfig.h"
import Control.Exception
import Data.Maybe
import Foreign
import Foreign.C
import GHC.Base
import GHC.Conc hiding (throwTo)
import GHC.Real
import GHC.IO
import GHC.IO.Handle.FD
import GHC.IO.Handle
import GHC.IO.Exception
import GHC.Weak
#if defined(mingw32_HOST_OS)
import GHC.ConsoleHandler
#else
import Data.Dynamic (toDyn)
#endif
-- | 'runMainIO' is wrapped around 'Main.main' (or whatever main is
-- called in the program). It catches otherwise uncaught exceptions,
-- and also flushes stdout\/stderr before exiting.
runMainIO :: IO a -> IO a
runMainIO main =
do
main_thread_id <- myThreadId
weak_tid <- mkWeakThreadId main_thread_id
install_interrupt_handler $ do
m <- deRefWeak weak_tid
case m of
Nothing -> return ()
Just tid -> throwTo tid (toException UserInterrupt)
main -- hs_exit() will flush
`catch`
topHandler
install_interrupt_handler :: IO () -> IO ()
#ifdef mingw32_HOST_OS
install_interrupt_handler handler = do
_ <- GHC.ConsoleHandler.installHandler $
Catch $ \event ->
case event of
ControlC -> handler
Break -> handler
Close -> handler
_ -> return ()
return ()
#else
#include "rts/Signals.h"
-- specialised version of System.Posix.Signals.installHandler, which
-- isn't available here.
install_interrupt_handler handler = do
let sig = CONST_SIGINT :: CInt
_ <- setHandler sig (Just (const handler, toDyn handler))
_ <- stg_sig_install sig STG_SIG_RST nullPtr
-- STG_SIG_RST: the second ^C kills us for real, just in case the
-- RTS or program is unresponsive.
return ()
foreign import ccall unsafe
stg_sig_install
:: CInt -- sig no.
-> CInt -- action code (STG_SIG_HAN etc.)
-> Ptr () -- (in, out) blocked
-> IO CInt -- (ret) old action code
#endif
-- | 'runIO' is wrapped around every @foreign export@ and @foreign
-- import \"wrapper\"@ to mop up any uncaught exceptions. Thus, the
-- result of running 'System.Exit.exitWith' in a foreign-exported
-- function is the same as in the main thread: it terminates the
-- program.
--
runIO :: IO a -> IO a
runIO main = catch main topHandler
-- | Like 'runIO', but in the event of an exception that causes an exit,
-- we don't shut down the system cleanly, we just exit. This is
-- useful in some cases, because the safe exit version will give other
-- threads a chance to clean up first, which might shut down the
-- system in a different way. For example, try
--
-- main = forkIO (runIO (exitWith (ExitFailure 1))) >> threadDelay 10000
--
-- This will sometimes exit with "interrupted" and code 0, because the
-- main thread is given a chance to shut down when the child thread calls
-- safeExit. There is a race to shut down between the main and child threads.
--
runIOFastExit :: IO a -> IO a
runIOFastExit main = catch main topHandlerFastExit
-- NB. this is used by the testsuite driver
-- | The same as 'runIO', but for non-IO computations. Used for
-- wrapping @foreign export@ and @foreign import \"wrapper\"@ when these
-- are used to export Haskell functions with non-IO types.
--
runNonIO :: a -> IO a
runNonIO a = catch (a `seq` return a) topHandler
topHandler :: SomeException -> IO a
topHandler err = catch (real_handler safeExit err) topHandler
topHandlerFastExit :: SomeException -> IO a
topHandlerFastExit err =
catchException (real_handler fastExit err) topHandlerFastExit
-- Make sure we handle errors while reporting the error!
-- (e.g. evaluating the string passed to 'error' might generate
-- another error, etc.)
--
real_handler :: (Int -> IO a) -> SomeException -> IO a
real_handler exit se = do
flushStdHandles -- before any error output
case fromException se of
Just StackOverflow -> do
reportStackOverflow
exit 2
Just UserInterrupt -> exitInterrupted
_ -> case fromException se of
-- only the main thread gets ExitException exceptions
Just ExitSuccess -> exit 0
Just (ExitFailure n) -> exit n
-- EPIPE errors received for stdout are ignored (#2699)
_ -> case fromException se of
Just IOError{ ioe_type = ResourceVanished,
ioe_errno = Just ioe,
ioe_handle = Just hdl }
| Errno ioe == ePIPE, hdl == stdout -> exit 0
_ -> do reportError se
exit 1
-- try to flush stdout/stderr, but don't worry if we fail
-- (these handles might have errors, and we don't want to go into
-- an infinite loop).
flushStdHandles :: IO ()
flushStdHandles = do
hFlush stdout `catchAny` \_ -> return ()
hFlush stderr `catchAny` \_ -> return ()
safeExit, fastExit :: Int -> IO a
safeExit = exitHelper useSafeExit
fastExit = exitHelper useFastExit
unreachable :: IO a
unreachable = fail "If you can read this, shutdownHaskellAndExit did not exit."
exitHelper :: CInt -> Int -> IO a
#ifdef mingw32_HOST_OS
exitHelper exitKind r =
shutdownHaskellAndExit (fromIntegral r) exitKind >> unreachable
#else
-- On Unix we use an encoding for the ExitCode:
-- 0 -- 255 normal exit code
-- -127 -- -1 exit by signal
-- For any invalid encoding we just use a replacement (0xff).
exitHelper exitKind r
| r >= 0 && r <= 255
= shutdownHaskellAndExit (fromIntegral r) exitKind >> unreachable
| r >= -127 && r <= -1
= shutdownHaskellAndSignal (fromIntegral (-r)) exitKind >> unreachable
| otherwise
= shutdownHaskellAndExit 0xff exitKind >> unreachable
foreign import ccall "shutdownHaskellAndSignal"
shutdownHaskellAndSignal :: CInt -> CInt -> IO ()
#endif
exitInterrupted :: IO a
exitInterrupted =
#ifdef mingw32_HOST_OS
safeExit 252
#else
-- we must exit via the default action for SIGINT, so that the
-- parent of this process can take appropriate action (see #2301)
safeExit (-CONST_SIGINT)
#endif
-- NOTE: shutdownHaskellAndExit must be called "safe", because it *can*
-- re-enter Haskell land through finalizers.
foreign import ccall "Rts.h shutdownHaskellAndExit"
shutdownHaskellAndExit :: CInt -> CInt -> IO ()
useFastExit, useSafeExit :: CInt
useFastExit = 1
useSafeExit = 0
| beni55/haste-compiler | libraries/ghc-7.10/base/GHC/TopHandler.hs | bsd-3-clause | 7,332 | 0 | 20 | 1,749 | 945 | 507 | 438 | 105 | 6 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Main where
import Control.Exception (throwIO, throw)
import Control.Monad
import qualified Data.ByteString as S
--import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B (unpack)
import qualified Network.HTTP.Types as H
import Network.Wai.Handler.Warp.Types
import Prelude hiding (lines)
import Data.ByteString.Internal
import Data.Word
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Storable
#if MIN_VERSION_gauge(0, 2, 0)
import Gauge
#else
import Gauge.Main
#endif
-- $setup
-- >>> :set -XOverloadedStrings
----------------------------------------------------------------
main :: IO ()
main = do
let requestLine1 = "GET http://www.example.com HTTP/1.1"
let requestLine2 = "GET http://www.example.com/cgi-path/search.cgi?key=parser HTTP/1.0"
defaultMain [
bgroup "requestLine1" [
bench "parseRequestLine3" $ whnf parseRequestLine3 requestLine1
, bench "parseRequestLine2" $ whnfIO $ parseRequestLine2 requestLine1
, bench "parseRequestLine1" $ whnfIO $ parseRequestLine1 requestLine1
, bench "parseRequestLine0" $ whnfIO $ parseRequestLine0 requestLine1
]
, bgroup "requestLine2" [
bench "parseRequestLine3" $ whnf parseRequestLine3 requestLine2
, bench "parseRequestLine2" $ whnfIO $ parseRequestLine2 requestLine2
, bench "parseRequestLine1" $ whnfIO $ parseRequestLine1 requestLine2
, bench "parseRequestLine0" $ whnfIO $ parseRequestLine0 requestLine2
]
]
----------------------------------------------------------------
-- |
--
-- >>> parseRequestLine3 "GET / HTTP/1.1"
-- ("GET","/","",HTTP/1.1)
-- >>> parseRequestLine3 "POST /cgi/search.cgi?key=foo HTTP/1.0"
-- ("POST","/cgi/search.cgi","?key=foo",HTTP/1.0)
-- >>> parseRequestLine3 "GET "
-- *** Exception: BadFirstLine "GET "
-- >>> parseRequestLine3 "GET /NotHTTP UNKNOWN/1.1"
-- *** Exception: NonHttp
parseRequestLine3 :: ByteString
-> (H.Method
,ByteString -- Path
,ByteString -- Query
,H.HttpVersion)
parseRequestLine3 requestLine = ret
where
(!method,!rest) = S.break (== 32) requestLine -- ' '
(!pathQuery,!httpVer')
| rest == "" = throw badmsg
| otherwise = S.break (== 32) (S.drop 1 rest) -- ' '
(!path,!query) = S.break (== 63) pathQuery -- '?'
!httpVer = S.drop 1 httpVer'
(!http,!ver)
| httpVer == "" = throw badmsg
| otherwise = S.break (== 47) httpVer -- '/'
!hv | http /= "HTTP" = throw NonHttp
| ver == "/1.1" = H.http11
| otherwise = H.http10
!ret = (method,path,query,hv)
badmsg = BadFirstLine $ B.unpack requestLine
----------------------------------------------------------------
-- |
--
-- >>> parseRequestLine2 "GET / HTTP/1.1"
-- ("GET","/","",HTTP/1.1)
-- >>> parseRequestLine2 "POST /cgi/search.cgi?key=foo HTTP/1.0"
-- ("POST","/cgi/search.cgi","?key=foo",HTTP/1.0)
-- >>> parseRequestLine2 "GET "
-- *** Exception: BadFirstLine "GET "
-- >>> parseRequestLine2 "GET /NotHTTP UNKNOWN/1.1"
-- *** Exception: NonHttp
parseRequestLine2 :: ByteString
-> IO (H.Method
,ByteString -- Path
,ByteString -- Query
,H.HttpVersion)
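-- Scans the request line in place with 'memchr' and slices the original
-- buffer through its ForeignPtr, so no bytes are copied.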
parseRequestLine2 requestLine@(PS fptr off len) = withForeignPtr fptr $ \ptr -> do
when (len < 14) $ throwIO baderr
let methodptr = ptr `plusPtr` off
limptr = methodptr `plusPtr` len
lim0 = fromIntegral len
pathptr0 <- memchr methodptr 32 lim0 -- ' '
when (pathptr0 == nullPtr || (limptr `minusPtr` pathptr0) < 11) $
throwIO baderr
let pathptr = pathptr0 `plusPtr` 1
lim1 = fromIntegral (limptr `minusPtr` pathptr0)
httpptr0 <- memchr pathptr 32 lim1 -- ' '
when (httpptr0 == nullPtr || (limptr `minusPtr` httpptr0) < 9) $
throwIO baderr
let httpptr = httpptr0 `plusPtr` 1
lim2 = fromIntegral (httpptr0 `minusPtr` pathptr)
checkHTTP httpptr
!hv <- httpVersion httpptr
queryptr <- memchr pathptr 63 lim2 -- '?'
let !method = bs ptr methodptr pathptr0
!path
| queryptr == nullPtr = bs ptr pathptr httpptr0
| otherwise = bs ptr pathptr queryptr
!query
| queryptr == nullPtr = S.empty
| otherwise = bs ptr queryptr httpptr0
return (method,path,query,hv)
where
baderr = BadFirstLine $ B.unpack requestLine
check :: Ptr Word8 -> Int -> Word8 -> IO ()
check p n w = do
w0 <- peek $ p `plusPtr` n
when (w0 /= w) $ throwIO NonHttp
checkHTTP httpptr = do
check httpptr 0 72 -- 'H'
check httpptr 1 84 -- 'T'
check httpptr 2 84 -- 'T'
check httpptr 3 80 -- 'P'
check httpptr 4 47 -- '/'
check httpptr 6 46 -- '.'
httpVersion httpptr = do
major <- peek $ httpptr `plusPtr` 5
minor <- peek $ httpptr `plusPtr` 7
if major == (49 :: Word8) && minor == (49 :: Word8) then
return H.http11
else
return H.http10
bs ptr p0 p1 = PS fptr o l
where
o = p0 `minusPtr` ptr
l = p1 `minusPtr` p0
----------------------------------------------------------------
-- |
--
-- >>> parseRequestLine1 "GET / HTTP/1.1"
-- ("GET","/","",HTTP/1.1)
-- >>> parseRequestLine1 "POST /cgi/search.cgi?key=foo HTTP/1.0"
-- ("POST","/cgi/search.cgi","?key=foo",HTTP/1.0)
-- >>> parseRequestLine1 "GET "
-- *** Exception: BadFirstLine "GET "
-- >>> parseRequestLine1 "GET /NotHTTP UNKNOWN/1.1"
-- *** Exception: NonHttp
parseRequestLine1 :: ByteString
-> IO (H.Method
,ByteString -- Path
,ByteString -- Query
,H.HttpVersion)
parseRequestLine1 requestLine = do
let (!method,!rest) = S.break (== 32) requestLine -- ' '
(!pathQuery,!httpVer') = S.break (== 32) (S.drop 1 rest) -- ' '
!httpVer = S.drop 1 httpVer'
when (rest == "" || httpVer == "") $
throwIO $ BadFirstLine $ B.unpack requestLine
let (!path,!query) = S.break (== 63) pathQuery -- '?'
(!http,!ver) = S.break (== 47) httpVer -- '/'
when (http /= "HTTP") $ throwIO NonHttp
let !hv | ver == "/1.1" = H.http11
| otherwise = H.http10
return $! (method,path,query,hv)
----------------------------------------------------------------
-- |
--
-- >>> parseRequestLine0 "GET / HTTP/1.1"
-- ("GET","/","",HTTP/1.1)
-- >>> parseRequestLine0 "POST /cgi/search.cgi?key=foo HTTP/1.0"
-- ("POST","/cgi/search.cgi","?key=foo",HTTP/1.0)
-- >>> parseRequestLine0 "GET "
-- *** Exception: BadFirstLine "GET "
-- >>> parseRequestLine0 "GET /NotHTTP UNKNOWN/1.1"
-- *** Exception: NonHttp
parseRequestLine0 :: ByteString
-> IO (H.Method
,ByteString -- Path
,ByteString -- Query
,H.HttpVersion)
parseRequestLine0 s =
case filter (not . S.null) $ S.splitWith (\c -> c == 32 || c == 9) s of -- '
(method':query:http'') -> do
let !method = method'
!http' = S.concat http''
(!hfirst, !hsecond) = S.splitAt 5 http'
if hfirst == "HTTP/"
then let (!rpath, !qstring) = S.break (== 63) query -- '?'
!hv =
case hsecond of
"1.1" -> H.http11
_ -> H.http10
in return $! (method, rpath, qstring, hv)
else throwIO NonHttp
_ -> throwIO $ BadFirstLine $ B.unpack s
| creichert/wai | warp/bench/Parser.hs | mit | 7,847 | 0 | 18 | 2,191 | 1,894 | 990 | 904 | 141 | 4 |
module Tach.Impulse.Types.TimeValueSeriesSpec (main, spec) where
import Tach.Impulse.Types.TimeValueSeries
import Tach.Impulse.Types.TimeValue
import Control.Applicative
import qualified Data.Sequence as S
import Test.Hspec
import Test.QuickCheck
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "TVSimpleImpulseType" $ do
it "should Be Ordered By it's Date" $ do
property testTVSimpleimpulseType
-- |Wrapper so that I have a prop valued list
testTVSimpleimpulseType :: [Integer] -> Bool
testTVSimpleimpulseType il = rslt
where
tvsList = (\x -> TVSimple x 3 3.3) <$> il
tvsSequence = S.unstableSort (S.fromList tvsList)
    -- Compare each element with its predecessor, carrying the running result.
    (_,rslt) = S.foldlWithIndex (\(x,ok) _ y -> (y, ok && x `tr` y)) (S.index tvsSequence 0 ,True) tvsSequence
tr (TVSimple t _ _ ) (TVSimple t' _ _ ) = t <= t'
| smurphy8/tach | core-types/tach-impulse-series-types/test/Tach/Impulse/Types/TimeValueSeriesSpec.hs | mit | 848 | 0 | 13 | 175 | 270 | 150 | 120 | 20 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module UserInputParserSpec (spec) where
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
import Data.ByteString.Char8 hiding (filter, length)
import Test.Hspec.Attoparsec
import Pipes.ByteString hiding (filter, length, lines)
import Prelude hiding (readFile, putStrLn, lines)
import System.IO hiding (readFile, putStrLn, hGetContents)
import Pipes.Core
import Pipes.Prelude hiding (fromHandle, filter, length)
import Data.Text hiding (isInfixOf, isPrefixOf, length, filter, lines)
import Data.Text.Encoding
import Text.Parsec
import Text.Parsec.Text
import Text.Parsec.Error
import Data.Either
import UserInputParser
import RemoteConsole
import Event
spec :: Spec
spec = describe "UserInputParser" $ do
it "parse non-command input" $ (parse userInputParser "" "blabla") `shouldBe` (Right $ ServerCommand "blabla")
it "parse /findloc command" $ (parse userInputParser "" "/лок В таверне") `shouldBe` (Right $ FindLoc "В таверне")
it "parse /findloc without param" $ (parse userInputParser "" "/лок") `shouldBe` (Right $ FindLoc "")
it "parse /findloc without param" $ (parse userInputParser "" "/лок ") `shouldBe` (Right $ FindLoc "")
it "parse /conn command" $ (parse userInputParser "" "/conn") `shouldBe` (Right $ Connect)
it "parse /zap command" $ (parse userInputParser "" "/zap") `shouldBe` (Right $ Zap)
it "parse /path regex" $ (parse userInputParser "" "/путь В избе") `shouldBe` (Right $ FindPathTo "В избе")
it "parse /path toLocId" $ (parse userInputParser "" "/путь 34546") `shouldBe` (Right $ FindPathToLocId (LocationId 34546))
it "parse /path fromLocId toLocId" $ (parse userInputParser "" "/путь 111 222") `shouldBe` (Right $ mkFindPathFromTo 111 222)
it "parse /path fromLocId toLocId " $ (parse userInputParser "" "/путь 1 2 ") `shouldBe` (Right $ mkFindPathFromTo 1 2)
it "parse /path str toId" $ (parse userInputParser "" "/путь a 2") `shouldBe` (Right $ FindPathTo "a 2")
it "parse /path fromId src" $ (parse userInputParser "" "/путь 1 b ") `shouldBe` (Right $ FindPathTo "1 b")
it "parse /path" $ isLeft $ (parse userInputParser "" "/путь")
it "parse /go locId command" $ (parse userInputParser "" "/го 34546") `shouldBe` (Right $ GoToLocId $ LocationId 34546)
it "parse /go regex command" $ (parse userInputParser "" "/го В избе") `shouldBe` (Right $ GoTo "В избе")
it "parse empty user input" $ (parse userInputParser "" "") `shouldBe` (Right $ ServerCommand "")
it "parse /where mob command" $ (parse userInputParser "" "/где моб муха") `shouldBe` (Right $ WhereMob "муха")
it "return error in case of misspelled command" $ isLeft $ parse userInputParser "" "/unknowncommand blabla"
mkFindPathFromTo :: Int -> Int -> UserCommand
mkFindPathFromTo from to = FindPathFromTo (LocationId from) (LocationId to)
| tort/mud-drifter | test/UserInputParserSpec.hs | mit | 2,953 | 0 | 13 | 455 | 868 | 460 | 408 | 43 | 1 |
{-# LANGUAGE JavaScriptFFI #-}
-- | FIXME: doc
module GHCJS.Electron.BrowserWindow where
| taktoa/ghcjs-electron | src/GHCJS/Electron/BrowserWindow.hs | mit | 90 | 0 | 3 | 12 | 9 | 7 | 2 | 2 | 0 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module ForwardVector where
import Numeric.AD.Internal.Forward.Double
import Numeric.AD.Internal.On
import Numeric.AD.Mode
import Numeric.AD.Internal.Identity
import Numeric.AD.Jacobian
import Control.Monad
import Data.Vector.Unboxed.Deriving
import Data.Vector.Unboxed as U
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic
import qualified Data.Vector.Generic.Mutable
import qualified Data.Vector.Fusion.Stream as S
import qualified Data.Vector.Fusion.Stream.Size as Size
derivingUnbox "ForwardVector"
[t| ForwardDouble -> (Double, Double) |]
[| \ (ForwardDouble a b) -> (a,b) |]
[| \ (a, b) -> (ForwardDouble a b) |]
diff :: (ForwardDouble -> Vector ForwardDouble) -> Double -> Vector Double
diff f x = G.map tangent (f (bundle x 1))
{- |
>>> diff fun 2
fromList [1.0,4.0,12.0,32.0]
-}
fun x = U.fromList [x, x^2, x^3, x^4]
zipWithRecycle f xs ys
| m < n = G.generate n $ \i -> f (xs G.! mod i m) (ys G.! i)
| otherwise = G.generate m $ \i -> f (xs G.! i) (ys G.! mod i n)
where (m, n) = (G.length xs, G.length ys)
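-- Recycles the shorter input, e.g. (illustrative):
-- zipWithRecycle (+) (U.fromList [1,2,3,4]) (U.fromList [10,20]) == U.fromList [11,22,13,24]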
instance Mode (Vector ForwardDouble) where
type Scalar (Vector ForwardDouble) = Double
auto = G.singleton . auto
zero = G.singleton zero
isKnownZero = G.all isKnownZero
isKnownConstant = G.all isKnownConstant
a *^ xs = G.map (a*^) xs
xs ^* a = G.map (^* a) xs
xs ^/ a = G.map (^/ a) xs
instance Jacobian (Vector ForwardDouble) where
type D (Vector ForwardDouble) = Id Double
unary f a b = G.map (unary f a) b
lift1 f df = G.map (lift1 f df)
lift1_ f df = G.map (lift1_ f df)
binary f da db = zipWithRecycle (binary f da db)
lift2 f df = zipWithRecycle (lift2 f df)
lift2_ f df = zipWithRecycle (lift2_ f df)
instance Num (Vector ForwardDouble) where
fromInteger 0 = zero
fromInteger n = auto (fromInteger n)
(+) = binary (+) 1 1
(-) = binary (-) (auto 1) (auto (-1)) -- TODO: <-> ? as it is, this might be pretty bad for Tower
(*) = lift2 (*) (\x y -> (y, x))
negate = lift1 negate (const (auto (-1)))
abs = lift1 abs signum
signum a = lift1 signum (const zero) a
instance Fractional (Vector ForwardDouble) where
fromRational 0 = zero
fromRational r = auto (fromRational r)
x / y = x * recip y
recip = lift1_ recip (const . negate . join (*))
instance Floating (Vector ForwardDouble) where
pi = auto pi
exp = lift1_ exp const
log = lift1 log recip
logBase x y = log y / log x
sqrt = lift1_ sqrt (\z _ -> recip (auto 2 * z))
(**) = zipWithRecycle (**)
sin = lift1 sin cos
cos = lift1 cos $ negate . sin
tan = lift1 tan $ recip . join (*) . cos
asin = lift1 asin $ \x -> recip (sqrt (auto 1 - join (*) x))
acos = lift1 acos $ \x -> negate (recip (sqrt (1 - join (*) x)))
atan = lift1 atan $ \x -> recip (1 + join (*) x)
sinh = lift1 sinh cosh
cosh = lift1 cosh sinh
tanh = lift1 tanh $ recip . join (*) . cosh
asinh = lift1 asinh $ \x -> recip (sqrt (1 + join (*) x))
acosh = lift1 acosh $ \x -> recip (sqrt (join (*) x - 1))
atanh = lift1 atanh $ \x -> recip (1 - join (*) x)
| fhaust/aer-utils | src/ForwardVector.hs | mit | 3,403 | 0 | 15 | 862 | 1,384 | 739 | 645 | 81 | 1 |
module Main where
import Control.Concurrent.MVar (newMVar)
import Server.Web (runServer)
import Server.Types (newServerState)
import Server.ProgramOptions (parseOptions)
main :: IO ()
main = do
programOptions <- parseOptions
serverState <- newMVar newServerState
runServer serverState programOptions
| best-coloc-ever/twitch-cast | streamer/app/Main.hs | mit | 336 | 0 | 8 | 68 | 85 | 46 | 39 | 10 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Writer.Formats.Acacia
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein (klein@react.uni-saarland.de)
--
-- Transforms a specification to the Acacia+ format.
--
-----------------------------------------------------------------------------
module Writer.Formats.Acacia where
-----------------------------------------------------------------------------
import Config
import Simplify
import Data.LTL
import Data.Error
import Data.Specification
import Writer.Eval
import Writer.Data
import Writer.Utils
-----------------------------------------------------------------------------
-- | Acacia / Acacia+ operator configuration.
opConfig
:: OperatorConfig
opConfig = OperatorConfig
{ tTrue = "true"
, fFalse = "false"
, opNot = UnaryOp "!" 1
, opAnd = BinaryOp "*" 3 AssocLeft
, opOr = BinaryOp "+" 3 AssocLeft
, opImplies = BinaryOp "->" 3 AssocLeft
, opEquiv = BinaryOp "<->" 3 AssocLeft
, opNext = UnaryOp "X" 1
, opPrevious = UnaryOpUnsupported
, opFinally = UnaryOp "F" 1
, opGlobally = UnaryOp "G" 1
, opHistorically = UnaryOpUnsupported
, opOnce = UnaryOpUnsupported
, opUntil = BinaryOp "U" 2 AssocLeft
, opRelease = BinaryOpUnsupported
, opWeak = BinaryOpUnsupported
, opSince = BinaryOpUnsupported
, opTriggered = BinaryOpUnsupported
}
-----------------------------------------------------------------------------
-- | Acacia / Acacia+ writer.
writeFormat
:: Configuration -> Specification -> Either Error String
writeFormat c s = do
(es1,ss1,rs1,as1,is1,gs1) <-
eval c s
as2 <- mapM (simplify (adjust c opConfig) . adjustAtomic) $
case ss1 of
[] -> filter (/= FFalse) $ es1 ++ map fGlobally rs1 ++ as1
_ -> filter (/= FFalse) $ es1 ++
map (\f -> fOr [fNot $ fAnd ss1, f])
(map fGlobally rs1 ++ as1)
is2 <- mapM (simplify (adjust c opConfig) . fGlobally . adjustAtomic) is1
gs2 <- mapM (simplify (adjust c opConfig) . adjustAtomic) (gs1 ++ ss1)
as3 <- mapM (printFormula opConfig (outputMode c) (quoteMode c)) as2
is3 <- mapM (printFormula opConfig (outputMode c) (quoteMode c)) is2
gs3 <- mapM (printFormula opConfig (outputMode c) (quoteMode c)) gs2
let
as4 = map (\x -> "assume " ++ x ++ ";") as3
is4 = map (++ ";") is3
gs4 = map (++ ";") gs3
xs = case as4 ++ is4 ++ gs4 of
[] -> []
ys -> map (++ "\n") $ filter nonempty $ (init ys) ++ [last ys]
return $ concat xs
where
nonempty = any (\x -> x /= ' ' && x /= '\t')
adjustAtomic fml = case fml of
Not (Atomic (Output x)) -> Atomic (Output ("(" ++ x ++ "=0)"))
Not (Atomic (Input x)) -> Atomic (Input ("(" ++ x ++ "=0)"))
Atomic (Output x) -> Atomic (Output ("(" ++ x ++ "=1)"))
Atomic (Input x) -> Atomic (Input ("(" ++ x ++ "=1)"))
_ -> applySub adjustAtomic fml
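    -- For instance (signal name made up): adjustAtomic (Not (Atomic (Output "grant")))
    -- rewrites to Atomic (Output "(grant=0)"), i.e. Acacia's signal=value syntax.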
-----------------------------------------------------------------------------
| reactive-systems/syfco | src/lib/Writer/Formats/Acacia.hs | mit | 3,221 | 0 | 19 | 825 | 934 | 492 | 442 | 61 | 7 |
module Interpreter.DataTypes
( Frame
, Environment(..)
, Expression(..)
, addBinding
, lookupValue
, extendEnvironment
, pairToList
) where
import qualified Data.Map as Map
-- A frame contains mappings from variable names to Lisp values.
type Frame = Map.Map String Expression
-- An Environment is a frame coupled with a parent environment.
data Environment = EmptyEnvironment
| Environment Frame Environment
-- The Expression data type defines the elements of the abstract syntax tree
-- and the runtime types manipulated by the Lisp system.
data Expression = Null
| Number Double
| Boolean Bool
| Variable String
| Pair Expression Expression
| Exception String
| Lambda [Expression] Expression
| PrimitiveProcedure ([Expression] -> Expression)
| Application Expression [Expression]
| Definition Expression Expression
| If Expression Expression Expression
| Cond [(Expression, Expression)]
instance Show Expression where
show = showExpression
-- A function that recursively converts a Lisp Expression to a
-- String representation.
showExpression :: Expression -> String
showExpression (Null) = "null"
showExpression (Number number) = show number
showExpression (Boolean bool)
  | bool      = "#t"
| otherwise = "#f"
showExpression (Variable variable) = variable
showExpression (Exception message) = "#Exception: " ++ "'" ++ message ++ "'"
showExpression pair@(Pair first second)
| isList pair = "(" ++ (showPairList pair) ++ ")"
| otherwise = "(" ++ (show first) ++ " . " ++ (show second) ++ ")"
showExpression (Lambda parameters body) = "#CompoundProcedure"
showExpression (PrimitiveProcedure _) = "#PrimitiveProcedure"
showExpression (Application operator operands) = "#Application"
showExpression (Definition variable value) = "#Definition"
showExpression _ = "#Unknown"
showPairList :: Expression -> String
showPairList Null = ""
showPairList (Pair first (Null)) = (show first)
showPairList (Pair first second) = (show first) ++ " " ++ (showPairList second)
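-- For example, show (Pair (Number 1) (Pair (Number 2) Null)) gives "(1.0 2.0)",
-- while an improper pair such as Pair (Number 1) (Number 2) shows as "(1.0 . 2.0)".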
-- Helper functions for environment manipulation.
addBinding :: Environment -> String -> Expression -> Environment
addBinding EmptyEnvironment _ _ = EmptyEnvironment
addBinding (Environment frame parent) name value = Environment newFrame parent
where newFrame = Map.insert name value frame
lookupValue :: Environment -> String -> Expression
lookupValue EmptyEnvironment variable = Exception ("Binding for " ++ variable ++ " not found.")
lookupValue (Environment frame parent) variable =
case value of
Just result -> result
Nothing -> lookupValue parent variable
where value = Map.lookup variable frame
extendEnvironment :: Environment -> [Expression] -> [Expression] -> Environment
extendEnvironment environment parameters arguments =
let params = map show parameters
in Environment (Map.fromList (zip params arguments)) environment
-- Helper functions for pair manipulation.
pairToList :: Expression -> [Expression]
pairToList Null = []
pairToList (Pair first rest) = first : pairToList rest
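-- e.g. pairToList (Pair (Boolean True) (Pair (Boolean False) Null)) yields
-- [Boolean True, Boolean False].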
isList :: Expression -> Bool
isList Null = True
isList (Pair _ second) = isList second
isList _ = False
| IvanIvanov/haskell-lisp-interpreter | src/DataTypes.hs | mit | 3,302 | 0 | 11 | 674 | 853 | 449 | 404 | 68 | 2 |
-- TODO: Write module docs.
module Main where
import Lib
main :: IO ()
main = do
someFunc
| xnil/ply | app/Main.hs | mit | 96 | 0 | 6 | 24 | 26 | 15 | 11 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Graph.JSON.Cypher.Read.Rows where
import Data.Aeson
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust, mapMaybe)
import Data.Set (Set)
import qualified Data.Vector as V
import Control.Map (snarf)
-- Reads in rows of JSON from a Cypher query result
data GraphResults a = GR { results :: [TabledResults a], errors :: [Errors] }
deriving Show
instance FromJSON a => FromJSON (GraphResults a) where
parseJSON (Object v) = GR <$> v .: "results" <*> v .: "errors"
data TabledResults a = Table { columns :: [String], rows :: [TableRow a] }
deriving Show
instance FromJSON a => FromJSON (TabledResults a) where
parseJSON (Object v) = Table <$> v .: "columns" <*> v .: "data"
type Errors = Object
data TableRow a = TR { row :: a } deriving Show
instance FromJSON a => FromJSON (TableRow a) where
parseJSON (Object v) = TR <$> v .: "row"
{--
>>> getGraphResponse url ["match ()-[]->(n) where not (n)-[]->() return n.letter"]
>>> let ans = it
--}
type QueryResult = String
justRows :: FromJSON a => QueryResult -> [TableRow a]
justRows = rows . head . results . fromJust . decode . BL.pack
{--
>>> concat $ map (head . row) ((justRows ans) :: [TableRow [String]])
"HVFLPJOYCXBZQ"
--}
-- to parse pathed result-sets:
unarray :: [Value] -> Parser [Value]
unarray = withArray "an array of arrays" (return . V.toList) . head
{--
Another, more complex, example:
From the Cypher query:
>>> getGraphResponse url ["match p=(:START_HERE)-[*]->(l:Letter { letter: 'Q' }) return p"]
is a sample row returned of:
theLetterQ :: String
theLetterQ = "[[{},{\"rep\":\"-\"},{\"letter\":\"T\"},{\"rep\":\"-\"},"
++ "{\"letter\":\"M\"},{\"rep\":\".\"},{\"letter\":\"G\"},"
++ "{\"rep\":\"-\"},{\"letter\":\"Q\"}]]"
(note that an array of arrays is returned for paths)
Given the parsers:
instance FromJSON Path where
parseJSON = withArray "morse code" $ pathify . unarray . V.toList
pathify :: Parser [Value] -> Parser Path
pathify pvs = pvs >>= pathy' [] . tail
pathy' :: [MorsePair] -> [Value] -> Parser Path
pathy' path [] = return (Path $ reverse path)
pathy' acc (a:b:rest) =
parseJSON a >>= \m ->
parseJSON b >>= \l ->
pathy' ((m, l):acc) rest
instance FromJSON Morse where
parseJSON = withObject "da-dit" $ \v -> v .: "rep" >>= return . read
instance FromJSON Letter where
parseJSON = withObject "ltr" $ \v -> Chr <$> v .: "letter"
we can do:
>>> getGraphResponse url [cyphQuery]
>>> let padme = (justRows it) :: [TableRow Path]
>>> let nmt = newMorseTable (map row padme)
>>> take 5 $ Map.toList nmt
[('A',.-),('B',-...),('C',-.-.),('D',-..),('E',.)]
>>> Map.size nmt
26
--}
-- Here's another decoding approach:
mapBy :: Ord a => ([Value] -> Maybe (a,b)) -> QueryResult -> Map a b
mapBy pairf = Map.fromList . mapMaybe (pairf . row) . justRows
mapIt :: (FromJSON a, FromJSON b, Ord a) => QueryResult -> Map a b
mapIt = mapBy toPair
toPair :: (FromJSON a, FromJSON b) => [Value] -> Maybe (a,b)
toPair [a,b] = fromJSON1 a >>= \alef ->
fromJSON1 b >>= \beth ->
return (alef, beth)
fromJSON1 :: FromJSON a => Value -> Maybe a
fromJSON1 = reifySuccess . fromJSON
reifySuccess :: Result a -> Maybe a
reifySuccess (Success a) = Just a
reifySuccess _ = Nothing
-- and, if we want to map rows to a multimap:
multimap :: (FromJSON a, FromJSON b, Ord a, Ord b) => QueryResult -> Map a (Set b)
multimap = multimapBy toPair
multimapBy :: (Ord a, Ord b) =>
([Value] -> Maybe (a, b)) -> QueryResult -> Map a (Set b)
multimapBy f = snarf id . map (f . row) . justRows
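{--
A usage sketch (illustrative, assuming a two-column query result named ans):
>>> mapIt ans :: Map String Int -- pairs column one with column two; later rows win on duplicate keys
>>> multimap ans :: Map String (Set Int) -- collects every column-two value per key
--}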
| geophf/1HaskellADay | exercises/HAD/Graph/JSON/Cypher/Read/Rows.hs | mit | 3,740 | 0 | 10 | 747 | 844 | 461 | 383 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-ipv6add.html
module Stratosphere.ResourceProperties.EC2LaunchTemplateIpv6Add where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2LaunchTemplateIpv6Add. See
-- 'ec2LaunchTemplateIpv6Add' for a more convenient constructor.
data EC2LaunchTemplateIpv6Add =
EC2LaunchTemplateIpv6Add
{ _eC2LaunchTemplateIpv6AddIpv6Address :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON EC2LaunchTemplateIpv6Add where
toJSON EC2LaunchTemplateIpv6Add{..} =
object $
catMaybes
[ fmap (("Ipv6Address",) . toJSON) _eC2LaunchTemplateIpv6AddIpv6Address
]
-- | Constructor for 'EC2LaunchTemplateIpv6Add' containing required fields as
-- arguments.
ec2LaunchTemplateIpv6Add
:: EC2LaunchTemplateIpv6Add
ec2LaunchTemplateIpv6Add =
EC2LaunchTemplateIpv6Add
{ _eC2LaunchTemplateIpv6AddIpv6Address = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-ipv6add.html#cfn-ec2-launchtemplate-ipv6add-ipv6address
ecltiaIpv6Address :: Lens' EC2LaunchTemplateIpv6Add (Maybe (Val Text))
ecltiaIpv6Address = lens _eC2LaunchTemplateIpv6AddIpv6Address (\s a -> s { _eC2LaunchTemplateIpv6AddIpv6Address = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EC2LaunchTemplateIpv6Add.hs | mit | 1,429 | 0 | 12 | 157 | 173 | 100 | 73 | 22 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, BangPatterns, MagicHash, UnboxedTuples #-}
module Data.Word.Word24.Internal
( Word24(..)
, byteSwap24
) where
import Data.X24Bit.Internal
import GHC.Word
import Data.Bits
import Data.Maybe
import GHC.Base
import GHC.Enum
import GHC.Num
import GHC.Real
import GHC.Read
import GHC.Arr
import GHC.Show
------------------------------------------------------------------------
-- type Word24
------------------------------------------------------------------------
-- Word24 is represented in the same way as Word. Operations may assume
-- and must ensure that it holds only values from its logical range.
data Word24 = W24# Word# deriving (Eq, Ord)
-- ^ 24-bit unsigned integer type
instance Show Word24 where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
instance Num Word24 where
(W24# x#) + (W24# y#) = W24# (narrow24Word# (x# `plusWord#` y#))
(W24# x#) - (W24# y#) = W24# (narrow24Word# (x# `minusWord#` y#))
(W24# x#) * (W24# y#) = W24# (narrow24Word# (x# `timesWord#` y#))
negate (W24# x#) = W24# (narrow24Word# (int2Word# (negateInt# (word2Int# x#))))
abs x = x
signum 0 = 0
signum _ = 1
fromInteger i = W24# (narrow24Word# (integerToWord i))
instance Real Word24 where
toRational x = toInteger x % 1
instance Enum Word24 where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Word24"
pred x
| x /= minBound = x - 1
| otherwise = predError "Word24"
toEnum i@(I# i#)
| i >= 0 && i <= fromIntegral (maxBound::Word24)
= W24# (int2Word# i#)
| otherwise = toEnumError "Word24" i (minBound::Word24, maxBound::Word24)
fromEnum (W24# x#) = I# (word2Int# x#)
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
instance Integral Word24 where
quot (W24# x#) y@(W24# y#)
| y /= 0 = W24# (x# `quotWord#` y#)
| otherwise = divZeroError
rem (W24# x#) y@(W24# y#)
| y /= 0 = W24# (x# `remWord#` y#)
| otherwise = divZeroError
div (W24# x#) y@(W24# y#)
| y /= 0 = W24# (x# `quotWord#` y#)
| otherwise = divZeroError
mod (W24# x#) y@(W24# y#)
| y /= 0 = W24# (x# `remWord#` y#)
| otherwise = divZeroError
quotRem (W24# x#) y@(W24# y#)
| y /= 0 = case x# `quotRemWord#` y# of
(# q, r #) ->
(W24# q, W24# r)
| otherwise = divZeroError
divMod (W24# x#) y@(W24# y#)
| y /= 0 = (W24# (x# `quotWord#` y#), W24# (x# `remWord#` y#))
| otherwise = divZeroError
toInteger (W24# x#) = smallInteger (word2Int# x#)
instance Bounded Word24 where
minBound = 0
    maxBound = 0xFFFFFF
instance Ix Word24 where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral (i - m)
inRange (m,n) i = m <= i && i <= n
instance Read Word24 where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
instance Bits Word24 where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(W24# x#) .&. (W24# y#) = W24# (x# `and#` y#)
(W24# x#) .|. (W24# y#) = W24# (x# `or#` y#)
(W24# x#) `xor` (W24# y#) = W24# (x# `xor#` y#)
complement (W24# x#) = W24# (x# `xor#` mb#)
where !(W24# mb#) = maxBound
(W24# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = W24# (narrow24Word# (x# `shiftL#` i#))
| otherwise = W24# (x# `shiftRL#` negateInt# i#)
(W24# x#) `shiftL` (I# i#) = W24# (narrow24Word# (x# `shiftL#` i#))
(W24# x#) `unsafeShiftL` (I# i#) =
W24# (narrow24Word# (x# `uncheckedShiftL#` i#))
(W24# x#) `shiftR` (I# i#) = W24# (x# `shiftRL#` i#)
(W24# x#) `unsafeShiftR` (I# i#) = W24# (x# `uncheckedShiftRL#` i#)
(W24# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#) = W24# x#
| otherwise = W24# (narrow24Word# ((x# `uncheckedShiftL#` i'#) `or#`
(x# `uncheckedShiftRL#` (24# -# i'#))))
where
!i'# = word2Int# (int2Word# i# `and#` 15##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = False
popCount (W24# x#) = I# (word2Int# (popCnt24# x#))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits Word24 where
finiteBitSize _ = 24
countLeadingZeros (W24# x#) = I# (word2Int# (clz24# x#))
countTrailingZeros (W24# x#) = I# (word2Int# (ctz24# x#))
-- | Swap bytes in 'Word24'.
byteSwap24 :: Word24 -> Word24
byteSwap24 (W24# w#) = W24# (narrow24Word# (byteSwap24# w#))
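-- e.g. byteSwap24 0x123456 should give 0x563412 (the three bytes reversed),
-- assuming 'byteSwap24#' from Data.X24Bit.Internal swaps exactly the low three bytes.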
{-# RULES
"fromIntegral/Word8->Word24" fromIntegral = \(W8# x#) -> W24# x#
"fromIntegral/Word24->Word24" fromIntegral = id :: Word24 -> Word24
"fromIntegral/Word24->Integer" fromIntegral = toInteger :: Word24 -> Integer
"fromIntegral/a->Word24" fromIntegral = \x -> case fromIntegral x of W# x# -> W24# (narrow24Word# x#)
"fromIntegral/Word24->a" fromIntegral = \(W24# x#) -> fromIntegral (W# x#)
#-}
{-# RULES
"properFraction/Float->(Word24,Float)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Word24) n, y :: Float) }
"truncate/Float->Word24"
truncate = (fromIntegral :: Int -> Word24) . (truncate :: Float -> Int)
"floor/Float->Word24"
floor = (fromIntegral :: Int -> Word24) . (floor :: Float -> Int)
"ceiling/Float->Word24"
ceiling = (fromIntegral :: Int -> Word24) . (ceiling :: Float -> Int)
"round/Float->Word24"
round = (fromIntegral :: Int -> Word24) . (round :: Float -> Int)
#-}
{-# RULES
"properFraction/Double->(Word24,Double)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Word24) n, y :: Double) }
"truncate/Double->Word24"
truncate = (fromIntegral :: Int -> Word24) . (truncate :: Double -> Int)
"floor/Double->Word24"
floor = (fromIntegral :: Int -> Word24) . (floor :: Double -> Int)
"ceiling/Double->Word24"
ceiling = (fromIntegral :: Int -> Word24) . (ceiling :: Double -> Int)
"round/Double->Word24"
round = (fromIntegral :: Int -> Word24) . (round :: Double -> Int)
#-}
| nickspinale/24-bit | src/Data/Word/Word24/Internal.hs | mit | 6,764 | 0 | 15 | 2,119 | 1,841 | 956 | 885 | 141 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html
module Stratosphere.ResourceProperties.Route53RecordSetAliasTarget where
import Stratosphere.ResourceImports
-- | Full data type definition for Route53RecordSetAliasTarget. See
-- 'route53RecordSetAliasTarget' for a more convenient constructor.
data Route53RecordSetAliasTarget =
Route53RecordSetAliasTarget
{ _route53RecordSetAliasTargetDNSName :: Val Text
, _route53RecordSetAliasTargetEvaluateTargetHealth :: Maybe (Val Bool)
, _route53RecordSetAliasTargetHostedZoneId :: Val Text
} deriving (Show, Eq)
instance ToJSON Route53RecordSetAliasTarget where
toJSON Route53RecordSetAliasTarget{..} =
object $
catMaybes
[ (Just . ("DNSName",) . toJSON) _route53RecordSetAliasTargetDNSName
, fmap (("EvaluateTargetHealth",) . toJSON) _route53RecordSetAliasTargetEvaluateTargetHealth
, (Just . ("HostedZoneId",) . toJSON) _route53RecordSetAliasTargetHostedZoneId
]
-- | Constructor for 'Route53RecordSetAliasTarget' containing required fields
-- as arguments.
route53RecordSetAliasTarget
:: Val Text -- ^ 'rrsatDNSName'
-> Val Text -- ^ 'rrsatHostedZoneId'
-> Route53RecordSetAliasTarget
route53RecordSetAliasTarget dNSNamearg hostedZoneIdarg =
Route53RecordSetAliasTarget
{ _route53RecordSetAliasTargetDNSName = dNSNamearg
, _route53RecordSetAliasTargetEvaluateTargetHealth = Nothing
, _route53RecordSetAliasTargetHostedZoneId = hostedZoneIdarg
}
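-- A construction sketch (hypothetical values; assumes the 'Literal' constructor
-- and lens's ('&'), ('?~') operators are in scope):
--
-- > route53RecordSetAliasTarget (Literal "dualstack.example.elb.amazonaws.com")
-- >                             (Literal "ZEXAMPLE123")
-- >   & rrsatEvaluateTargetHealth ?~ Literal True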
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html#cfn-route53-aliastarget-dnshostname
rrsatDNSName :: Lens' Route53RecordSetAliasTarget (Val Text)
rrsatDNSName = lens _route53RecordSetAliasTargetDNSName (\s a -> s { _route53RecordSetAliasTargetDNSName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html#cfn-route53-aliastarget-evaluatetargethealth
rrsatEvaluateTargetHealth :: Lens' Route53RecordSetAliasTarget (Maybe (Val Bool))
rrsatEvaluateTargetHealth = lens _route53RecordSetAliasTargetEvaluateTargetHealth (\s a -> s { _route53RecordSetAliasTargetEvaluateTargetHealth = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html#cfn-route53-aliastarget-hostedzoneid
rrsatHostedZoneId :: Lens' Route53RecordSetAliasTarget (Val Text)
rrsatHostedZoneId = lens _route53RecordSetAliasTargetHostedZoneId (\s a -> s { _route53RecordSetAliasTargetHostedZoneId = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/Route53RecordSetAliasTarget.hs | mit | 2,676 | 0 | 13 | 268 | 356 | 202 | 154 | 34 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
module FRP.Yampa.SDL.Graphic where
import Control.Monad (void)
import Graphics.UI.SDL as SDL
import Graphics.UI.SDL.TTF
type Screen = SDL.Surface
newtype Graphic = Graphic { paintGraphic :: Screen -> IO () }
type GraphicOpt = Rect -> Graphic
type GraphicUpdate = (GraphicOpt, Rect)
data Mask = Mask { maskClip :: Maybe Rect
, maskX :: !Int
, maskY :: !Int
} deriving (Show, Eq)
data Fill = Fill { fillClip :: Maybe Rect
, fillColor :: !Color
} deriving (Show, Eq)
data Text = Text { textMsg :: !String
, textFont :: !Font
, textColor :: !Color
} deriving (Show, Eq)
instance Eq Color where
(Color r1 g1 b1) == (Color r2 g2 b2) = r1 == r2 && g1 == g2 && b1 == b2
instance Show Color where
show (Color r g b) = "Color { " ++ show r ++ ", " ++ show g ++ ", " ++ show b ++ " }"
class Draw canvas mask where
draw :: canvas -> mask -> Graphic
over, under :: Graphic -> Graphic -> Graphic
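-- g1 `over` g2 paints g2 first and then g1, so g1 ends up on top; 'under' is the flip.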
(Graphic x) `over` (Graphic y) = Graphic $ \s -> y s >> x s
under = Prelude.flip over
emptyG :: Graphic
emptyG = Graphic $ \_ -> return ()
render :: Graphic -> Graphic
render (Graphic x) = Graphic $ \s -> x s >> SDL.flip s
withinBox :: Rect -> Graphic -> GraphicOpt
withinBox r g r' | r `intersect` r' = emptyG
| otherwise = g
overOpt :: GraphicOpt -> GraphicOpt -> GraphicOpt
overOpt g1 g2 r = g1 r `over` g2 r
overUpdate :: GraphicOpt -> GraphicUpdate -> GraphicUpdate
overUpdate g1 (g2, r) = (g1 `overOpt` g2, r)
intersect :: Rect -> Rect -> Bool
intersect r1 r2 = xintersect && yintersect
where
    -- Two closed intervals overlap iff either one's start lies inside the other.
    xintersect = x1 `between` (x2, w2) || x2 `between` (x1, w1)
    yintersect = y1 `between` (y2, h2) || y2 `between` (y1, h1)
x1 = rectX r1
x2 = rectX r2
y1 = rectY r1
y2 = rectY r2
w1 = x1 + rectW r1
w2 = x2 + rectW r2
h1 = y1 + rectH r1
h2 = y2 + rectH r2
between :: (Ord a) => a -> (a, a) -> Bool
between x (l, h) = x >= l && x <= h
instance Draw SDL.Surface Mask where
draw src mask = Graphic $ \dst -> void $ blitSurface src clip dst offset
where
clip = maskClip mask
offset = Just $ Rect (maskX mask) (maskY mask) 0 0
instance Draw Fill Mask where
draw fill mask = Graphic $ \dst -> pixel dst >>= \c -> void $ fillRect dst clip c
where
pixel dst = (mapRGB . surfaceGetPixelFormat) dst (colorRed color) (colorGreen color) (colorBlue color)
clip = fillClip fill
color = fillColor fill
instance Draw Text Mask where
draw text mask = Graphic $ \dst -> void $ blitText dst
where
blitText dst = do
txt <- renderTextSolid (textFont text) (textMsg text) (textColor text)
blitSurface txt clip dst offset
freeSurface txt
clip = maskClip mask
offset = Just $ Rect (maskX mask) (maskY mask) 0 0
| scan/yampa-sdl | src/FRP/Yampa/SDL/Graphic.hs | mit | 3,094 | 0 | 13 | 966 | 1,170 | 619 | 551 | 83 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-spotfleet-spotfleetrequestconfigdata-launchspecifications-networkinterfaces-privateipaddresses.html
module Stratosphere.ResourceProperties.EC2SpotFleetPrivateIpAddressSpecification where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2SpotFleetPrivateIpAddressSpecification.
-- See 'ec2SpotFleetPrivateIpAddressSpecification' for a more convenient
-- constructor.
data EC2SpotFleetPrivateIpAddressSpecification =
EC2SpotFleetPrivateIpAddressSpecification
{ _eC2SpotFleetPrivateIpAddressSpecificationPrimary :: Maybe (Val Bool)
, _eC2SpotFleetPrivateIpAddressSpecificationPrivateIpAddress :: Val Text
} deriving (Show, Eq)
instance ToJSON EC2SpotFleetPrivateIpAddressSpecification where
toJSON EC2SpotFleetPrivateIpAddressSpecification{..} =
object $
catMaybes
[ fmap (("Primary",) . toJSON) _eC2SpotFleetPrivateIpAddressSpecificationPrimary
, (Just . ("PrivateIpAddress",) . toJSON) _eC2SpotFleetPrivateIpAddressSpecificationPrivateIpAddress
]
-- | Constructor for 'EC2SpotFleetPrivateIpAddressSpecification' containing
-- required fields as arguments.
ec2SpotFleetPrivateIpAddressSpecification
:: Val Text -- ^ 'ecsfpiasPrivateIpAddress'
-> EC2SpotFleetPrivateIpAddressSpecification
ec2SpotFleetPrivateIpAddressSpecification privateIpAddressarg =
EC2SpotFleetPrivateIpAddressSpecification
{ _eC2SpotFleetPrivateIpAddressSpecificationPrimary = Nothing
, _eC2SpotFleetPrivateIpAddressSpecificationPrivateIpAddress = privateIpAddressarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-spotfleet-spotfleetrequestconfigdata-launchspecifications-networkinterfaces-privateipaddresses.html#cfn-ec2-spotfleet-privateipaddressspecification-primary
ecsfpiasPrimary :: Lens' EC2SpotFleetPrivateIpAddressSpecification (Maybe (Val Bool))
ecsfpiasPrimary = lens _eC2SpotFleetPrivateIpAddressSpecificationPrimary (\s a -> s { _eC2SpotFleetPrivateIpAddressSpecificationPrimary = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-spotfleet-spotfleetrequestconfigdata-launchspecifications-networkinterfaces-privateipaddresses.html#cfn-ec2-spotfleet-privateipaddressspecification-privateipaddress
ecsfpiasPrivateIpAddress :: Lens' EC2SpotFleetPrivateIpAddressSpecification (Val Text)
ecsfpiasPrivateIpAddress = lens _eC2SpotFleetPrivateIpAddressSpecificationPrivateIpAddress (\s a -> s { _eC2SpotFleetPrivateIpAddressSpecificationPrivateIpAddress = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EC2SpotFleetPrivateIpAddressSpecification.hs | mit | 2,706 | 0 | 13 | 213 | 266 | 152 | 114 | 28 | 1 |
-- Copyright (c) 2014 Curtis Gagliardi
-- Permission is hereby granted, free of charge, to any person obtaining
-- a copy of this software and associated documentation files (the
-- "Software"), to deal in the Software without restriction, including
-- without limitation the rights to use, copy, modify, merge, publish,
-- distribute, sublicense, and/or sell copies of the Software, and to
-- permit persons to whom the Software is furnished to do so, subject to
-- the following conditions:
-- The above copyright notice and this permission notice shall be included
-- in all copies or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-- IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-- CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-- TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-- SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{-# LANGUAGE ViewPatterns #-}
module FuzzyMatchScore where
import Control.Parallel.Strategies
import qualified Data.Text as T
-- TODO: add tests
minMatchLength :: T.Text -> T.Text -> Int
minMatchLength (T.uncons -> Nothing) _ = 1
minMatchLength _ (T.uncons -> Nothing) = 0
minMatchLength (T.uncons -> Just (qHead, rest)) choice =
let matchLengths = filter (>0)
. map (\t -> endMatch rest (T.drop 1 t) 1)
. filter ((== qHead) . T.head)
. filter (not . T.null)
$ T.tails choice
in if null matchLengths
then 0
else minimum matchLengths
where
endMatch :: T.Text -> T.Text -> Int -> Int
endMatch (T.uncons -> Nothing) _ lastIndex = lastIndex
endMatch (T.uncons -> Just (q, qs)) s lastIndex =
case T.findIndex (== q) s of
Just i -> endMatch qs (T.drop (i + 1) s) (i + 1 + lastIndex)
Nothing -> 0
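-- Worked examples (illustrative, hand-computed):
--   minMatchLength "ab" "ab"  == 2  -- consecutive match
--   minMatchLength "ab" "acb" == 3  -- one extra character inside the match window
--   minMatchLength "ab" "ba"  == 0  -- no left-to-right match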
normalizeScore :: Int -> T.Text -> T.Text -> Double
normalizeScore matchLength query choice
| matchLength <= 0 = 0
| otherwise =
fromIntegral (T.length query)
/ fromIntegral matchLength -- penalize longer match lengths
/ fromIntegral (T.length choice) -- penalize longer choice strings
score :: T.Text -> T.Text -> Double
score q choice
| T.null q = 1
| T.null choice = 0
| otherwise = let minLength = minMatchLength q (T.toLower choice)
in normalizeScore minLength q choice
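-- e.g. score "ab" "ab" == 0.5: query length 2 / match length 2 / choice length 2.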
scoreAll :: T.Text -> [T.Text] -> [(T.Text, Double)]
scoreAll query choices =
map (\choice -> (choice, score (T.toLower query) choice)) choices
`using` parListChunk 1000 rdeepseq
| ethercrow/yi-config | modules/FuzzyMatchScore.hs | gpl-2.0 | 2,852 | 0 | 19 | 722 | 631 | 333 | 298 | 39 | 4 |
-----------------------------------------------------------------------------
-- |
-- Module : Hie.Language.Haskell.Exts.Annotated.ExactPrint
-- Copyright : (c) Niklas Broberg 2009
-- License : BSD-style (see the file LICENSE.txt)
--
-- Maintainer : Niklas Broberg, d00nibro@chalmers.se
-- Stability : stable
-- Portability : portable
--
-- Exact-printer for Haskell abstract syntax. The input is a (semi-concrete)
-- abstract syntax tree, annotated with exact source information to enable
-- printing the tree exactly as it was parsed.
--
-----------------------------------------------------------------------------
module Hie.Language.Haskell.Exts.Annotated.ExactPrint
( exactPrint
, ExactP
) where
import Hie.Language.Haskell.Exts.Annotated.Syntax
import Hie.Language.Haskell.Exts.SrcLoc
import Hie.Language.Haskell.Exts.Comments
import Control.Monad (when)
import Control.Arrow ((***), (&&&))
import Data.List (intersperse)
-- import Debug.Trace (trace)
------------------------------------------------------
-- The EP monad and basic combinators
type Pos = (Int,Int)
pos :: (SrcInfo loc) => loc -> Pos
pos ss = (startLine ss, startColumn ss)
newtype EP x = EP (Pos -> [Comment] -> (x, Pos, [Comment], ShowS))
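-- EP threads the current output position and the not-yet-printed comments,
-- and accumulates the rendered output as a ShowS difference list.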
instance Monad EP where
return x = EP $ \l cs -> (x, l, cs, id)
EP m >>= k = EP $ \l0 c0 -> let
(a, l1, c1, s1) = m l0 c0
EP f = k a
(b, l2, c2, s2) = f l1 c1
in (b, l2, c2, s1 . s2)
runEP :: EP () -> [Comment] -> String
runEP (EP f) cs = let (_,_,_,s) = f (1,1) cs in s ""
getPos :: EP Pos
getPos = EP (\l cs -> (l,l,cs,id))
setPos :: Pos -> EP ()
setPos l = EP (\_ cs -> ((),l,cs,id))
printString :: String -> EP ()
printString str = EP (\(l,c) cs -> ((), (l,c+length str), cs, showString str))
getComment :: EP (Maybe Comment)
getComment = EP $ \l cs ->
let x = case cs of
c:_ -> Just c
_ -> Nothing
in (x, l, cs, id)
dropComment :: EP ()
dropComment = EP $ \l cs ->
let cs' = case cs of
(_:cs) -> cs
_ -> cs
in ((), l, cs', id)
newLine :: EP ()
newLine = do
(l,_) <- getPos
printString "\n"
setPos (l+1,1)
padUntil :: Pos -> EP ()
padUntil (l,c) = do
(l1,c1) <- getPos
case {- trace (show ((l,c), (l1,c1))) -} () of
_ {-()-} | l1 >= l && c1 <= c -> printString $ replicate (c - c1) ' '
| l1 < l -> newLine >> padUntil (l,c)
| otherwise -> return ()
mPrintComments :: Pos -> EP ()
mPrintComments p = do
mc <- getComment
case mc of
Nothing -> return ()
Just (Comment multi s str) ->
when (pos s < p) $ do
dropComment
padUntil (pos s)
printComment multi str
setPos (srcSpanEndLine s, srcSpanEndColumn s)
mPrintComments p
printComment :: Bool -> String -> EP ()
printComment b str
| b = printString $ "{-" ++ str ++ "-}"
| otherwise = printString $ "--" ++ str
printWhitespace :: Pos -> EP ()
printWhitespace p = mPrintComments p >> padUntil p
printStringAt :: Pos -> String -> EP ()
printStringAt p str = printWhitespace p >> printString str
errorEP :: String -> EP a
errorEP = fail
------------------------------------------------------------------------------
-- Printing of source elements
-- | Print an AST exactly as specified by the annotations on the nodes in the tree.
exactPrint :: (ExactP ast) => ast SrcSpanInfo -> [Comment] -> String
exactPrint ast cs = runEP (exactPC ast) cs
exactPC :: (ExactP ast) => ast SrcSpanInfo -> EP ()
exactPC ast = let p = pos (ann ast) in mPrintComments p >> padUntil p >> exactP ast
printSeq :: [(Pos, EP ())] -> EP ()
printSeq [] = return ()
printSeq ((p,pr):xs) = printWhitespace p >> pr >> printSeq xs
printStrs :: SrcInfo loc => [(loc, String)] -> EP ()
printStrs = printSeq . map (pos *** printString)
printPoints :: SrcSpanInfo -> [String] -> EP ()
printPoints l = printStrs . zip (srcInfoPoints l)
printInterleaved, printInterleaved' :: (Annotated ast, ExactP ast, SrcInfo loc) => [(loc, String)] -> [ast SrcSpanInfo] -> EP ()
printInterleaved sistrs asts = printSeq $
interleave (map (pos *** printString ) sistrs)
(map (pos . ann &&& exactP) asts)
printInterleaved' sistrs (a:asts) = exactPC a >> printInterleaved sistrs asts
printStreams :: [(Pos, EP ())] -> [(Pos, EP ())] -> EP ()
printStreams [] ys = printSeq ys
printStreams xs [] = printSeq xs
printStreams (x@(p1,ep1):xs) (y@(p2,ep2):ys)
| p1 <= p2 = printWhitespace p1 >> ep1 >> printStreams xs (y:ys)
| otherwise = printWhitespace p2 >> ep2 >> printStreams (x:xs) ys
interleave :: [a] -> [a] -> [a]
interleave [] ys = ys
interleave xs [] = xs
interleave (x:xs) (y:ys) = x:y: interleave xs ys
maybeEP :: (a -> EP ()) -> Maybe a -> EP ()
maybeEP = maybe (return ())
bracketList :: (Annotated ast, ExactP ast) => (String, String, String) -> [SrcSpan] -> [ast SrcSpanInfo] -> EP ()
bracketList (a,b,c) poss asts = printInterleaved (pList poss (a,b,c)) asts
pList (p:ps) (a,b,c) = (p,a) : pList' ps (b,c)
pList' [] _ = []
pList' [p] (_,c) = [(p,c)]
pList' (p:ps) (b,c) = (p, b) : pList' ps (b,c)
parenList, squareList, curlyList :: (Annotated ast, ExactP ast) => [SrcSpan] -> [ast SrcSpanInfo] -> EP ()
parenList = bracketList ("(",",",")")
squareList = bracketList ("[",",","]")
curlyList = bracketList ("{",",","}")
layoutList :: (Functor ast, Show (ast ()), Annotated ast, ExactP ast) => [SrcSpan] -> [ast SrcSpanInfo] -> EP ()
layoutList poss asts = printStreams
(map (pos *** printString) $ lList poss)
(map (pos . ann &&& exactP) asts)
lList (p:ps) = (if isNullSpan p then (p,"") else (p,"{")) : lList' ps
lList' [] = []
lList' [p] = [if isNullSpan p then (p,"") else (p,"}")]
lList' (p:ps) = (if isNullSpan p then (p,"") else (p,";")) : lList' ps
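-- layoutList merges layout tokens ("{", ";", "}") with the printed elements in
-- source order; null spans stand for virtual layout tokens and print as nothing.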
--------------------------------------------------
-- Exact printing
class Annotated ast => ExactP ast where
exactP :: ast SrcSpanInfo -> EP ()
instance ExactP Literal where
exactP lit = case lit of
Char _ _ rw -> printString ('\'':rw ++ "\'")
String _ _ rw -> printString ('\"':rw ++ "\"")
Int _ _ rw -> printString (rw)
Frac _ _ rw -> printString (rw)
PrimInt _ _ rw -> printString (rw ++ "#" )
PrimWord _ _ rw -> printString (rw ++ "##")
PrimFloat _ _ rw -> printString (rw ++ "#" )
PrimDouble _ _ rw -> printString (rw ++ "##")
PrimChar _ _ rw -> printString ('\'':rw ++ "\'#" )
PrimString _ _ rw -> printString ('\"':rw ++ "\"#" )
instance ExactP ModuleName where
exactP (ModuleName l str) = printString str
instance ExactP SpecialCon where
exactP sc = case sc of
UnitCon l -> printPoints l ["(",")"]
ListCon l -> printPoints l ["[","]"]
FunCon l -> printPoints l ["(","->",")"]
TupleCon l b n -> printPoints l $
case b of
Unboxed -> "(#": replicate (n-1) "," ++ ["#)"]
_ -> "(" : replicate (n-1) "," ++ [")"]
Cons l -> printString ":"
UnboxedSingleCon l -> printPoints l ["(#","#)"]
isSymbol :: Name l -> Bool
isSymbol (Symbol _ _) = True
isSymbol _ = False
getName :: QName l -> Name l
getName (UnQual _ s) = s
getName (Qual _ _ s) = s
getName (Special l (Cons _)) = Symbol l ":"
getName (Special l (FunCon _)) = Symbol l "->"
getName (Special l s) = Ident l (specialName s)
specialName :: SpecialCon l -> String
specialName (UnitCon _) = "()"
specialName (ListCon _) = "[]"
specialName (FunCon _) = "->"
specialName (TupleCon _ b n) = "(" ++ hash ++ replicate (n-1) ',' ++ hash ++ ")"
where hash = case b of
Unboxed -> "#"
_ -> ""
specialName (Cons _) = ":"
instance ExactP QName where
exactP qn
| isSymbol (getName qn) = do
case srcInfoPoints (ann qn) of
[a,b,c] -> do
printString "("
printWhitespace (pos b)
epQName qn
printStringAt (pos c) ")"
_ -> errorEP "ExactP: QName is given wrong number of srcInfoPoints"
| otherwise = epQName qn
epQName :: QName SrcSpanInfo -> EP ()
epQName qn = case qn of
Qual l mn n -> exactP mn >> printString "." >> epName n
UnQual l n -> epName n
Special l sc -> exactP sc
epInfixQName :: QName SrcSpanInfo -> EP ()
epInfixQName qn
| isSymbol (getName qn) = printWhitespace (pos (ann qn)) >> epQName qn
| otherwise = do
case srcInfoPoints (ann qn) of
[a,b,c] -> do
printStringAt (pos a) "`"
printWhitespace (pos b)
epQName qn
printStringAt (pos c) "`"
_ -> errorEP "ExactP: QName (epInfixName) is given wrong number of srcInfoPoints"
instance ExactP Name where
exactP n = case n of
Ident l str -> printString str
Symbol l str -> do
case srcInfoPoints l of
[a,b,c] -> do
printString "("
printWhitespace (pos b)
printString str
printStringAt (pos c) ")"
_ -> errorEP "ExactP: Name is given wrong number of srcInfoPoints"
epName :: Name SrcSpanInfo -> EP ()
epName (Ident _ str) = printString str
epName (Symbol _ str) = printString str
epInfixName :: Name SrcSpanInfo -> EP ()
epInfixName n
| isSymbol n = printWhitespace (pos (ann n)) >> epName n
| otherwise = do
case srcInfoPoints (ann n) of
[a,b,c] -> do
printStringAt (pos a) "`"
printWhitespace (pos b)
epName n
printStringAt (pos c) "`"
_ -> errorEP "ExactP: Name (epInfixName) is given wrong number of srcInfoPoints"
instance ExactP IPName where
exactP ipn = case ipn of
IPDup l str -> printString $ '?':str
IPLin l str -> printString $ '%':str
instance ExactP QOp where
exactP qop = case qop of
QVarOp l qn -> epInfixQName qn
QConOp l qn -> epInfixQName qn
instance ExactP Op where
exactP op = case op of
VarOp l n -> epInfixName n
ConOp l n -> epInfixName n
instance ExactP CName where
exactP cn = case cn of
VarName l n -> exactP n
ConName l n -> exactP n
instance ExactP ExportSpec where
exactP espec = case espec of
EVar l qn -> exactP qn
EAbs l qn -> exactP qn
EThingAll l qn -> exactP qn >> printPoints l ["(","..",")"]
EThingWith l qn cns ->
let k = length (srcInfoPoints l)
in exactP qn >> printInterleaved (zip (srcInfoPoints l) $ "(":replicate (k-2) "," ++ [")"]) cns
EModuleContents l mn -> printString "module" >> exactPC mn
instance ExactP ExportSpecList where
exactP (ExportSpecList l ess) =
let k = length (srcInfoPoints l)
in printInterleaved (zip (srcInfoPoints l) $ "(": replicate (k-2) "," ++ [")"]) ess
instance ExactP ImportSpecList where
exactP (ImportSpecList l hid ispecs) = do
let pts = srcInfoPoints l
pts <- if hid then do
let (x:pts') = pts
printStringAt (pos x) "hiding"
return pts'
else return pts
let k = length pts
printInterleaved (zip pts $ "(": replicate (k-2) "," ++ [")"]) ispecs
instance ExactP ImportSpec where
exactP ispec = case ispec of
IVar l n -> exactP n
IAbs l n -> exactP n
IThingAll l n -> exactP n >> printPoints l ["(","..",")"]
IThingWith l n cns ->
let k = length (srcInfoPoints l)
in exactP n >> printInterleaved (zip (srcInfoPoints l) $ "(":replicate (k-2) "," ++ [")"]) cns
instance ExactP ImportDecl where
exactP (ImportDecl l mn qf src mpkg mas mispecs) = do
printString "import"
case srcInfoPoints l of
(a:pts) -> do
pts <- if src then
case pts of
x:y:pts' -> do
printStringAt (pos x) "{-# SOURCE"
printStringAt (pos y) "#-}"
return pts'
_ -> errorEP "ExactP: ImportDecl is given too few srcInfoPoints"
else return pts
pts <- if qf then
case pts of
x:pts' -> do
printStringAt (pos x) "qualified"
return pts'
_ -> errorEP "ExactP: ImportDecl is given too few srcInfoPoints"
else return pts
pts <- case mpkg of
Just pkg ->
case pts of
x:pts' -> do
printStringAt (pos x) $ show pkg
return pts'
_ -> errorEP "ExactP: ImportDecl is given too few srcInfoPoints"
_ -> return pts
exactPC mn
pts <- case mas of
Just as ->
case pts of
x:pts' -> do
printStringAt (pos x) "as"
exactPC as
return pts'
_ -> errorEP "ExactP: ImportDecl is given too few srcInfoPoints"
_ -> return pts
case mispecs of
Nothing -> return ()
Just ispecs -> exactPC ispecs
_ -> errorEP "ExactP: ImportDecl is given too few srcInfoPoints"
instance ExactP Module where
exactP mdl = case mdl of
Module l mmh oss ids decls -> do
let (oPts, pts) = splitAt (max (length oss + 1) 2) (srcInfoPoints l)
layoutList oPts oss
maybeEP exactPC mmh
printStreams (map (pos *** printString) $ lList pts)
(map (pos . ann &&& exactPC) ids ++ map (pos . ann &&& exactPC) (sepFunBinds decls))
XmlPage l _mn oss xn attrs mat es -> do
let (oPts, pPts) = splitAt (max (length oss + 1) 2) $ srcInfoPoints l
case pPts of
[a,b,c,d,e] -> do
layoutList oPts oss
printStringAt (pos a) "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) ">"
mapM_ exactPC es
printStringAt (pos c) "</"
printWhitespace (pos d)
exactP xn
printStringAt (pos e) ">"
_ -> errorEP "ExactP: Module: XmlPage is given wrong number of srcInfoPoints"
XmlHybrid l mmh oss ids decls xn attrs mat es -> do
let (oPts, pts) = splitAt (max (length oss + 1) 2) (srcInfoPoints l)
layoutList oPts oss
maybeEP exactPC mmh
let (dPts, pPts) = splitAt (length pts - 5) pts
case pPts of
[a,b,c,d,e] -> do
printStreams (map (\(p,s) -> (pos p, printString s)) $ lList dPts)
(map (\i -> (pos $ ann i, exactPC i)) ids ++ map (\d -> (pos $ ann d, exactPC d)) (sepFunBinds decls))
printStringAt (pos a) "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) ">"
mapM_ exactPC es
printStringAt (pos c) "</"
printWhitespace (pos d)
exactP xn
printStringAt (pos e) ">"
_ -> errorEP "ExactP: Module: XmlHybrid is given wrong number of srcInfoPoints"
instance ExactP ModuleHead where
exactP (ModuleHead l mn mwt mess) = do
case srcInfoPoints l of
[a,b] -> do
printStringAt (pos a) "module"
exactPC mn
maybeEP exactPC mwt
maybeEP exactPC mess
printStringAt (pos b) "where"
_ -> errorEP "ExactP: ModuleHead is given wrong number of srcInfoPoints"
instance ExactP ModulePragma where
exactP op = case op of
LanguagePragma l ns ->
let pts = srcInfoPoints l
k = length ns - 1 -- number of commas
m = length pts - k - 2 -- number of virtual semis, likely 0
in printInterleaved (zip pts ("{-# LANGUAGE":replicate k "," ++ replicate m "" ++ ["#-}"])) ns
OptionsPragma l mt str ->
let k = length (srcInfoPoints l)
opstr = "{-# OPTIONS" ++ case mt of { Just t -> "_" ++ show t ; _ -> "" } ++ " " ++ str
in printPoints l $ opstr : replicate (k-2) "" ++ ["#-}"]
AnnModulePragma l ann ->
case srcInfoPoints l of
[a,b] -> do
printString $ "{-# ANN"
exactPC ann
printStringAt (pos b) "#-}"
_ -> errorEP "ExactP: ModulePragma: AnnPragma is given wrong number of srcInfoPoints"
instance ExactP WarningText where
exactP (DeprText l str) = printPoints l ["{-# DEPRECATED", str, "#-}"]
exactP (WarnText l str) = printPoints l ["{-# WARNING", str, "#-}"]
instance ExactP Assoc where
exactP a = case a of
AssocNone l -> printString "infix"
AssocLeft l -> printString "infixl"
AssocRight l -> printString "infixr"
instance ExactP DataOrNew where
exactP (DataType l) = printString "data"
exactP (NewType l) = printString "newtype"
instance ExactP Decl where
exactP decl = case decl of
TypeDecl l dh t ->
case srcInfoPoints l of
[a,b] -> do
printStringAt (pos a) "type"
exactPC dh
printStringAt (pos b) "="
exactPC t
_ -> errorEP "ExactP: Decl: TypeDecl is given wrong number of srcInfoPoints"
TypeFamDecl l dh mk ->
case srcInfoPoints l of
a:b:ps -> do
printStringAt (pos a) "type"
printStringAt (pos b) "family"
exactPC dh
maybeEP (\k -> printStringAt (pos (head ps)) "::" >> exactPC k) mk
_ -> errorEP "ExactP: Decl: TypeFamDecl is given wrong number of srcInfoPoints"
DataDecl l dn mctxt dh constrs mder -> do
exactP dn
maybeEP exactPC mctxt
exactPC dh
-- the next line works for empty data types since the srcInfoPoints will be empty then
printInterleaved (zip (srcInfoPoints l) ("=": repeat "|")) constrs
maybeEP exactPC mder
GDataDecl l dn mctxt dh mk gds mder -> do
let pts = srcInfoPoints l
exactP dn
maybeEP exactPC mctxt
exactPC dh
pts <- case mk of
Nothing -> return pts
Just kd -> case pts of
p:pts' -> do
printStringAt (pos p) "::"
exactPC kd
return pts'
_ -> errorEP "ExactP: Decl: GDataDecl is given too few srcInfoPoints"
case pts of
x:pts -> do
printStringAt (pos x) "where"
layoutList pts gds
maybeEP exactPC mder
_ -> errorEP "ExactP: Decl: GDataDecl is given too few srcInfoPoints"
DataFamDecl l mctxt dh mk -> do
printString "data"
maybeEP exactPC mctxt
exactPC dh
maybeEP (\kd -> printStringAt (pos (head (srcInfoPoints l))) "::" >> exactPC kd) mk
TypeInsDecl l t1 t2 ->
case srcInfoPoints l of
[a,b,c] -> do
printString "type"
printStringAt (pos b) "instance"
exactPC t1
printStringAt (pos c) "="
exactPC t2
_ -> errorEP "ExactP: Decl: TypeInsDecl is given wrong number of srcInfoPoints"
DataInsDecl l dn t constrs mder ->
case srcInfoPoints l of
p:pts -> do
exactP dn
printStringAt (pos p) "instance"
exactPC t
printInterleaved (zip pts ("=": repeat "|")) constrs
maybeEP exactPC mder
_ -> errorEP "ExactP: Decl: DataInsDecl is given too few srcInfoPoints"
GDataInsDecl l dn t mk gds mder ->
case srcInfoPoints l of
p:pts -> do
exactP dn
printStringAt (pos p) "instance"
exactPC t
pts <- case mk of
Nothing -> return pts
Just kd -> case pts of
p:pts' -> do
printStringAt (pos p) "::"
exactPC kd
return pts'
_ -> errorEP "ExactP: Decl: GDataInsDecl is given too few srcInfoPoints"
case pts of
x:pts -> do
printStringAt (pos x) "where"
layoutList pts gds
maybeEP exactPC mder
_ -> errorEP "ExactP: Decl: GDataInsDecl is given too few srcInfoPoints"
_ -> errorEP "ExactP: Decl: GDataInsDecl is given too few srcInfoPoints"
ClassDecl l mctxt dh fds mcds ->
case srcInfoPoints l of
a:pts -> do
printString "class"
maybeEP exactPC mctxt
exactPC dh
pts <- case fds of
[] -> return pts
_ -> do
let (pts1, pts2) = splitAt (length fds) pts
printInterleaved (zip pts1 ("|":repeat ",")) fds
return pts2
maybeEP (\cds ->
case pts of
p:pts' -> do
printStringAt (pos p) "where"
layoutList pts' $ sepClassFunBinds cds
_ -> errorEP "ExactP: Decl: ClassDecl is given too few srcInfoPoints"
) mcds
_ -> errorEP "ExactP: Decl: ClassDecl is given too few srcInfoPoints"
InstDecl l mctxt ih mids ->
case srcInfoPoints l of
a:pts -> do
printString "instance"
maybeEP exactPC mctxt
exactPC ih
maybeEP (\ids -> do
let (p:pts') = pts
printStringAt (pos p) "where"
layoutList pts' $ sepInstFunBinds ids
) mids
_ -> errorEP "ExactP: Decl: InstDecl is given too few srcInfoPoints"
DerivDecl l mctxt ih ->
case srcInfoPoints l of
[a,b] -> do
printString "deriving"
printStringAt (pos b) "instance"
maybeEP exactPC mctxt
exactPC ih
_ -> errorEP "ExactP: Decl: DerivDecl is given wrong number of srcInfoPoints"
InfixDecl l assoc mprec ops -> do
let pts = srcInfoPoints l
exactP assoc
pts <- case mprec of
Nothing -> return pts
Just prec ->
case pts of
p:pts' -> do
printStringAt (pos p) (show prec)
return pts'
_ -> errorEP "ExactP: Decl: InfixDecl is given too few srcInfoPoints"
printInterleaved' (zip pts (repeat ",")) ops
DefaultDecl l ts ->
case srcInfoPoints l of
a:pts -> do
printString "default"
printInterleaved (zip (init pts) ("(":repeat ",")) ts
printStringAt (pos (last pts)) ")"
_ -> errorEP "ExactP: Decl: DefaultDecl is given too few srcInfoPoints"
SpliceDecl l spl -> exactP spl
TypeSig l ns t -> do
let pts = srcInfoPoints l
printInterleaved' (zip pts (replicate (length pts - 1) "," ++ ["::"])) ns
exactPC t
FunBind l ms -> mapM_ exactPC ms
PatBind l p mt rhs mbs -> do
let pts = srcInfoPoints l
exactP p
pts <- case mt of
Nothing -> return pts
Just t -> case pts of
x:pts'-> do
printStringAt (pos x) "::"
exactPC t
return pts'
_ -> errorEP "ExactP: Decl: PatBind is given too few srcInfoPoints"
exactPC rhs
maybeEP (\bs -> printStringAt (pos (head pts)) "where" >> exactPC bs) mbs
ForImp l cc msf mstr n t ->
case srcInfoPoints l of
a:b:pts -> do
printString "foreign"
printStringAt (pos b) "import"
exactPC cc
maybeEP exactPC msf
pts <- case mstr of
Nothing -> return pts
Just str -> case pts of
x:pts' -> do
printStringAt (pos x) (show str)
return pts'
_ -> errorEP "ExactP: Decl: ForImp is given too few srcInfoPoints"
case pts of
y:_ -> do
exactPC n
printStringAt (pos y) "::"
exactPC t
_ -> errorEP "ExactP: Decl: ForImp is given too few srcInfoPoints"
_ -> errorEP "ExactP: Decl: ForImp is given too few srcInfoPoints"
ForExp l cc mstr n t ->
case srcInfoPoints l of
a:b:pts -> do
printString "foreign"
printStringAt (pos b) "export"
exactPC cc
pts <- case mstr of
Nothing -> return pts
Just str -> case pts of
x:pts' -> do
printStringAt (pos x) (show str)
return pts'
_ -> errorEP "ExactP: Decl: ForExp is given too few srcInfoPoints"
case pts of
y:_ -> do
exactPC n
printStringAt (pos y) "::"
exactPC t
_ -> errorEP "ExactP: Decl: ForExp is given too few srcInfoPoints"
_ -> errorEP "ExactP: Decl: ForExp is given too few srcInfoPoints"
RulePragmaDecl l rs ->
case srcInfoPoints l of
[a,b] -> do
printString "{-# RULES"
mapM_ exactPC rs
printStringAt (pos b) "#-}"
_ -> errorEP "ExactP: Decl: RulePragmaDecl is given too few srcInfoPoints"
DeprPragmaDecl l nstrs ->
case srcInfoPoints l of
a:pts -> do
printString "{-# DEPRECATED"
printWarndeprs (map pos (init pts)) nstrs
printStringAt (pos (last pts)) "#-}"
_ -> errorEP "ExactP: Decl: DeprPragmaDecl is given too few srcInfoPoints"
WarnPragmaDecl l nstrs ->
case srcInfoPoints l of
a:pts -> do
printString "{-# WARNING"
printWarndeprs (map pos (init pts)) nstrs
printStringAt (pos (last pts)) "#-}"
_ -> errorEP "ExactP: Decl: WarnPragmaDecl is given too few srcInfoPoints"
InlineSig l inl mact qn ->
case srcInfoPoints l of
[a,b] -> do
printString $ if inl then "{-# INLINE" else "{-# NOINLINE"
maybeEP exactPC mact
exactPC qn
printStringAt (pos b) "#-}"
_ -> errorEP "ExactP: Decl: InlineSig is given wrong number of srcInfoPoints"
InlineConlikeSig l mact qn ->
case srcInfoPoints l of
[a,b] -> do
printString "{-# INLINE_CONLIKE"
maybeEP exactPC mact
exactPC qn
printStringAt (pos b) "#-}"
_ -> errorEP "ExactP: Decl: InlineConlikeSig is given wrong number of srcInfoPoints"
SpecSig l qn ts ->
case srcInfoPoints l of
a:pts -> do
printString "{-# SPECIALISE"
exactPC qn
          printInterleaved (zip pts ("::" : replicate (length pts - 2) "," ++ ["#-}"])) ts
_ -> errorEP "ExactP: Decl: SpecSig is given too few srcInfoPoints"
SpecInlineSig l b mact qn ts ->
case srcInfoPoints l of
a:pts -> do
printString $ "{-# SPECIALISE " ++ if b then "INLINE" else "NOINLINE"
maybeEP exactPC mact
exactPC qn
          printInterleaved (zip pts ("::" : replicate (length pts - 2) "," ++ ["#-}"])) ts
_ -> errorEP "ExactP: Decl: SpecInlineSig is given too few srcInfoPoints"
InstSig l mctxt ih ->
case srcInfoPoints l of
[a,b,c] -> do
printString $ "{-# SPECIALISE"
printStringAt (pos b) "instance"
maybeEP exactPC mctxt
exactPC ih
printStringAt (pos c) "#-}"
_ -> errorEP "ExactP: Decl: InstSig is given wrong number of srcInfoPoints"
AnnPragma l ann ->
case srcInfoPoints l of
[a,b] -> do
printString $ "{-# ANN"
exactPC ann
printStringAt (pos b) "#-}"
_ -> errorEP "ExactP: Decl: AnnPragma is given wrong number of srcInfoPoints"
instance ExactP Annotation where
exactP ann = case ann of
Ann l n e -> do
exactP n
exactPC e
TypeAnn l n e -> do
printString "type"
exactPC n
exactPC e
ModuleAnn l e -> do
printString "module"
exactPC e
printWarndeprs :: [Pos] -> [([Name SrcSpanInfo], String)] -> EP ()
printWarndeprs _ [] = return ()
printWarndeprs ps ((ns,str):nsts) = printWd ps ns str nsts
where printWd :: [Pos] -> [Name SrcSpanInfo] -> String -> [([Name SrcSpanInfo], String)] -> EP ()
printWd (p:ps) [] str nsts = printStringAt p (show str) >> printWarndeprs ps nsts
printWd ps [n] str nsts = exactPC n >> printWd ps [] str nsts
printWd (p:ps) (n:ns) str nsts = exactPC n >> printStringAt p "," >> printWd ps ns str nsts
sepFunBinds :: [Decl SrcSpanInfo] -> [Decl SrcSpanInfo]
sepFunBinds [] = []
sepFunBinds (FunBind _ ms:ds) = map (\m -> FunBind (ann m) [m]) ms ++ sepFunBinds ds
sepFunBinds (d:ds) = d : sepFunBinds ds
sepClassFunBinds :: [ClassDecl SrcSpanInfo] -> [ClassDecl SrcSpanInfo]
sepClassFunBinds [] = []
sepClassFunBinds (ClsDecl _ (FunBind _ ms):ds) = map (\m -> ClsDecl (ann m) $ FunBind (ann m) [m]) ms ++ sepClassFunBinds ds
sepClassFunBinds (d:ds) = d : sepClassFunBinds ds
sepInstFunBinds :: [InstDecl SrcSpanInfo] -> [InstDecl SrcSpanInfo]
sepInstFunBinds [] = []
sepInstFunBinds (InsDecl _ (FunBind _ ms):ds) = map (\m -> InsDecl (ann m) $ FunBind (ann m) [m]) ms ++ sepInstFunBinds ds
sepInstFunBinds (d:ds) = d : sepInstFunBinds ds
instance ExactP DeclHead where
exactP dh = case dh of
DHead l n tvs -> exactP n >> mapM_ exactPC tvs
DHInfix l tva n tvb -> exactP tva >> epInfixName n >> exactPC tvb
DHParen l dh ->
case srcInfoPoints l of
[_,b] -> printString "(" >> exactPC dh >> printStringAt (pos b) ")"
_ -> errorEP "ExactP: DeclHead: DeclParen is given wrong number of srcInfoPoints"
instance ExactP InstHead where
exactP ih = case ih of
IHead l qn ts -> exactP qn >> mapM_ exactPC ts
IHInfix l ta qn tb -> exactP ta >> epInfixQName qn >> exactPC tb
IHParen l ih ->
case srcInfoPoints l of
[_,b] -> printString "(" >> exactPC ih >> printStringAt (pos b) ")"
_ -> errorEP "ExactP: InstHead: IHParen is given wrong number of srcInfoPoints"
instance ExactP TyVarBind where
exactP (KindedVar l n k) =
case srcInfoPoints l of
[a,b,c] -> do
printString "("
exactPC n
printStringAt (pos b) "::"
exactPC k
printStringAt (pos c) ")"
_ -> errorEP "ExactP: TyVarBind: KindedVar is given wrong number of srcInfoPoints"
exactP (UnkindedVar l n) = exactP n
instance ExactP Kind where
exactP kd = case kd of
KindStar l -> printString "*"
KindBang l -> printString "!"
KindFn l k1 k2 ->
case srcInfoPoints l of
[a] -> do
exactP k1
printStringAt (pos a) "->"
exactPC k2
_ -> errorEP "ExactP: Kind: KindFn is given wrong number of srcInfoPoints"
KindParen l kd -> do
case srcInfoPoints l of
[a,b] -> do
printString "("
exactPC kd
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Kind: KindParen is given wrong number of srcInfoPoints"
KindVar l n -> exactP n
instance ExactP Type where
exactP t = case t of
TyForall l mtvs mctxt t -> do
let pts = srcInfoPoints l
pts <- case mtvs of
Nothing -> return pts
Just tvs ->
case pts of
a:b:pts' -> do
printString "forall"
mapM_ exactPC tvs
printStringAt (pos b) "."
return pts'
_ -> errorEP "ExactP: Type: TyForall is given too few srcInfoPoints"
maybeEP exactPC mctxt
exactPC t
TyFun l t1 t2 -> do
case srcInfoPoints l of
[a] -> do
exactP t1
printStringAt (pos a) "->"
exactPC t2
_ -> errorEP "ExactP: Type: TyFun is given wrong number of srcInfoPoints"
TyTuple l bx ts -> do
let pts = srcInfoPoints l
(o,e) = case bx of
Boxed -> ("(" , ")")
Unboxed -> ("(#","#)")
printInterleaved (zip pts (o: replicate (length pts - 2) "," ++ [e])) ts
TyList l t -> do
case srcInfoPoints l of
[a,b] -> do
printString "["
exactPC t
printStringAt (pos b) "]"
_ -> errorEP "ExactP: Type: TyList is given wrong number of srcInfoPoints"
TyApp l t1 t2 -> exactP t1 >> exactPC t2
TyVar l n -> exactP n
TyCon l qn -> exactP qn
TyParen l t -> do
case srcInfoPoints l of
[a,b] -> do
printString "("
exactPC t
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Type: TyParen is given wrong number of srcInfoPoints"
TyInfix l t1 qn t2 -> exactP t1 >> epInfixQName qn >> exactPC t2
TyKind l t kd -> do
case srcInfoPoints l of
[a,b,c] -> do
printString "("
exactPC t
printStringAt (pos b) "::"
exactPC kd
printStringAt (pos c) ")"
_ -> errorEP "ExactP: Type: TyKind is given wrong number of srcInfoPoints"
instance ExactP Context where
exactP ctxt = do
printContext ctxt
printStringAt (pos . last . srcInfoPoints . ann $ ctxt) "=>"
printContext ctxt = do
let l = ann ctxt
pts = init $ srcInfoPoints l
case ctxt of
CxParen l ctxt ->
case pts of
[a,b] -> do
printStringAt (pos a) "("
printContext ctxt
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Context: CxParen is given wrong number of srcInfoPoints"
CxSingle l asst -> exactP asst
CxEmpty l ->
case pts of
[a,b] -> do
printStringAt (pos a) "("
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Context: CxEmpty is given wrong number of srcInfoPoints"
CxTuple l assts -> parenList pts assts
instance ExactP Asst where
exactP asst = case asst of
ClassA l qn ts -> exactP qn >> mapM_ exactPC ts
InfixA l ta qn tb -> exactP ta >> epInfixQName qn >> exactPC tb
IParam l ipn t ->
case srcInfoPoints l of
[a] -> do
exactP ipn
printStringAt (pos a) "::"
exactPC t
_ -> errorEP "ExactP: Asst: IParam is given wrong number of srcInfoPoints"
EqualP l t1 t2 ->
case srcInfoPoints l of
[a] -> do
exactP t1
printStringAt (pos a) "~"
          exactPC t2
        _ -> errorEP "ExactP: Asst: EqualP is given wrong number of srcInfoPoints"
instance ExactP Deriving where
exactP (Deriving l ihs) =
case srcInfoPoints l of
x:pts -> do
printString "deriving"
case pts of
[] -> exactPC $ head ihs
_ -> parenList pts ihs
_ -> errorEP "ExactP: Deriving is given too few srcInfoPoints"
instance ExactP ClassDecl where
exactP cdecl = case cdecl of
ClsDecl l d -> exactP d
ClsDataFam l mctxt dh mk ->
case srcInfoPoints l of
x:pts -> do
printString "data"
maybeEP exactPC mctxt
exactPC dh
maybeEP (\kd -> printStringAt (pos (head pts)) "::" >> exactPC kd) mk
_ -> errorEP "ExactP: ClassDecl: ClsDataFam is given too few srcInfoPoints"
ClsTyFam l dh mk ->
case srcInfoPoints l of
x:pts -> do
printString "type"
exactPC dh
maybeEP (\kd -> printStringAt (pos (head pts)) "::" >> exactPC kd) mk
_ -> errorEP "ExactP: ClassDecl: ClsTyFam is given too few srcInfoPoints"
ClsTyDef l t1 t2 ->
case srcInfoPoints l of
a:b:pts -> do
printString "type"
exactPC t1
printStringAt (pos b) "="
exactPC t2
_ -> errorEP "ExactP: ClassDecl: ClsTyDef is given too few srcInfoPoints"
instance ExactP InstDecl where
exactP idecl = case idecl of
InsDecl l d -> exactP d
InsType l t1 t2 ->
case srcInfoPoints l of
[a,b] -> do
printString "type"
exactPC t1
printStringAt (pos b) "="
          exactPC t2
        _ -> errorEP "ExactP: InstDecl: InsType is given wrong number of srcInfoPoints"
InsData l dn t constrs mder -> do
exactP dn
exactPC t
printInterleaved (zip (srcInfoPoints l) ("=": repeat "|")) constrs
maybeEP exactPC mder
InsGData l dn t mk gds mder -> do
let pts = srcInfoPoints l
exactP dn
exactPC t
pts <- case mk of
Nothing -> return pts
Just kd -> case pts of
p:pts' -> do
printStringAt (pos p) "::"
exactPC kd
return pts'
_ -> errorEP "ExactP: InstDecl: InsGData is given too few srcInfoPoints"
case pts of
x:_ -> do
printStringAt (pos x) "where"
mapM_ exactPC gds
maybeEP exactPC mder
_ -> errorEP "ExactP: InstDecl: InsGData is given too few srcInfoPoints"
-- InsInline l inl mact qn -> do
-- case srcInfoPoints l of
-- [a,b] -> do
-- printString $ if inl then "{-# INLINE" else "{-# NOINLINE"
-- maybeEP exactPC mact
-- exactPC qn
-- printStringAt (pos b) "#-}"
-- _ -> errorEP "ExactP: InstDecl: InsInline is given wrong number of srcInfoPoints"
instance ExactP FunDep where
exactP (FunDep l nxs nys) =
case srcInfoPoints l of
[a] -> do
mapM_ exactPC nxs
printStringAt (pos a) "->"
mapM_ exactPC nys
_ -> errorEP "ExactP: FunDep is given wrong number of srcInfoPoints"
instance ExactP QualConDecl where
exactP (QualConDecl l mtvs mctxt cd) = do
let pts = srcInfoPoints l
pts <- case mtvs of
Nothing -> return pts
Just tvs ->
case pts of
a:b:pts' -> do
printString "forall"
mapM_ exactPC tvs
printStringAt (pos b) "."
return pts'
_ -> errorEP "ExactP: QualConDecl is given wrong number of srcInfoPoints"
maybeEP exactPC mctxt
exactPC cd
instance ExactP ConDecl where
exactP cd = case cd of
ConDecl l n bts -> exactP n >> mapM_ exactPC bts
    InfixConDecl l bta n btb -> exactP bta >> epInfixName n >> exactPC btb
RecDecl l n fds -> exactP n >> curlyList (srcInfoPoints l) fds
instance ExactP GadtDecl where
exactP (GadtDecl l n t) =
case srcInfoPoints l of
[a] -> do
exactP n
printStringAt (pos a) "::"
exactPC t
_ -> errorEP "ExactP: GadtDecl is given wrong number of srcInfoPoints"
instance ExactP BangType where
exactP bt = case bt of
UnBangedTy l t -> exactP t
BangedTy l t -> printString "!" >> exactPC t
UnpackedTy l t ->
case srcInfoPoints l of
[a,b,c] -> do
printString "{-# UNPACK"
printStringAt (pos b) "#-}"
printStringAt (pos c) "!"
exactPC t
_ -> errorEP "ExactP: BangType: UnpackedTy is given wrong number of srcInfoPoints"
instance ExactP Splice where
exactP (IdSplice l str) = printString $ '$':str
exactP (ParenSplice l e) =
case srcInfoPoints l of
[a,b] -> do
printString "$("
exactPC e
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Splice: ParenSplice is given wrong number of srcInfoPoints"
instance ExactP Exp where
exactP exp = case exp of
Var l qn -> exactP qn
IPVar l ipn -> exactP ipn
Con l qn -> exactP qn
Lit l lit -> exactP lit
InfixApp l e1 op e2 -> exactP e1 >> exactPC op >> exactPC e2
App l e1 e2 -> exactP e1 >> exactPC e2
NegApp l e -> printString "-" >> exactPC e
Lambda l ps e ->
case srcInfoPoints l of
[a,b] -> do
printString "\\"
mapM_ exactPC ps
printStringAt (pos b) "->"
exactPC e
_ -> errorEP "ExactP: Exp: Lambda is given wrong number of srcInfoPoints"
Let l bs e ->
case srcInfoPoints l of
[a,b] -> do
printString "let"
exactPC bs
printStringAt (pos b) "in"
exactPC e
_ -> errorEP "ExactP: Exp: Let is given wrong number of srcInfoPoints"
If l ec et ee ->
case srcInfoPoints l of
[a,b,c] -> do
printString "if"
exactPC ec
printStringAt (pos b) "then"
exactPC et
printStringAt (pos c) "else"
exactPC ee
_ -> errorEP "ExactP: Exp: If is given wrong number of srcInfoPoints"
Case l e alts ->
case srcInfoPoints l of
a:b:pts -> do
printString "case"
exactPC e
printStringAt (pos b) "of"
layoutList pts alts
_ -> errorEP "ExactP: Exp: Case is given too few srcInfoPoints"
Do l stmts ->
case srcInfoPoints l of
a:pts -> do
printString "do"
layoutList pts stmts
_ -> errorEP "ExactP: Exp: Do is given too few srcInfoPoints"
MDo l stmts ->
case srcInfoPoints l of
a:pts -> do
printString "mdo"
layoutList pts stmts
_ -> errorEP "ExactP: Exp: Mdo is given wrong number of srcInfoPoints"
Tuple l b es -> parenList (srcInfoPoints l) es
TupleSection l b mexps -> do
let pts = srcInfoPoints l
printSeq $ interleave (zip (map pos $ init pts) (map printString ("(": repeat ",")) ++ [(pos $ last pts, printString ")")])
(map (maybe (0,0) (pos . ann) &&& maybeEP exactPC) mexps)
List l es -> squareList (srcInfoPoints l) es
Paren l p -> parenList (srcInfoPoints l) [p]
LeftSection l e qop ->
case srcInfoPoints l of
[a,b] -> do
printString "("
exactPC e
exactPC qop
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Exp: LeftSection is given wrong number of srcInfoPoints"
RightSection l qop e ->
case srcInfoPoints l of
[a,b] -> do
printString "("
exactPC qop
exactPC e
printStringAt (pos b) ")"
_ -> errorEP "ExactP: Exp: RightSection is given wrong number of srcInfoPoints"
RecConstr l qn fups -> do
let pts = srcInfoPoints l
exactP qn
curlyList pts fups
RecUpdate l e fups -> do
let pts = srcInfoPoints l
exactP e
curlyList pts fups
EnumFrom l e ->
case srcInfoPoints l of
[a,b,c] -> do
printString "["
exactPC e
printStringAt (pos b) ".."
printStringAt (pos c) "]"
_ -> errorEP "ExactP: Exp: EnumFrom is given wrong number of srcInfoPoints"
EnumFromTo l e1 e2 ->
case srcInfoPoints l of
[a,b,c] -> do
printString "["
exactPC e1
printStringAt (pos b) ".."
exactPC e2
printStringAt (pos c) "]"
_ -> errorEP "ExactP: Exp: EnumFromTo is given wrong number of srcInfoPoints"
EnumFromThen l e1 e2 ->
case srcInfoPoints l of
[a,b,c,d] -> do
printString "["
exactPC e1
printStringAt (pos b) ","
exactPC e2
printStringAt (pos c) ".."
printStringAt (pos d) "]"
_ -> errorEP "ExactP: Exp: EnumFromThen is given wrong number of srcInfoPoints"
EnumFromThenTo l e1 e2 e3 ->
case srcInfoPoints l of
[a,b,c,d] -> do
printString "["
exactPC e1
printStringAt (pos b) ","
exactPC e2
printStringAt (pos c) ".."
exactPC e3
printStringAt (pos d) "]"
_ -> errorEP "ExactP: Exp: EnumFromToThen is given wrong number of srcInfoPoints"
ListComp l e qss ->
case srcInfoPoints l of
a:pts -> do
printString "["
exactPC e
bracketList ("|",",","]") pts qss
_ -> errorEP "ExactP: Exp: ListComp is given too few srcInfoPoints"
ParComp l e qsss ->
case srcInfoPoints l of
a:pts -> do
let (strs, qss) = unzip $ pairUp qsss
printString "["
exactPC e
printInterleaved (zip pts (strs ++ ["]"])) qss
_ -> errorEP "ExactP: Exp: ParComp is given wrong number of srcInfoPoints"
where pairUp [] = []
pairUp ((a:as):xs) = ("|", a) : zip (repeat ",") as ++ pairUp xs
ExpTypeSig l e t ->
case srcInfoPoints l of
[a] -> do
exactP e
printStringAt (pos a) "::"
exactPC t
_ -> errorEP "ExactP: Exp: ExpTypeSig is given wrong number of srcInfoPoints"
VarQuote l qn -> do
printString "'"
exactPC qn
TypQuote l qn -> do
printString "''"
exactPC qn
BracketExp l br -> exactP br
SpliceExp l sp -> exactP sp
QuasiQuote l name qt -> do
let qtLines = lines qt
printString $ "[$" ++ name ++ "|"
sequence_ (intersperse newLine $ map printString qtLines)
printString "|]"
XTag l xn attrs mat es ->
case srcInfoPoints l of
[a,b,c,d,e] -> do
printString "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) ">"
mapM_ exactPC es
printStringAt (pos c) "</"
printWhitespace (pos d)
exactP xn
printStringAt (pos e) ">"
_ -> errorEP "ExactP: Exp: XTag is given wrong number of srcInfoPoints"
XETag l xn attrs mat ->
case srcInfoPoints l of
[a,b] -> do
printString "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) "/>"
_ -> errorEP "ExactP: Exp: XETag is given wrong number of srcInfoPoints"
XPcdata l str -> do
let strLines = lines str
sequence_ (intersperse newLine $ map printString strLines)
XExpTag l e ->
case srcInfoPoints l of
[a,b] -> do
printString "<%"
exactPC e
printStringAt (pos b) "%>"
_ -> errorEP "ExactP: Exp: XExpTag is given wrong number of srcInfoPoints"
XChildTag l es ->
case srcInfoPoints l of
[a,b,c] -> do
printString "<%>"
mapM_ exactPC es
printStringAt (pos b) "</"
printStringAt (pos c) "%>"
CorePragma l str e ->
case srcInfoPoints l of
[a,b] -> do
printString $ "{-# CORE " ++ show str
printStringAt (pos b) "#-}"
exactPC e
_ -> errorEP "ExactP: Exp: CorePragma is given wrong number of srcInfoPoints"
SCCPragma l str e ->
case srcInfoPoints l of
[a,b] -> do
printString $ "{-# SCC " ++ show str
printStringAt (pos b) "#-}"
exactPC e
_ -> errorEP "ExactP: Exp: SCCPragma is given wrong number of srcInfoPoints"
GenPragma l str (i1,i2) (i3,i4) e -> do
printStrs $ zip (srcInfoPoints l) ["{-# GENERATED", show str, show i1, ":", show i2, "-", show i3, ":", show i4, "#-}"]
exactPC e
Proc l p e ->
case srcInfoPoints l of
[a,b] -> do
printString "proc"
exactPC p
printStringAt (pos b) "->"
exactPC e
_ -> errorEP "ExactP: Exp: Proc is given wrong number of srcInfoPoints"
LeftArrApp l e1 e2 ->
case srcInfoPoints l of
[a] -> do
exactP e1
printStringAt (pos a) "-<"
exactPC e2
_ -> errorEP "ExactP: Exp: LeftArrApp is given wrong number of srcInfoPoints"
RightArrApp l e1 e2 -> do
case srcInfoPoints l of
[a] -> do
exactP e1
printStringAt (pos a) ">-"
exactPC e2
_ -> errorEP "ExactP: Exp: RightArrApp is given wrong number of srcInfoPoints"
LeftArrHighApp l e1 e2 -> do
case srcInfoPoints l of
[a] -> do
exactP e1
printStringAt (pos a) "-<<"
exactPC e2
_ -> errorEP "ExactP: Exp: LeftArrHighApp is given wrong number of srcInfoPoints"
RightArrHighApp l e1 e2 -> do
case srcInfoPoints l of
[a] -> do
exactP e1
printStringAt (pos a) ">>-"
exactPC e2
_ -> errorEP "ExactP: Exp: RightArrHighApp is given wrong number of srcInfoPoints"
instance ExactP FieldUpdate where
exactP fup = case fup of
FieldUpdate l qn e -> do
case srcInfoPoints l of
[a] -> do
exactP qn
printStringAt (pos a) "="
exactPC e
_ -> errorEP "ExactP: FieldUpdate is given wrong number of srcInfoPoints"
FieldPun l n -> exactP n
FieldWildcard l -> printString ".."
instance ExactP Stmt where
exactP stmt = case stmt of
Generator l p e ->
case srcInfoPoints l of
[a] -> do
exactP p
printStringAt (pos a) "<-"
exactPC e
_ -> errorEP "ExactP: Stmt: Generator is given wrong number of srcInfoPoints"
Qualifier l e -> exactP e
LetStmt l bds -> do
printString "let"
exactPC bds
RecStmt l ss ->
case srcInfoPoints l of
a:pts -> do
printString "rec"
layoutList pts ss
_ -> errorEP "ExactP: Stmt: RecStmt is given too few srcInfoPoints"
instance ExactP QualStmt where
exactP qstmt = case qstmt of
QualStmt l stmt -> exactP stmt
ThenTrans l e -> printString "then" >> exactPC e
ThenBy l e1 e2 -> do
case srcInfoPoints l of
[a,b] -> do
printString "then"
exactPC e1
printStringAt (pos b) "by"
exactPC e2
_ -> errorEP "ExactP: QualStmt: ThenBy is given wrong number of srcInfoPoints"
GroupBy l e -> do
printStrs $ zip (srcInfoPoints l) ["then","group","by"]
exactPC e
GroupUsing l e -> do
printStrs $ zip (srcInfoPoints l) ["then","group","using"]
exactPC e
GroupByUsing l e1 e2 -> do
let pts = srcInfoPoints l
printStrs $ zip (init pts) ["then","group","by"]
exactPC e1
printStringAt (pos (last pts)) "using"
exactPC e2
instance ExactP Bracket where
exactP br = case br of
ExpBracket l e ->
case srcInfoPoints l of
[a,b] -> do
printString "[|"
exactPC e
printStringAt (pos b) "|]"
_ -> errorEP "ExactP: Bracket: ExpBracket is given wrong number of srcInfoPoints"
PatBracket l p ->
case srcInfoPoints l of
[a,b] -> do
printString "[p|"
exactPC p
printStringAt (pos b) "|]"
_ -> errorEP "ExactP: Bracket: PatBracket is given wrong number of srcInfoPoints"
TypeBracket l t -> do
case srcInfoPoints l of
[a,b] -> do
printString "[t|"
exactPC t
printStringAt (pos b) "|]"
_ -> errorEP "ExactP: Bracket: TypeBracket is given wrong number of srcInfoPoints"
DeclBracket l ds ->
case srcInfoPoints l of
a:pts -> do
printString "[d|"
layoutList (init pts) (sepFunBinds ds)
printStringAt (pos (last pts)) "|]"
_ -> errorEP "ExactP: Bracket: DeclBracket is given too few srcInfoPoints"
instance ExactP XAttr where
exactP (XAttr l xn e) =
case srcInfoPoints l of
[a] -> do
exactP xn
printStringAt (pos a) "="
exactPC e
_ -> errorEP "ExactP: XAttr is given wrong number of srcInfoPoints"
instance ExactP Alt where
exactP (Alt l p galts mbs) = do
exactP p
exactPC galts
maybeEP (\bs -> printStringAt (pos (head (srcInfoPoints l))) "where" >> exactPC bs) mbs
instance ExactP GuardedAlts where
exactP (UnGuardedAlt l e) = printString "->" >> exactPC e
exactP (GuardedAlts l galts) = mapM_ exactPC galts
instance ExactP GuardedAlt where
exactP (GuardedAlt l stmts e) = do
bracketList ("|",",","->") (srcInfoPoints l) stmts
exactPC e
instance ExactP Match where
exactP (Match l n ps rhs mbinds) = do
let pts = srcInfoPoints l
len = length pts
pars = len `div` 2
(oPars,cParsWh) = splitAt pars pts
(cPars,whPt) = splitAt pars cParsWh -- whPt is either singleton or empty
printStrs (zip oPars (repeat "("))
exactPC n
printStreams (zip (map pos cPars) (repeat $ printString ")")) (map (pos . ann &&& exactPC) ps)
exactPC rhs
maybeEP (\bds -> printStringAt (pos (head pts)) "where" >> exactPC bds) mbinds
exactP (InfixMatch l a n bs rhs mbinds) = do
let pts = srcInfoPoints l
len = length pts
pars = len `div` 2
(oPars,cParsWh) = splitAt pars pts
(cPars,whPt) = splitAt pars cParsWh -- whPt is either singleton or empty
printStrs (zip oPars (repeat "("))
exactPC a
epInfixName n
printInterleaved' (zip cPars (repeat ")")) bs
exactPC rhs
maybeEP (\bds -> printStringAt (pos (head whPt)) "where" >> exactPC bds) mbinds
instance ExactP Rhs where
exactP (UnGuardedRhs l e) = printString "=" >> exactPC e
exactP (GuardedRhss l grhss) = mapM_ exactPC grhss
instance ExactP GuardedRhs where
exactP (GuardedRhs l ss e) =
case srcInfoPoints l of
a:pts -> do
printString "|"
printInterleaved' (zip (init pts) (repeat ",") ++ [(last pts, "=")]) ss
exactPC e
_ -> errorEP "ExactP: GuardedRhs is given wrong number of srcInfoPoints"
instance ExactP Pat where
exactP pat = case pat of
PVar l n -> exactP n
PLit l lit -> exactP lit
PNeg l p -> printString "-" >> exactPC p
PNPlusK l n k ->
case srcInfoPoints l of
[a,b] -> do
exactP n
printStringAt (pos a) "+"
printStringAt (pos b) (show k)
_ -> errorEP "ExactP: Pat: PNPlusK is given wrong number of srcInfoPoints"
PInfixApp l pa qn pb -> exactP pa >> epInfixQName qn >> exactPC pb
PApp l qn ps -> exactP qn >> mapM_ exactPC ps
PTuple l b ps -> parenList (srcInfoPoints l) ps
PList l ps -> squareList (srcInfoPoints l) ps
PParen l p -> parenList (srcInfoPoints l) [p]
PRec l qn pfs -> exactP qn >> curlyList (srcInfoPoints l) pfs
PAsPat l n p ->
case srcInfoPoints l of
[a] -> do
exactP n
printStringAt (pos a) "@"
exactPC p
_ -> errorEP "ExactP: Pat: PAsPat is given wrong number of srcInfoPoints"
PWildCard l -> printString "_"
PIrrPat l p -> printString "~" >> exactPC p
PatTypeSig l p t ->
case srcInfoPoints l of
[a] -> do
exactP p
printStringAt (pos a) "::"
exactPC t
_ -> errorEP "ExactP: Pat: PatTypeSig is given wrong number of srcInfoPoints"
PViewPat l e p ->
case srcInfoPoints l of
[a] -> do
exactP e
printStringAt (pos a) "->"
exactPC p
_ -> errorEP "ExactP: Pat: PViewPat is given wrong number of srcInfoPoints"
PRPat l rps -> squareList (srcInfoPoints l) rps
PXTag l xn attrs mat ps ->
case srcInfoPoints l of
[a,b,c,d,e] -> do
printString "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) ">"
mapM_ exactPC ps
printStringAt (pos c) "</"
printWhitespace (pos d)
exactP xn
printStringAt (pos e) ">"
_ -> errorEP "ExactP: Pat: PXTag is given wrong number of srcInfoPoints"
PXETag l xn attrs mat ->
case srcInfoPoints l of
[a,b] -> do
printString "<"
exactPC xn
mapM_ exactPC attrs
maybeEP exactPC mat
printStringAt (pos b) "/>"
_ -> errorEP "ExactP: Pat: PXETag is given wrong number of srcInfoPoints"
PXPcdata l str -> printString str
PXPatTag l p ->
case srcInfoPoints l of
[a,b] -> do
printString "<%"
exactPC p
printString "%>"
_ -> errorEP "ExactP: Pat: PXPatTag is given wrong number of srcInfoPoints"
PXRPats l rps -> bracketList ("<[",",","]>") (srcInfoPoints l) rps
PExplTypeArg l qn t ->
case srcInfoPoints l of
[a,b] -> do
exactP qn
printStringAt (pos a) "{|"
exactPC t
printStringAt (pos b) "|}"
_ -> errorEP "ExactP: Pat: PExplTypeArg is given wrong number of srcInfoPoints"
    PQuasiQuote l name qt -> printString $ "[$" ++ name ++ "|" ++ qt ++ "|]"
PBangPat l p -> printString "!" >> exactPC p
instance ExactP PatField where
exactP pf = case pf of
PFieldPat l qn p ->
case srcInfoPoints l of
[a] -> do
exactP qn
printStringAt (pos a) "="
exactPC p
_ -> errorEP "ExactP: PatField: PFieldPat is given wrong number of srcInfoPoints"
PFieldPun l n -> exactP n
PFieldWildcard l -> printString ".."
instance ExactP RPat where
exactP rpat = case rpat of
RPOp l rp op -> exactP rp >> exactPC op
RPEither l r1 r2 ->
case srcInfoPoints l of
[a] -> do
exactP r1
printStringAt (pos a) "|"
exactPC r2
_ -> errorEP "ExactP: RPat: RPEither is given wrong number of srcInfoPoints"
RPSeq l rps -> bracketList ("(|",",","|)") (srcInfoPoints l) rps
RPGuard l p stmts ->
case srcInfoPoints l of
a:pts -> do
printString "(|"
exactPC p
bracketList ("|",",","|)") pts stmts
_ -> errorEP "ExactP: RPat: RPGuard is given wrong number of srcInfoPoints"
RPCAs l n rp ->
case srcInfoPoints l of
[a] -> do
exactP n
printStringAt (pos a) "@:"
exactPC rp
_ -> errorEP "ExactP: RPat: RPCAs is given wrong number of srcInfoPoints"
RPAs l n rp ->
case srcInfoPoints l of
[a] -> do
exactP n
printStringAt (pos a) "@"
exactPC rp
_ -> errorEP "ExactP: RPat: RPAs is given wrong number of srcInfoPoints"
RPParen l rp -> do
parenList (srcInfoPoints l) [rp]
RPPat l p -> exactP p
instance ExactP RPatOp where
exactP rop = printString $ case rop of
RPStar l -> "*"
RPStarG l -> "*!"
RPPlus l -> "+"
RPPlusG l -> "+!"
RPOpt l -> "?"
RPOptG l -> "?!"
instance ExactP PXAttr where
exactP (PXAttr l xn p) =
case srcInfoPoints l of
[a] -> do
exactP xn
printStringAt (pos a) "="
exactPC p
_ -> errorEP "ExactP: PXAttr is given wrong number of srcInfoPoints"
instance ExactP XName where
exactP xn = case xn of
XName l name -> printString name
XDomName l dom name ->
case srcInfoPoints l of
[a,b,c] -> do
printString dom
printStringAt (pos b) ":"
printStringAt (pos c) name
_ -> errorEP "ExactP: XName: XDomName is given wrong number of srcInfoPoints"
instance ExactP Binds where
exactP (BDecls l ds) = layoutList (srcInfoPoints l) (sepFunBinds ds)
exactP (IPBinds l ips) = layoutList (srcInfoPoints l) ips
instance ExactP CallConv where
exactP (StdCall _) = printString "stdcall"
exactP (CCall _) = printString "ccall"
instance ExactP Safety where
exactP (PlayRisky _) = printString "unsafe"
exactP (PlaySafe _ b) = printString $ if b then "threadsafe" else "safe"
instance ExactP Rule where
exactP (Rule l str mact mrvs e1 e2) =
case srcInfoPoints l of
a:pts -> do
printString (show str)
maybeEP exactP mact
pts <- case mrvs of
Nothing -> return pts
Just rvs ->
case pts of
a:b:pts' -> do
printStringAt (pos a) "forall"
mapM_ exactPC rvs
printStringAt (pos b) "."
return pts'
_ -> errorEP "ExactP: Rule is given too few srcInfoPoints"
case pts of
[x] -> do
exactPC e1
printStringAt (pos x) "="
exactPC e2
_ -> errorEP "ExactP: Rule is given wrong number of srcInfoPoints"
_ -> errorEP "ExactP: Rule is given too few srcInfoPoints"
instance ExactP RuleVar where
exactP (TypedRuleVar l n t) = do
case srcInfoPoints l of
[a,b,c] -> do
printString "("
exactPC n
printStringAt (pos b) "::"
exactPC t
printStringAt (pos c) ")"
_ -> errorEP "ExactP: RuleVar: TypedRuleVar is given wrong number of srcInfoPoints"
exactP (RuleVar l n) = exactP n
instance ExactP Activation where
exactP (ActiveFrom l i) =
printPoints l ["[", show i, "]"]
exactP (ActiveUntil l i) =
printPoints l ["[", "~", show i, "]"]
instance ExactP FieldDecl where
exactP (FieldDecl l ns bt) = do
let pts = srcInfoPoints l
printInterleaved' (zip (init pts) (repeat ",") ++ [(last pts, "::")]) ns
exactPC bt
instance ExactP IPBind where
exactP (IPBind l ipn e) = do
case srcInfoPoints l of
[a] -> do
exactP ipn
printStringAt (pos a) "="
exactPC e
_ -> errorEP "ExactP: IPBind is given wrong number of srcInfoPoints"
| monsanto/hie | Hie/Language/Haskell/Exts/Annotated/ExactPrint.hs | gpl-3.0 | 64,542 | 0 | 25 | 24,374 | 20,500 | 9,563 | 10,937 | -1 | -1 |
module Lib where
import Data.Char
half :: Fractional a => a -> a
half x = x / 2
capitalizeWord :: String -> String
capitalizeWord [] = []
capitalizeWord (c:cs) = toUpper c : cs
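-- A property sketch (not part of the original exercise module): capitalizing
-- a word twice gives the same result as capitalizing it once, which is the
-- kind of idempotence property QuickCheck can exercise for this function.
prop_capitalizeWordIdempotent :: String -> Bool
prop_capitalizeWordIdempotent w =
  capitalizeWord (capitalizeWord w) == capitalizeWord w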
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | Testing/quickcheck-testing/src/Lib.hs | gpl-3.0 | 180 | 0 | 7 | 38 | 80 | 42 | 38 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | This is a simple library to query the Linux UPower daemon (via
-- DBus) for battery information. Currently, it only retrieves
-- information for the first battery it finds.
module System.Information.Battery (
-- * Types
BatteryContext,
BatteryInfo(..),
BatteryState(..),
BatteryTechnology(..),
BatteryType(..),
-- * Accessors
batteryContextNew,
getBatteryInfo
) where
import Data.Map ( Map )
import qualified Data.Map as M
import Data.Maybe ( fromMaybe )
import Data.Word
import Data.Int
import DBus
import DBus.Client
import Data.List ( find, isInfixOf )
import Data.Text ( Text )
import qualified Data.Text as T
import Safe ( atMay )
-- | An opaque wrapper around some internal library state
data BatteryContext = BC Client ObjectPath
data BatteryType = BatteryTypeUnknown
| BatteryTypeLinePower
| BatteryTypeBatteryType
| BatteryTypeUps
| BatteryTypeMonitor
| BatteryTypeMouse
| BatteryTypeKeyboard
| BatteryTypePda
| BatteryTypePhone
deriving (Show, Ord, Eq, Enum)
data BatteryState = BatteryStateUnknown
| BatteryStateCharging
| BatteryStateDischarging
| BatteryStateEmpty
| BatteryStateFullyCharged
| BatteryStatePendingCharge
| BatteryStatePendingDischarge
deriving (Show, Ord, Eq, Enum)
data BatteryTechnology = BatteryTechnologyUnknown
| BatteryTechnologyLithiumIon
| BatteryTechnologyLithiumPolymer
| BatteryTechnologyLithiumIronPhosphate
| BatteryTechnologyLeadAcid
| BatteryTechnologyNickelCadmium
| BatteryTechnologyNickelMetalHydride
deriving (Show, Ord, Eq, Enum)
-- | There are a few fields supported by UPower that aren't exposed
-- here; they could be added easily.
data BatteryInfo = BatteryInfo { batteryNativePath :: Text
, batteryVendor :: Text
, batteryModel :: Text
, batterySerial :: Text
-- , batteryUpdateTime :: Time
, batteryType :: BatteryType
, batteryPowerSupply :: Bool
, batteryHasHistory :: Bool
, batteryHasStatistics :: Bool
, batteryOnline :: Bool
, batteryEnergy :: Double
, batteryEnergyEmpty :: Double
, batteryEnergyFull :: Double
, batteryEnergyFullDesign :: Double
, batteryEnergyRate :: Double
, batteryVoltage :: Double
, batteryTimeToEmpty :: Int64
, batteryTimeToFull :: Int64
, batteryPercentage :: Double
, batteryIsPresent :: Bool
, batteryState :: BatteryState
, batteryIsRechargable :: Bool
, batteryCapacity :: Double
, batteryTechnology :: BatteryTechnology
{- , batteryRecallNotice :: Bool
, batteryRecallVendor :: Text
, batteryRecallUr :: Text
-}
}
-- | Find the first power source that is a battery in the list. The
-- simple heuristic is a substring search on 'BAT'
firstBattery :: [ObjectPath] -> Maybe ObjectPath
firstBattery = find (isInfixOf "BAT" . formatObjectPath)
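-- A small sketch (not part of the original module): with a typical UPower
-- device list, the AC line-power path is skipped and the first BAT* path is
-- returned. The object paths below are illustrative examples.
_firstBatteryExample :: Maybe ObjectPath
_firstBatteryExample = firstBattery
  [ "/org/freedesktop/UPower/devices/line_power_AC"
  , "/org/freedesktop/UPower/devices/battery_BAT0"
  ]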
-- | The name of the power daemon bus
powerBusName :: BusName
powerBusName = "org.freedesktop.UPower"
-- | The base object path
powerBaseObjectPath :: ObjectPath
powerBaseObjectPath = "/org/freedesktop/UPower"
-- | A helper to read the variant contents of a dict with a default
-- value.
readDict :: (IsVariant a) => Map Text Variant -> Text -> a -> a
readDict dict key dflt = fromMaybe dflt $ do
variant <- M.lookup key dict
fromVariant variant
-- | Read the variant contents of a dict which is of an unknown integral type.
readDictIntegral :: Map Text Variant -> Text -> Int32 -> Int
readDictIntegral dict key dflt = fromMaybe (fromIntegral dflt) $ do
v <- M.lookup key dict
case variantType v of
TypeWord8 -> return $ fromIntegral (f v :: Word8)
TypeWord16 -> return $ fromIntegral (f v :: Word16)
TypeWord32 -> return $ fromIntegral (f v :: Word32)
TypeWord64 -> return $ fromIntegral (f v :: Word64)
TypeInt16 -> return $ fromIntegral (f v :: Int16)
TypeInt32 -> return $ fromIntegral (f v :: Int32)
TypeInt64 -> return $ fromIntegral (f v :: Int64)
_ -> Nothing
where
f :: (Num a, IsVariant a) => Variant -> a
f = fromMaybe (fromIntegral dflt) . fromVariant
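-- A usage sketch (not part of the original module): the helpers above fall
-- back to the supplied default when a key is missing or holds an unexpected
-- type. The dictionary here is a hand-made example, not real UPower data.
_readDictExample :: Bool
_readDictExample =
  let dict = M.fromList [ ("IsPresent", toVariant True)
                        , ("State", toVariant (4 :: Word32))
                        ]
  in readDict dict "IsPresent" False
     && readDictIntegral dict "State" 0 == 4
     && readDict dict "Vendor" ("unknown" :: Text) == "unknown"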
-- | Query the UPower daemon about information on a specific battery.
-- If some fields are not actually present, they may have bogus values
-- here. Don't bet anything critical on it.
getBatteryInfo :: BatteryContext -> IO (Maybe BatteryInfo)
getBatteryInfo (BC systemConn battPath) = do
-- Grab all of the properties of the battery each call with one
-- message.
reply <- call_ systemConn (methodCall battPath "org.freedesktop.DBus.Properties" "GetAll")
{ methodCallDestination = Just "org.freedesktop.UPower"
, methodCallBody = [toVariant $ T.pack "org.freedesktop.UPower.Device"]
}
return $ do
body <- methodReturnBody reply `atMay` 0
dict <- fromVariant body
return BatteryInfo { batteryNativePath = readDict dict "NativePath" ""
, batteryVendor = readDict dict "Vendor" ""
, batteryModel = readDict dict "Model" ""
, batterySerial = readDict dict "Serial" ""
, batteryType = toEnum $ fromIntegral $ readDictIntegral dict "Type" 0
, batteryPowerSupply = readDict dict "PowerSupply" False
, batteryHasHistory = readDict dict "HasHistory" False
, batteryHasStatistics = readDict dict "HasStatistics" False
, batteryOnline = readDict dict "Online" False
, batteryEnergy = readDict dict "Energy" 0.0
, batteryEnergyEmpty = readDict dict "EnergyEmpty" 0.0
, batteryEnergyFull = readDict dict "EnergyFull" 0.0
, batteryEnergyFullDesign = readDict dict "EnergyFullDesign" 0.0
, batteryEnergyRate = readDict dict "EnergyRate" 0.0
, batteryVoltage = readDict dict "Voltage" 0.0
, batteryTimeToEmpty = readDict dict "TimeToEmpty" 0
, batteryTimeToFull = readDict dict "TimeToFull" 0
, batteryPercentage = readDict dict "Percentage" 0.0
, batteryIsPresent = readDict dict "IsPresent" False
, batteryState = toEnum $ readDictIntegral dict "State" 0
, batteryIsRechargable = readDict dict "IsRechargable" True
, batteryCapacity = readDict dict "Capacity" 0.0
, batteryTechnology =
toEnum $ fromIntegral $ readDictIntegral dict "Technology" 0
}
-- | Construct a battery context if possible. This could fail if the
-- UPower daemon is not running. The context can be used to get
-- actual battery state with 'getBatteryInfo'.
batteryContextNew :: IO (Maybe BatteryContext)
batteryContextNew = do
systemConn <- connectSystem
-- First, get the list of devices. For now, we just get the stats
-- for the first battery
reply <- call_ systemConn (methodCall powerBaseObjectPath "org.freedesktop.UPower" "EnumerateDevices")
{ methodCallDestination = Just powerBusName
}
return $ do
body <- methodReturnBody reply `atMay` 0
powerDevices <- fromVariant body
battPath <- firstBattery powerDevices
return $ BC systemConn battPath
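-- A usage sketch (not part of the original module): acquire the context once
-- and poll it whenever fresh data is needed. The function name is illustrative.
_printBatteryPercentage :: IO ()
_printBatteryPercentage = do
  mctx <- batteryContextNew
  case mctx of
    Nothing -> putStrLn "UPower not available or no battery found"
    Just ctx -> do
      minfo <- getBatteryInfo ctx
      case minfo of
        Nothing -> putStrLn "Could not read battery properties"
        Just info -> putStrLn ("Battery at " ++ show (batteryPercentage info) ++ "%")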
| Fizzixnerd/xmonad-config | site-haskell/src/System/Information/Battery.hs | gpl-3.0 | 8,528 | 0 | 14 | 3,014 | 1,412 | 782 | 630 | 136 | 8 |
-- Module : PDBparse
-- Copyright : (c) 2012 Grant Rotskoff
-- License : GPL-3
--
-- Maintainer : gmr1887@gmail.com
-- Stability : experimental
module PDButil.PDBparse where
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import System.IO (FilePath)
data Atom = Atom { name :: ByteString,
atid :: Int,
chain :: ByteString,
resid :: Int,
resname :: ByteString,
coords :: [Double],
aField :: Double,
bField :: Double,
atype :: ByteString }
deriving (Show,Eq)
data Protein = Protein { atoms :: [Atom] }
deriving (Show)
--Sample record:
-- ATOM 1 N ASP A 28 52.958 39.871 41.308 1.00 89.38 N
{- We only want record lines that begin with ATOM and HETATM
ATOM lines contain the coordinates of the protein(s) in a PDB file
HETATM lines (short for heteroatom) contain coordinate information for
other molecules present in the structure... ligands, DNA, RNA, waters, etc. -}
parseAtom :: ByteString -> Atom
parseAtom record = Atom { name = pull 13 16,
atid = rpull 7 11,
chain = pull 22 22,
resid = rpull 23 26,
resname = pull 18 20,
coords = [rpull 31 38,rpull 39 46,rpull 47 54],
aField = rpull 55 60,
bField = rpull 61 66,
atype = pull 77 78 } where
  -- Hard-coded parsing of the PDB record for coordinate types.
  -- I've encountered this "repacking" for comparison in expert code,
  -- but it seems like comparison should be possible some other way.
pull m n = cutspace $ B.drop (m-1) $ B.take n record
rpull m n = read $ B.unpack $ pull m n
cutspace = B.pack . filter (/=' ') . B.unpack
isAtom :: ByteString -> Bool
isAtom line = (B.take 4 line) == (B.pack "ATOM")
isHETATM :: ByteString -> Bool
isHETATM line = (B.take 6 line) == (B.pack "HETATM")
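-- A tiny sketch (not part of the original module): record detection is a plain
-- prefix test, so only lines tagged ATOM or HETATM are picked up. The record
-- fragments below are illustrative.
_recordDetectionExample :: Bool
_recordDetectionExample =
  isAtom (B.pack "ATOM      1  N   ASP A  28") &&
  not (isAtom (B.pack "HETATM 1415  O   HOH A 201")) &&
  isHETATM (B.pack "HETATM 1415  O   HOH A 201")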
parse :: FilePath -> IO ([Protein],[Atom])
parse pdb = do
let input = B.readFile pdb
bstring <- input
let atms = map parseAtom $ filter isAtom (B.lines bstring)
let hetatms = map parseAtom $ filter isHETATM (B.lines bstring)
return (splitChains atms, hetatms)
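-- A usage sketch (not part of the original module): "example.pdb" is a
-- placeholder path; parse yields the protein chains and the heteroatoms.
_summarizePdb :: IO ()
_summarizePdb = do
  (chains, hetatms) <- parse "example.pdb"
  putStrLn (show (length chains) ++ " chain(s), "
            ++ show (length hetatms) ++ " heteroatom(s)")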
parseCofactorOnly :: FilePath -> IO [Atom]
parseCofactorOnly pdb = do
bstring <- B.readFile pdb
let hetatms = map parseAtom $ filter isHETATM (B.lines bstring)
return hetatms
parseProteinOnly :: FilePath -> IO [Protein]
parseProteinOnly pdb = do
bstring <- B.readFile pdb
let atms = map parseAtom $ filter isAtom (B.lines bstring)
return $ splitChains atms
splitChains :: [Atom] -> [Protein]
splitChains [] = []
splitChains contents = Protein {atoms = chain1} : splitChains remainder where
  chain1 = takeWhile (\s -> cid == chain s) contents
  remainder = dropWhile (\s -> cid == chain s) contents
  -- chain identifier of the current run; renamed to avoid shadowing Prelude's id
  cid = chain (head contents)
--TODO
-- HELIX lines in PDB file.
getSecondaryStructure :: Protein -> [([Atom],String)]
getSecondaryStructure = undefined
| rotskoff/Haskell-PDB-Utilities | PDBtools/PDButil/PDBparse.hs | gpl-3.0 | 3,279 | 0 | 14 | 1,051 | 870 | 465 | 405 | 58 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module Test.StandOff.DataXML (htf_thisModulesTests) where
import Test.Framework
import StandOff.TextRange
import Test.StandOff.TestSetup
test_elementImplementsTextRange = do
assertEqual 100 (start d)
assertEqual 200 (end d)
assertEqual (100, 200) (spans d)
assertEqual ((99, 105), (194, 201)) (splitPoints d)
assertEqual 100 (len d)
assertThrowsSome (split FstSplit d)
assertThrowsSome (leftSplit FstSplit d (elm "x" 120 350 []))
assertThrowsSome (rightSplit FstSplit d (elm "x" 12 150 []))
assertEqual True (d <<>> (elm "span" 120 150 []))
assertEqual True (d <<>> (elm "span" 100 150 []))
assertEqual True (d <<>> (elm "span" 120 200 []))
assertEqual False (d <<>> (elm "span" 120 210 []))
assertEqual True (d `before` (elm "div" 210 300 []))
assertEqual True (d `before` (elm "div" 200 300 []))
assertEqual False (d `before` (elm "div" 10 30 []))
assertEqual True (d `behind` (elm "div" 10 30 []))
assertEqual True (d `behind` (elm "div" 10 100 []))
assertEqual False (d `behind` (elm "div" 210 300 []))
assertEqual True (d `leftOverlaps` (elm "div" 190 300 []))
assertEqual False (d `leftOverlaps` (elm "div" 90 130 []))
assertEqual True (d `rightOverlaps` (elm "div" 90 130 []))
assertEqual False (d `rightOverlaps` (elm "div" 210 300 []))
where d = elm "div" 100 200 []
| lueck/standoff-tools | testsuite/Test/StandOff/DataXML.hs | gpl-3.0 | 1,361 | 0 | 12 | 249 | 646 | 327 | 319 | 29 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Wiretap.Main where
import System.Console.Docopt
-- import System.Directory
import System.Environment (getArgs)
import System.FilePath
import System.IO
-- import Debug.Trace
import Control.Applicative
import Control.Lens (over, _2)
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Trans.Except
import Control.Monad.Trans.State.Strict (StateT)
import Control.Monad.State.Class
import Control.Monad.Writer
-- import Z3.Monad (evalZ3, MonadZ3)
import qualified Data.List as L
import qualified Data.Map.Strict as M
import Data.Maybe (catMaybes, fromMaybe)
import qualified Data.Set as S
import Data.Traversable (mapM)
import Data.Unique
-- import Data.IORef
import Data.Either
import Data.Functor
-- import Debug.Trace
import Pipes
import qualified Pipes.Lift as PL
import qualified Pipes.Missing as PM
import qualified Pipes.Prelude as P
import qualified Pipes.Internal as PI
import Wiretap.Analysis.Count
import Wiretap.Format.Binary
import Wiretap.Format.Text
import Wiretap.Utils
import Wiretap.Data.Event
import Wiretap.Data.History
import qualified Wiretap.Data.Program as Program
import Wiretap.Analysis.DataRace
import Wiretap.Analysis.Deadlock
import Wiretap.Analysis.HBL
import Wiretap.Analysis.HBL.Z3
import Wiretap.Analysis.Lock hiding (lockMap)
import Wiretap.Analysis.Permute
import Wiretap.Analysis.Consistency
import Wiretap.Analysis.MustHappenBefore
patterns :: Docopt
patterns = [docopt|wiretap-tools version 0.1.0.0
Usage:
wiretap-tools (count|size) [<history>]
wiretap-tools parse [-Ph] [<history>]
wiretap-tools lockset [-vh] [<history>]
wiretap-tools dataraces [options] [<history>]
wiretap-tools deadlocks [options] [<history>]
wiretap-tools bugs [options] [<history>]
wiretap-tools check [options] [<history>]
wiretap-tools (--help | --version)
Options:
-h, --human-readable Adds more information about execution
-P PROGRAM, --program PROGRAM The path to the program information, the default
is the folder of the history if none is declared.
  -f FILTER, --filter FILTER     For use in a candidate analysis. Multiple filters
                                 can be given, separated by commas. See Filters.
-p PROVER, --prover PROVER For use in a candidate analysis, if no prover is
                                 provided, unverified candidates are produced.
  -o OUT, --proof OUT            Produces the proof in the given directory
  --chunk-size CHUNK_SIZE        Sets the size of the chunks; if not set, the
                                 program will read the entire history.
  --chunk-offset CHUNK_OFFSET    Chunk offset is the number of elements dropped
                                 after each chunk. The minimal offset is 1, and the
                                 maximal offset, which touches all events, is the
                                 chunk size.
--solve-time SOLVE_TIME The time the solver can use before it is timed out
(default: 0).
--solver SOLVER The solver (default: z3:qf_lia)
--ignore IGNORED_FILE A file containing candidates to ignore.
-v, --verbose Produce verbose outputs
Filters:
Filters are applicable to dataraces and deadlock analyses.
lockset: Remove all candidates with shared locks.
reject: Rejects all candidates
ignored: Don't try candidates in the ignore set
  mhb:     Do not check candidates which are must-happen-before related
Provers:
A prover is an algorithm turns a history into a constraint.
  none:       No constraints except that the candidate events have to be placed
              next to each other.
free: A prover that only uses must-happen-before constraints, and sequential
consistency.
valuesonly: An unsound prover that only takes values into account.
  branchonly: An unsound prover that only takes branch events into account.
refsonly: An unsound prover that only takes refs into account.
  dirk:       The data-flow-sensitive control-flow consistency algorithm [default].
  rvpredict:  A prover based on Huang et al. 2014.
  said:       The prover used in Said et al. 2011.
|]
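-- Example invocations (illustrative; the history file name is a placeholder):
--
-- > wiretap-tools dataraces --prover dirk --filter lockset,mhb wiretap.hist
-- > wiretap-tools deadlocks --chunk-size 10000 --chunk-offset 5000 wiretap.hist
-- > wiretap-tools parse --human-readable wiretap.hist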
data Config = Config
{ verbose :: Bool
, prover :: String
, filters :: [String]
, outputProof :: Maybe FilePath
, program :: Maybe FilePath
, history :: Maybe FilePath
, humanReadable :: Bool
, ignoreSet :: S.Set String
, chunkSize :: Maybe Int
, chunkOffset :: Int
, solveTime :: Integer
, solver :: String
} deriving (Show, Read)
getArgOrExit :: Arguments -> Option -> IO String
getArgOrExit = getArgOrExitWith patterns
helpNeeded :: Arguments -> Bool
helpNeeded args =
args `isPresent` longOption "help"
main :: IO ()
main = do
mainWithArgs =<< getArgs
mainWithArgs :: [String] -> IO ()
mainWithArgs args =
case parseArgs patterns args of
Right args' -> do
when (helpNeeded args') $ exitWithUsage patterns
config <- readConfig args'
runCommand args' config
Left err ->
exitWithUsageMessage patterns (show err)
readConfig :: Arguments -> IO Config
readConfig args = do
ignoreSet' <- case getLongOption "ignore" of
Just file ->
S.fromList . lines <$> readFile file
Nothing ->
return $ S.empty
let chunk_size = read <$> getLongOption "chunk-size"
return $ Config
{ verbose = isPresent args $ longOption "verbose"
, filters = splitOn ','
$ getArgWithDefault args "mhb,lockset" (longOption "filter")
, prover = getArgWithDefault args "dirk" (longOption "prover")
, outputProof = getLongOption "proof"
, program = getLongOption "program"
, history = getArgument "history"
, chunkSize = chunk_size
, chunkOffset = fromMaybe (fromMaybe 1 $ (flip div 2) <$> chunk_size)
(read <$> getLongOption "chunk-offset")
, humanReadable = args `isPresent` longOption "human-readable"
, ignoreSet = ignoreSet'
, solveTime = read $ getArgWithDefault args "0" (longOption "solve-time")
, solver = getArgWithDefault args "z3:qf_lia" (longOption "solver")
}
where
getLongOption = getArg args . longOption
getArgument = getArg args . argument
prettyPrintEvents :: Program.Program -> Config -> Proxy X () () Event IO () -> IO ()
prettyPrintEvents !p !config events
| humanReadable config = do
runEffect $ for events $ \e -> do
PI.M $ do
print $ PP p e
i <- instruction p e
putStr " "
putStrLn $ Program.instName p i
return (PI.Pure ())
| otherwise = do
let
go !pipe =
case pipe of
PI.Respond e f -> print (PP p e) >>= go . f
PI.M m -> m >>= go
PI.Pure e -> return e
_ -> undefined
go events
-- runEffect $ for events helper
runCommand :: Arguments -> Config -> IO ()
runCommand args config = do
p <- getProgram config
let
pprint :: Show (PP a) => a -> String
pprint = pp p
onCommand "parse" $ prettyPrintEvents p config
onCommand "count" $
countEvents >=> print
onCommand "size" $
P.length >=> print
onCommand "lockset" $ \events -> do
locks <- lockset . fromEvents <$> P.toListM events
forM_ locks $ printLockset pprint . over _2 (L.intercalate "," . map pprint . M.assocs)
onCommand "dataraces" $
proveCandidates config p
(const . raceCandidates)
$ prettyPrint p
onCommand "deadlocks" $
proveCandidates config p (
\h s ->
deadlockCandidates' h $ lockMap s
) $ prettyPrint p
onCommand "bugs" $
proveCandidates config p (
\h s ->
concat
[ BDeadlock <$> deadlockCandidates' h (lockMap s)
, BDataRace <$> raceCandidates h
]
) $ prettyPrint p
onCommand "check" $
(\e -> return (e >-> PM.scan' (\i e' -> (i+1, Unique i e')) 0))
>=> checkConsistency p
>=> print
where
getProgram cfg =
maybe (return Program.empty) Program.fromFolder $
program config <|> fmap takeDirectory (history cfg)
printLockset pprint (e, locks) | humanReadable config =
putStrLn $ padStr (pprint e) ' ' 60 ++ " - " ++ locks
printLockset _ (_, locks) =
putStrLn locks
withHistory :: (Handle -> IO ()) -> IO ()
withHistory f =
case history config of
Just events -> do
withFile events ReadMode f
Nothing -> do
f stdin
onCommand :: String -> (Producer Event IO () -> IO ()) -> IO ()
onCommand cmd f =
when (args `isPresent` command cmd) $
withHistory (f . readHistory)
padStr p char size =
p ++ L.replicate (size - length p) char
type LockState = M.Map Thread [(Ref, UE)]
data ProverState = ProverState
{ proven :: !(S.Set String)
, lockMap :: !(UniqueMap (M.Map Ref UE))
, lockState :: !(LockState)
, mhbGraph :: !MHB
} deriving (Show)
addProven :: String -> ProverState -> ProverState
addProven a p =
p { proven = S.insert a $ proven p }
updateLockState :: PartialHistory h => h -> ProverState -> ProverState
updateLockState h p =
p { lockState = snd $! locksetSimulation (lockState p) h }
setLockMap :: PartialHistory h => h -> ProverState -> ProverState
setLockMap h p =
p { lockMap = fst $ locksetSimulation (lockState p) h }
stateFromChunck :: PartialHistory h => h -> ProverState -> ProverState
stateFromChunck h p =
p { lockMap = fst $ locksetSimulation (lockState p) h
, mhbGraph = mustHappenBefore h
}
type ProverT m = StateT ProverState m
proveCandidates
:: forall a m. (Candidate a, Show a, Ord a, MonadIO m, MonadCatch m)
=> Config
-> Program.Program
-> (forall h. PartialHistory h => h -> ProverState -> [a])
-> (a -> IO String)
-> Producer Event m ()
-> m ()
proveCandidates config p findCandidates toString events = do
say $ "Filters: " ++ show (filters config)
say $ "Human Readable: " ++ show (humanReadable config)
runEffect $ uniqueEvents >-> PL.evalStateP initialState proverPipe
where
initialState =
(ProverState S.empty (fromUniques []) M.empty mhbEmpty)
logV = hPutStrLn stderr
say :: forall m'. (MonadIO m') => String -> m' ()
say = when (verbose config) . liftIO . logV
uniqueEvents =
PM.finite' (events >-> PM.scan' (\i e -> (i+1, Unique i e)) 0)
proverPipe =
chunkate >-> forever (await >>= lift . chunkProver)
chunkate :: Pipe (Maybe UE) [UE] (ProverT m) ()
chunkate =
case chunkSize config of
Nothing -> do
-- Read the entire history
list <- PM.asList $ PM.recoverAll >-> PM.end'
lift . modify $ stateFromChunck list
yield list
Just size ->
-- Read a little at a time
getN size >>= go size
where
offset = chunkOffset config
go size chunk = do
!ls <- lift $ gets lockState
liftIO . when (verbose config) $
case chunk of
a:_ -> do
logV $ "At event " ++ show (idx a)
forM_ (M.assocs ls) $ \(t, locks) -> when (not $ L.null locks) $ do
logV $ (pp p t) ++ " has locks from:"
forM_ locks $ \(r, event) -> do
_event <- pp p <$> instruction p (normal event)
logV $ " " ++ pp p r ++ " " ++ _event
[] -> return ()
lift . modify $ stateFromChunck chunk
yield chunk
if actualChunkSize < size
then
say $ "Done"
else do
new <- getN offset
let (dropped, remainder) = splitAt offset chunk
lift . modify $ updateLockState dropped
go size $ remainder ++ new
where actualChunkSize = length chunk
getN size = do
catMaybes <$> PM.asList (PM.take' size)
markProven prv = do
modify $ addProven prv
liftIO $ putStrLn prv
runChosenSolver prv =
case (solver config) of
"z3:qf_lia" -> runLIASolver (solveTime config) (probSymbolDef prv)
"z3:qf_idl" -> runIDLSolver (solveTime config) (probSymbolDef prv)
"z3:qf_lra" -> runLRASolver (solveTime config) (probSymbolDef prv)
"z3:qf_rdl" -> runRDLSolver (solveTime config) (probSymbolDef prv)
a -> error $ "Do not know about solver: " ++ a
chunkProver
:: forall h. (PartialHistory h)
=> h
-> StateT ProverState m ()
chunkProver chunk = do
-- Find candidates
candidates <- findCandidates chunk <$> get
say $ "Found " ++ show (length candidates) ++ " candidate(s)."
when (verbose config) . liftIO . forM_ candidates $ \ c -> do
logV $ " -+ " ++ L.intercalate " ++ " (map (pp p) . S.toList $ candidateSet c)
-- First we apply filters
let fs = getFilter (filters config)
(_, real) <- partitionEithers <$> mapM fs candidates
say $ "- after filter: " ++ show (length real)
-- The we group the results by name.
realByBug <- liftIO $ groupUnsortedOnFst <$> mapM (\c -> (,c) <$> toString c) real
say $ "- distinct: " ++ show (length realByBug)
-- And remove any that have been proved before
ps <- gets proven
let toBeProven = filter (not . (`S.member` ps) . fst) realByBug
say $ "- not proven: " ++ show (length toBeProven)
lm <- gets lockMap
mh <- gets mhbGraph
-- Then we start a batch prover, where we prove group in order, reporting
-- anything we find
case getProver (prover config) of
Just df
| length toBeProven > 0 -> do
let problem' = generateProblem (lm, mh, df) chunk
say $ "- Problem: "
say $ " + elements: " ++ (show . countEventsF . map normal $ probElements problem')
say $ " + symbols: " ++ (show . countEventsF . map normal $ probSymbols problem')
let problem = reduceProblem problem'
-- say $ "- Reduced problem: "
-- say $ " + elements: " ++ (show . countEventsF . map normal $ probElements problem)
-- say $ " + symbols: " ++ (show . countEventsF . map normal $ probSymbols problem)
e <- runChosenSolver problem $ do
assert (probBase problem)
forM_ toBeProven $ \(item, cs) -> do
say $ "- Trying to prove " ++ item ++ ", with "
++ show (length cs) ++ " candidates."
-- forM_ cs $ \c -> do
-- say (show c)
x <- solveOne problem cs
case x of
Nothing ->
say " - Could not prove constraints."
Just c -> do
when (humanReadable config) . liftIO $ do
logV ("Found: " ++ item)
forM_ (candidateSet c) $ \cs' ->
logV (" -: " ++ pp p cs')
lift $ markProven item
-- liftIO $ printProof pf
either (say . show) return $ e
_ -> do
forM_ toBeProven $ \(item, cs) -> do
when (humanReadable config) . liftIO $ do
logV ("Found: " ++ show item)
forM_ (candidateSet $ head cs) $ \cs' ->
logV (" -:" ++ pp p cs')
markProven item
-- onProverError
-- :: (PartialHistory h)
-- => h -> a -> HBL UE
-- -> IO String
-- onProverError hist c cnts = do
-- case outputProof config of
-- Just folder -> do
-- createDirectoryIfMissing True folder
-- let ls = map (show . idx) . L.sort . S.toList $ candidateSet c
-- file = folder </> (L.intercalate "-" ls ++ ".err.dot")
-- withFile file WriteMode $ \h ->
-- hPutStr h $ cnf2dot p hist (toCNF cnts)
-- return $ "Could solve constraints, outputted to '" ++ file ++ "'"
-- Nothing -> do
-- return "Could not solve constraints."
-- printProof (Proof c hbl hist) = do
-- case outputProof config of
-- Just folder -> do
-- createDirectoryIfMissing True folder
-- let ls = map (show . idx) . L.sort . S.toList $ candidateSet c
-- let fn = folder </> L.intercalate "-" ls
-- withFile (fn ++ ".hist") WriteMode $
-- \h -> runEffect $ each hist >-> P.map normal >-> writeHistory h
-- withFile (fn ++ ".dot") WriteMode $ \h ->
-- hPutStr h $ cnf2dot p hist (toCNF hbl)
-- Nothing ->
-- return ()
getFilter
:: forall m'. (MonadIO m', MonadState ProverState m')
=> [String]
-> a -> m' (Either (IO String) a)
getFilter filterNames = inner
where
inner c = runExceptT $ mapM ($c) fs $> c
fs = map (toFilter p config toString) filterNames
getProver name =
case name of
"said" -> Just dfSaid
"dirk" -> Just dfDirk
"rvpredict" -> Just dfRVPredict
"free" -> Just dfFree
"none" -> Nothing
_ -> error $ "Unknown prover: '" ++ name ++ "'"
toFilter :: (MonadIO m, Candidate t, MonadState ProverState m) =>
Program.Program
-> Config
-> (t -> IO String)
-> [Char]
-> t
-> ExceptT (IO [Char]) m ()
toFilter p config toString name c =
case name of
"lockset" -> do
lm <- gets lockMap
void $ withExceptT (\shared -> do
locks <- forM shared $ \(r, (a, b)) -> do
inst_a <- instruction p . normal $ a
inst_b <- instruction p . normal $ b
return $ L.intercalate "\n"
[ " " ++ pp p r
, " " ++ pp p inst_a
, " " ++ pp p inst_b
]
return . L.intercalate "\n" $
"Candidates shares locks:" : locks
) $ locksetFilter' lm c
"mhb" -> do
mg <- gets mhbGraph
forM_ (crossproduct1 . S.toList $ candidateSet c) $ \(a, b) ->
if mhb mg a b
then throwE $ do
inst_a <- instruction p . normal $ a
inst_b <- instruction p . normal $ b
return $ L.intercalate "\n"
[ "Must happen before related"
, " " ++ pp p inst_a
, " " ++ pp p inst_b
]
else return ()
"ignored" -> do
str <- liftIO $ toString c
if S.member str (ignoreSet config)
then throwE . return $ "In ignore set"
else return ()
"reject" ->
throwE . return $ "Rejected"
_ ->
error $ "Unknown filter " ++ name
runAll :: (Monad m') => a -> [a -> m' a] -> m' a
runAll a = L.foldl' (>>=) $ pure a
cnf2dot
:: PartialHistory h
=> Program.Program
-> h
-> [[HBLAtom UE UE]]
-> String
cnf2dot p h cnf = unlines $
[ "digraph {"
, "graph [overlap=false, splines=true];"
, "edge [ colorscheme = dark28 ]"
]
++ [ unlines $ zipWith printEvent [0..] (Wiretap.Data.History.enumerate h)]
++ [ unlines $ printConjunction color cj
| (color, cj) <- zip (cycle $ map show ([1..8] :: [Int])) cnf
]
++ [ "}" ]
where
pprint = concatMap (\c -> if c == '"' then "\\\"" else [c]) . pp p
pr u = "O" ++ show (idx u)
printEvent :: Int -> UE -> String
printEvent i u@(Unique _ event) =
pr u ++ " [ shape = box, fontsize = 10, label = \""
++ pprint (operation event) ++ "\", "
++ "pos = \"" ++ show (threadId (thread event) * 200)
++ "," ++ show (- i * 75) ++ "!\" ];"
events = S.fromList (Wiretap.Data.History.enumerate h)
printAtom color constrain atom =
case atom of
Order a b | a `S.member` events && b `S.member` events ->
"\"" ++ pr a ++ "\" -> \"" ++ pr b ++ "\" "
++ if constrain
then ";"
else "[ style=dashed, color=\"" ++ color ++ "\"];"
Concur a b ->
"\"" ++ pr a ++ "\" -> \"" ++ pr b ++ "\"; "
++ "\"" ++ pr b ++ "\" -> \"" ++ pr a ++ "\""
_ -> ""
printConjunction _ [e] =
[ printAtom "black" True e ]
printConjunction color es =
map (printAtom color False) es
data Bug
= BDataRace DataRace
| BDeadlock Deadlock
deriving (Show, Eq, Ord)
instance Candidate Bug where
candidateSet bug =
case bug of
BDataRace a -> candidateSet a
BDeadlock a -> candidateSet a
prettyPrint p bug =
case bug of
BDataRace a -> ("DR:" ++) <$> prettyPrint p a
BDeadlock a -> ("DL:" ++) <$> prettyPrint p a
| ucla-pls/wiretap-tools | src/Wiretap/Main.hs | gpl-3.0 | 21,568 | 0 | 36 | 7,185 | 5,493 | 2,767 | 2,726 | 435 | 15 |
{---------------------------------------------------------------------}
{- Copyright 2015, 2016 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
module Feivel.Grammar.ZZMod where
import Feivel.Grammar.Util
data ZZModExprLeaf a bool int list mat zzmod tup
= ZZModConst ZZModulo
| ZZModVar Key
| ZZModCast a
| ZZModMacro [(Type, Key, a)] a -- Expr, MacTo ZZModulo
| ZZModAtPos list int
| ZZModAtIdx mat int int
| ZZModAtSlot tup int
| ZZModIfThenElse bool zzmod zzmod
-- Arithmetic
| ZZModNeg zzmod
| ZZModInv zzmod
| ZZModAdd zzmod zzmod
| ZZModSub zzmod zzmod
| ZZModMult zzmod zzmod
| ZZModPow zzmod int
| ZZModSum list
| ZZModProd list
deriving (Eq, Show)
| nbloomf/feivel | src/Feivel/Grammar/ZZMod.hs | gpl-3.0 | 1,804 | 0 | 8 | 672 | 168 | 110 | 58 | 20 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudIOT.Projects.Locations.Registries.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a device registry configuration.
--
-- /See:/ <https://cloud.google.com/iot Cloud IoT API Reference> for @cloudiot.projects.locations.registries.get@.
module Network.Google.Resource.CloudIOT.Projects.Locations.Registries.Get
(
-- * REST Resource
ProjectsLocationsRegistriesGetResource
-- * Creating a Request
, projectsLocationsRegistriesGet
, ProjectsLocationsRegistriesGet
-- * Request Lenses
, plrgXgafv
, plrgUploadProtocol
, plrgAccessToken
, plrgUploadType
, plrgName
, plrgCallback
) where
import Network.Google.CloudIOT.Types
import Network.Google.Prelude
-- | A resource alias for @cloudiot.projects.locations.registries.get@ method which the
-- 'ProjectsLocationsRegistriesGet' request conforms to.
type ProjectsLocationsRegistriesGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] DeviceRegistry
-- | Gets a device registry configuration.
--
-- /See:/ 'projectsLocationsRegistriesGet' smart constructor.
data ProjectsLocationsRegistriesGet =
ProjectsLocationsRegistriesGet'
{ _plrgXgafv :: !(Maybe Xgafv)
, _plrgUploadProtocol :: !(Maybe Text)
, _plrgAccessToken :: !(Maybe Text)
, _plrgUploadType :: !(Maybe Text)
, _plrgName :: !Text
, _plrgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsRegistriesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plrgXgafv'
--
-- * 'plrgUploadProtocol'
--
-- * 'plrgAccessToken'
--
-- * 'plrgUploadType'
--
-- * 'plrgName'
--
-- * 'plrgCallback'
projectsLocationsRegistriesGet
:: Text -- ^ 'plrgName'
-> ProjectsLocationsRegistriesGet
projectsLocationsRegistriesGet pPlrgName_ =
ProjectsLocationsRegistriesGet'
{ _plrgXgafv = Nothing
, _plrgUploadProtocol = Nothing
, _plrgAccessToken = Nothing
, _plrgUploadType = Nothing
, _plrgName = pPlrgName_
, _plrgCallback = Nothing
}
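-- A minimal usage sketch (illustrative only, not taken from the generated
-- docs): build a request for one registry and set the optional JSONP
-- callback through its lens. It assumes ('&') and ('?~') from "Control.Lens"
-- are in scope.
--
-- > projectsLocationsRegistriesGet
-- >     "projects/example-project/locations/us-central1/registries/my-registry"
-- >     & plrgCallback ?~ "myCallback"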
-- | V1 error format.
plrgXgafv :: Lens' ProjectsLocationsRegistriesGet (Maybe Xgafv)
plrgXgafv
= lens _plrgXgafv (\ s a -> s{_plrgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plrgUploadProtocol :: Lens' ProjectsLocationsRegistriesGet (Maybe Text)
plrgUploadProtocol
= lens _plrgUploadProtocol
(\ s a -> s{_plrgUploadProtocol = a})
-- | OAuth access token.
plrgAccessToken :: Lens' ProjectsLocationsRegistriesGet (Maybe Text)
plrgAccessToken
= lens _plrgAccessToken
(\ s a -> s{_plrgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plrgUploadType :: Lens' ProjectsLocationsRegistriesGet (Maybe Text)
plrgUploadType
= lens _plrgUploadType
(\ s a -> s{_plrgUploadType = a})
-- | Required. The name of the device registry. For example,
-- \`projects\/example-project\/locations\/us-central1\/registries\/my-registry\`.
plrgName :: Lens' ProjectsLocationsRegistriesGet Text
plrgName = lens _plrgName (\ s a -> s{_plrgName = a})
-- | JSONP
plrgCallback :: Lens' ProjectsLocationsRegistriesGet (Maybe Text)
plrgCallback
= lens _plrgCallback (\ s a -> s{_plrgCallback = a})
instance GoogleRequest ProjectsLocationsRegistriesGet
where
type Rs ProjectsLocationsRegistriesGet =
DeviceRegistry
type Scopes ProjectsLocationsRegistriesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloudiot"]
requestClient ProjectsLocationsRegistriesGet'{..}
= go _plrgName _plrgXgafv _plrgUploadProtocol
_plrgAccessToken
_plrgUploadType
_plrgCallback
(Just AltJSON)
cloudIOTService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsRegistriesGetResource)
mempty
| brendanhay/gogol | gogol-cloudiot/gen/Network/Google/Resource/CloudIOT/Projects/Locations/Registries/Get.hs | mpl-2.0 | 5,066 | 0 | 15 | 1,113 | 700 | 410 | 290 | 105 | 1 |
-- {-# LANGUAGE #-}
{-# OPTIONS_GHC -Wall -fno-warn-missing-signatures #-}
----------------------------------------------------------------------
-- |
-- Module : Shady.Play.CseTest
-- Copyright : (c) Conal Elliott 2009
-- License : AGPLv3
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Test new CSE stuff
----------------------------------------------------------------------
module Shady.Play.CseTest where
-- import Control.Applicative (liftA2)
import Data.VectorSpace
-- For testing
import Text.PrettyPrint.Leijen.DocExpr (Expr,HasExpr(expr))
import Data.Boolean
import Shady.Language.Exp
-- import Shady.Color
-- import Shady.Image
import Shady.Complex
import Shady.Misc (frac)
-- import Shady.Language.Cse
import Shady.Language.Share
x :: HasExpr a => a -> Expr
x = expr
type Point = ComplexE R
{-
xc :: Color -> Expr
xc = expr . colorToR4
xp :: Point -> Expr
xp = expr . pointToR2
-}
q :: FloatE
q = Var (var "q")
t1,t2 :: FloatE
t1 = q + q
-- Was @q * (q + q)@, now @let a = q + q in a * a@. What happened?
t2 = t1 * t1
c1 = cse t1
t3a = sin q / cos q
-- let a = sin(q) in
-- let b = cos(q) in
-- b + a / b
--
t3 = cos q + t3a
-- cse => cos(q) + sin(q) / cos(q)
t3b = cq + sq / cq
where
cq = cos q
sq = sin q
-- cse => let x3 = cos(q) in x3 + sin(q) / x3
-- let a = cos(q) in
-- a - 1.0 / a
--
t4 = cos q - 1 / cos q
-- let a = cos(q) in
-- a * (a + sin(q) / a)
--
t5 = cos q * t3
-- let a = cos(q) in
-- (a + sin(q) / a) * (a - 1.0 / a)
--
t6 = t3 * t4
-- let a = cos(q) in
-- let b = sin(q) in
-- (a + b / a) * (a - 1.0 / a) + (a + b / a)
t7 = t6 + t3
-- let a = sin(q) in
-- a + (1.0 - a) * (a < 3.0 ? 4.0 : 5.0)
--
t8 = let a = sin q in a + (1 - a) * (ifE (a <* 3) 4 5)
-- q * sin(q)
r = q * sin q
-- let a = sin(q) in
-- a * (q * a)
s = sin q * r
-- let a = sin(q) in
-- let b = q * a in
-- b + a * b
t9a = r + s
-- let a = sin(q) in
-- let b = q * a in
-- a * b + b
t9b = s + r
w = Var (var "w") :: R2E
{-
bw :: BoolE -> Color
bw = boolean white clear
ra :: R2E -> Color
ra z = bw (z <.> z <* 1)
-}
stripes (a :+ _) = frac a <* 0.5
a1 :: FloatE
a1 = magnitudeSq (t *^ uv)
{-
a2 :: BoolE
a2 = uscale2 t udisk uv
a3 :: R4E
a3 = colorToR4 $ toColor (uscale2 (cos t) udisk uv)
-}
t :: FloatE
t = Var (var "t")
u,v :: FloatE
u = Var (var "u")
v = Var (var "v")
uv :: Point
uv = u :+ v
-------------
ts = [t1,t2,t3a,t3,t4,t5,t6,t8,t9a,t9b]
main = mapM_ (print.expr) ts
| sseefried/shady-gen | src/Shady/Play/CseTest.hs | agpl-3.0 | 2,553 | 0 | 11 | 718 | 575 | 340 | 235 | 45 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
module Test.Types where
import Control.Concurrent.STM
import Prelude hiding (min,max)
import qualified Prelude as Pr
import Data.RTree.Bounds
import Data.RTree.TStore
data Vec2F = Vec2F
{ x :: Float
, y :: Float
} deriving Show
data Box2F = Box2F
{ min :: Vec2F
, max :: Vec2F
}
instance Bounds (Maybe Box2F) where
type UnitsT (Maybe Box2F) = Float
empty = Nothing
(Just box1) `intersects` (Just box2) = not $
( (x.max$box1) < (x.min$box2) ||
(y.max$box1) < (y.min$box2) ||
(x.max$box2) < (x.min$box1) ||
(y.max$box2) < (y.min$box1) )
_ `intersects` _ = False
Nothing `contains` _ = False
_ `contains` Nothing = True
(Just box1) `contains` (Just box2) =
( (x.min$box1) < (x.min$box2) &&
(y.min$box1) < (y.min$box2) &&
(x.max$box1) > (x.max$box2) &&
(y.max$box1) > (y.max$box2) )
cover Nothing b = b
cover b Nothing = b
cover (Just box1) (Just box2) =
let minx = Pr.min (x.min$box1) (x.min$box2)
miny = Pr.min (y.min$box1) (y.min$box2)
maxx = Pr.max (x.max$box1) (x.max$box2)
maxy = Pr.max (y.max$box1) (y.max$box2)
in Just $ Box2F (Vec2F minx miny) (Vec2F maxx maxy)
size Nothing = 0
size (Just box) =
let lx = (x.max$box) - (x.min$box)
ly = (y.max$box) - (y.min$box)
in lx * ly
data Ship = Ship
{ pos :: Vec2F
, vel :: Vec2F
, rad :: Float
} deriving Show
instance Spatial Ship where
type BoundsT Ship = Maybe Box2F
bounds ship =
let minx = (x.pos$ship) - rad ship
miny = (y.pos$ship) - rad ship
maxx = (x.pos$ship) + rad ship
maxy = (y.pos$ship) + rad ship
in Just $ Box2F (Vec2F minx miny) (Vec2F maxx maxy)
data ActiveShip = ActiveShip
{ shipID :: Int
, shipVar :: TVar Ship
}
instance Eq ActiveShip where
as1 == as2 = (shipID as1 == shipID as2)
instance TStored Ship where
type KeyT Ship = ActiveShip
derefStore active = readTVar (shipVar active)
modStore active new = writeTVar (shipVar active) new
| johnpmayer/concurrent-rtree | Test/Types.hs | agpl-3.0 | 2,100 | 0 | 17 | 526 | 986 | 498 | 488 | 67 | 0 |
module GameServer.State where
import Control.Concurrent.STM
import Control.Monad.Except
import Control.Monad.State
import Control.Monad.Trans (MonadIO(..))
import Data.Aeson (FromJSON, ToJSON)
import qualified Data.Map as Map
import Data.Text (Text)
import GHC.Generics (Generic)
import GHC.Natural
import System.Random (StdGen, split)
import GameServer.Game
import GameServer.Player
import GameServer.Utils
data Games = Games { seed :: StdGen
, games :: Map.Map Id Game
, players :: Map.Map Text Player
}
initialState :: StdGen -> Games
initialState initSeed = Games initSeed mempty mempty
type GameState = TVar Games
withState ::
(MonadIO m) => GameState -> State Games a -> m a
withState st =
liftIO . atomically . stateTVar st . runState
data Command = JoinGame { gameId :: Id, pName :: Text }
deriving (Eq, Show, Generic, ToJSON, FromJSON)
data Event = GameCreated { gameId :: Id }
| PlayerJoined { gameId :: Id, playerName :: Text, playerKey :: Id }
| PlayerAlreadyJoinedGame { gameId :: Id, playerName :: Text }
| PlayerRegistered { registeredName :: Text }
| DuplicatePlayer { duplicateName :: Text }
| PlayerDoesNotExist { playerName :: Text }
| PlayerNotInGame { gameId :: Id, pKey :: Id }
| UnknownGame { gameId :: Id }
| CanStartPlaying { game :: Game, player :: PlayerState }
| WaitingForPlayers { gameId :: Id, pState :: PlayerState }
deriving (Eq, Show, Generic, ToJSON, FromJSON)
newtype GameError = GameError { reason :: Event }
deriving newtype (Eq, Show, ToJSON, FromJSON)
-- * Queries
listGames :: State Games [Game]
listGames = Map.elems <$> gets games
lookupGame :: Id -> State Games (Either GameError Game)
lookupGame gameId = maybe (Left $ GameError $ UnknownGame gameId) Right . Map.lookup gameId <$> gets games
listPlayers :: State Games [Player]
listPlayers = Map.elems <$> gets players
lookupPlayer :: Text -> State Games (Either GameError Player)
lookupPlayer pName = maybe (Left $ GameError $ PlayerDoesNotExist pName) Right . Map.lookup pName <$> gets players
-- * Commands
applyCommand :: Command -> State Games (Either GameError Event)
applyCommand JoinGame{gameId,pName} = runExceptT $ do
game <- ExceptT $ lookupGame gameId
when (hasJoinedGame pName game) $ throwError $ GameError $ PlayerAlreadyJoinedGame gameId pName
player <- ExceptT $ lookupPlayer pName
pkey <- lift mkRandomId
let game' = joinPlayer pName pkey game
modify' $ \ gs -> gs { games = Map.insert gameId game' (games gs) }
pure $ PlayerJoined gameId pName pkey
createGame :: Game -> State Games Event
createGame game = do
gs <- get
let gid = randomId (seed gs)
(_,newSeed) = split (seed gs)
put (gs { seed = newSeed
, games = Map.insert gid game (games gs)
})
pure $ GameCreated gid
registerPlayer :: Player -> State Games Event
registerPlayer player@Player{playerName} = do
gs <- get
case Map.lookup playerName (players gs) of
Just _ -> pure $ DuplicatePlayer playerName
Nothing -> do
put (gs { players = Map.insert playerName player (players gs) })
pure $ PlayerRegistered playerName
canStartGame :: Id -> Id -> State Games (Either GameError Event)
canStartGame gameId playerKey = runExceptT $ do
game <- ExceptT $ lookupGame gameId
case lookupPlayerState playerKey game of
Nothing -> throwError $ GameError $ PlayerNotInGame gameId playerKey
Just player -> if canStart game
then pure $ CanStartPlaying game player
else pure $ WaitingForPlayers gameId player
-- * Generic State functions
-- | Generate a new random Id from the current `Games` state seed.
-- This function also updates the stored seed so later calls use a fresh one.
mkRandomId :: State Games Id
mkRandomId = do
gs <- get
let gid = randomId (seed gs)
(_,newSeed) = split (seed gs)
put gs { seed = newSeed}
pure gid
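-- A small usage sketch (illustrative, not part of the original module):
-- because 'mkRandomId' splits the seed before writing it back, successive
-- calls draw their Ids from different seeds.
--
-- > twoIds :: State Games (Id, Id)
-- > twoIds = (,) <$> mkRandomId <*> mkRandomId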
| abailly/acquire | game-server/src/GameServer/State.hs | apache-2.0 | 3,979 | 0 | 18 | 908 | 1,299 | 681 | 618 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
module Novelist.FSopsSpec where
-- base
import Control.Monad (void)
-- containers
import qualified Data.Map.Lazy as Map
-- Hspec
import Test.Hspec (Spec, describe, it, shouldBe, Expectation)
-- microlens-ghc
import Lens.Micro.GHC
-- filepath
import System.FilePath (splitPath, pathSeparator)
-- freer
import Control.Monad.Freer (Eff, Arr, run, handleRelayS)
-- (lib)
import Novelist.Types (
unFix2
, FSopsF(ListDir), dirPath, listDir
, DirectoryListing(DirectoryListing), _dirs, _files
)
-- (test)
import Novelist.Types.MockDirectoryTreeF (
MockDirectoryTree, mDirs, mFiles
, mkDir, mkFile, buildMockDir
, flattenMockDir
)
import Novelist.Types.FSTrace (
FSTrace(GetDirLst), dirLstQuery, dirLstAns, _dirLstQuery, _dirLstAns
)
{-# ANN module ("HLint: ignore Redundant do"::String) #-}
infix 1 `shouldMatchTrace`
shouldMatchTrace :: (Show a, Eq a) => a -> a -> Expectation
shouldMatchTrace = flip shouldBe
spec :: Spec
spec = do
describe "FSops computation" $ do
it "non-effectful computations are non-effectful" $ do
shouldMatchTrace []
. getTracesWithMock mockTree1 $ do
return ()
describe "listDir" $ do
describe "queries" $ do
it "allows asking for root path: ." $ do
shouldMatchTrace [Just "."]
. map (^? dirLstQuery)
. getTracesWithMock mockTree1 $ do
listDir "."
it "allows asking for subpath: d1" $ do
shouldMatchTrace [Just "d1"]
. map (^? dirLstQuery)
. getTracesWithMock mockTree1 $ do
listDir "d1"
it "allows asking for subpath: d1/d2" $ do
shouldMatchTrace [Just "d1/d2"]
. map (^? dirLstQuery)
. getTracesWithMock mockTree1 $ do
listDir "d1/d2"
it "allows asking for subpath with leading no-ops: ././d1" $ do
shouldMatchTrace [Just "d1"]
. map (^? dirLstQuery)
. getTracesWithMock mockTree1 $ do
listDir "././d1"
it "allows asking for non-existent subpaths: e1, e1/e2" $ do
shouldMatchTrace [Just "e1", Just "e1/e2"]
. map (^? dirLstQuery)
. getTracesWithMock mockTree1 $ do
void $ listDir "e1"
void $ listDir "e1/e2"
describe "responses" $ do
it "responds with valid ans for root path: ." $ do
shouldMatchTrace [ Just $ Just DirectoryListing { _dirs = ["d1", "d2"]
, _files = ["f1", "f2"]
}
]
. map (^? dirLstAns)
. getTracesWithMock mockTree1 $ do
listDir "."
it "responds with valid ans for subpaths: d1, d2/d3" $ do
shouldMatchTrace [ Just $ Just DirectoryListing { _dirs = []
, _files = ["f1", "f2"]
}
, Just $ Just DirectoryListing { _dirs = []
, _files = ["f1"]
}
]
. map (^? dirLstAns)
. getTracesWithMock mockTree1 $ do
void $ listDir "d1"
void $ listDir "d2/d3"
it "responds with empty ans for subpaths: d999, d1/d999, d2/d3/d999/d999" $ do
shouldMatchTrace [ Just Nothing
, Just Nothing
, Just Nothing
]
. map (^? dirLstAns)
. getTracesWithMock mockTree1 $ do
void $ listDir "d99"
void $ listDir "d1/d999"
void $ listDir "d2/d3/d999/d999"
mockTree1 :: MockDirectoryTree
mockTree1 =
buildMockDir "." $ do
mkFile "f1"
mkFile "f2"
mkDir "d1" $ do
mkFile "f1"
mkFile "f2"
mkDir "d2" $ do
mkFile "f1"
mkFile "f3"
mkDir "d3" $ do
mkFile "f1"
directoryListing :: MockDirectoryTree -> DirectoryListing
directoryListing theMock = DirectoryListing { _dirs = flattened ^. mDirs . to Map.keys
, _files = flattened ^. mFiles
}
where flattened = flattenMockDir theMock
queryMock :: String -> MockDirectoryTree -> Maybe DirectoryListing
queryMock query mock | query == "." = Just . directoryListing $ mock
| otherwise = directoryListing <$> deepQueryTree
where pthComponents :: [FilePath]
pthComponents = dropTailSeparator <$> splitPath query
dropTailSeparator = takeWhile (/= pathSeparator)
deepQueryTree :: Maybe MockDirectoryTree
deepQueryTree = foldl shallowQuery (Just mock) pthComponents
shallowQuery :: Maybe MockDirectoryTree -> FilePath -> Maybe MockDirectoryTree
shallowQuery Nothing _ = Nothing
shallowQuery (Just m) pth = m ^? unFix2 . mDirs . ix pth
getTracesWithMock :: MockDirectoryTree
-> Eff '[FSopsF] a
-> [FSTrace]
getTracesWithMock mockTree =
reverse . run . handleRelayS [] (\s _ -> pure s) go
where
go :: [FSTrace]
-> FSopsF v
-> ([FSTrace] -> Arr r v [FSTrace])
-> Eff r [FSTrace]
go traces x@ListDir{} q = q (lstTrace : traces) queryResult
where
lstTrace = GetDirLst { _dirLstQuery = x ^. dirPath
, _dirLstAns = queryResult
}
queryResult = queryMock (x ^. dirPath) mockTree
| kgadek/novelist | test/Novelist/FSopsSpec.hs | bsd-2-clause | 5,999 | 0 | 28 | 2,354 | 1,389 | 704 | 685 | -1 | -1 |
-- | __Warning: internal module!__ This means that the API may change
-- arbitrarily between versions without notice. Depending on this module may
-- lead to unexpected breakages, so proceed with caution!
--
-- This module provides debugging helpers for inspecting 'Doc's.
--
-- Use the @pretty-simple@ package to get a nicer layout for 'show'n
-- 'Diag's:
--
-- > > Text.Pretty.Simple.pPrintNoColor . diag $ align (vcat ["foo", "bar"])
-- > Column
-- > [
-- > ( 10
-- > , Nesting
-- > [
-- > ( 10
-- > , Cat ( Text 3 "foo" )
-- > ( Cat ( FlatAlt Line Empty ) ( Text 3 "bar" ) )
-- > )
-- > ]
-- > )
-- > ]
module Prettyprinter.Internal.Debug where
import Data.Text (Text)
import Prettyprinter.Internal (PageWidth, Doc)
import qualified Prettyprinter.Internal as Doc
-- | A variant of 'Doc' for debugging.
--
-- Unlike in the 'Doc' type, the 'Column', 'WithPageWidth' and 'Nesting'
-- constructors don't contain functions but are \"sampled\" to allow
-- simple inspection with 'show'.
data Diag ann =
Fail
| Empty
| Char !Char
| Text !Int !Text
| Line
| FlatAlt (Diag ann) (Diag ann)
| Cat (Diag ann) (Diag ann)
| Nest !Int (Diag ann)
| Union (Diag ann) (Diag ann)
| Column [(Int, Diag ann)]
-- ^ 'Doc': @(Int -> Diag ann)@
| WithPageWidth [(PageWidth, Diag ann)]
-- ^ 'Doc': @(PageWidth -> Diag ann)@
| Nesting [(Int, Diag ann)]
-- ^ 'Doc': @(Int -> Diag ann)@
| Annotated ann (Diag ann)
deriving Show
-- | Convert a 'Doc' to its diagnostic representation.
--
-- The functions in the 'Column', 'WithPageWidth' and 'Nesting' constructors are
-- sampled with some default values.
--
-- Use `diag'` to control the function inputs yourself.
--
-- >>> diag $ Doc.align (Doc.vcat ["foo", "bar"])
-- Column [(10,Nesting [(10,Cat (Text 3 "foo") (Cat (FlatAlt Line Empty) (Text 3 "bar")))])]
diag :: Doc ann -> Diag ann
diag = diag' [10] [Doc.defaultPageWidth] [10]
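-- An illustrative sketch (assumed example, not from the original module;
-- it presumes 'Doc.column' and 'Doc.pretty' are available from the qualified
-- import): sampling a column-dependent document at cursor positions 0 and 4
-- records one entry per sample, roughly
--
-- > diag' [0, 4] [Doc.defaultPageWidth] [10] (Doc.column Doc.pretty)
-- > -- ~ Column [(0,Char '0'),(4,Char '4')]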
diag'
:: [Int]
-- ^ Cursor positions for the 'Column' constructor
-> [PageWidth]
-- ^ For 'WithPageWidth'
-> [Int]
-- ^ Nesting levels for 'Nesting'
-> Doc ann
-> Diag ann
diag' columns pageWidths nestings = go
where
go doc = case doc of
Doc.Fail -> Fail
Doc.Empty -> Empty
Doc.Char c -> Char c
Doc.Text l t -> Text l t
Doc.Line -> Line
Doc.FlatAlt a b -> FlatAlt (go a) (go b)
Doc.Cat a b -> Cat (go a) (go b)
Doc.Nest i d -> Nest i (go d)
Doc.Union a b -> Union (go a) (go b)
Doc.Column f -> Column (apply f columns)
Doc.WithPageWidth f -> WithPageWidth (apply f pageWidths)
Doc.Nesting f -> Nesting (apply f nestings)
Doc.Annotated ann d -> Annotated ann (go d)
apply :: (a -> Doc ann) -> [a] -> [(a, Diag ann)]
apply f = map (\x -> (x, go (f x)))
| quchen/prettyprinter | prettyprinter/src/Prettyprinter/Internal/Debug.hs | bsd-2-clause | 2,963 | 0 | 13 | 836 | 679 | 370 | 309 | 52 | 13 |
module Drasil.GlassBR.Body where
import Control.Lens ((^.))
import Language.Drasil hiding (organization)
import qualified Drasil.SRS as SRS
import Drasil.DocumentLanguage (AppndxSec(..), AuxConstntSec(..),
DocSection(..), GSDSec(GSDProg2), GSDSub(UsrChars, SystCons), --DocSection uses everything but Verbatim
IntroSec(IntroProg), IntroSub(IChar, IOrgSec, IPurpose, IScope), LCsSec(..),
UCsSec(..), RefSec(RefProg), RefTab(TAandA, TUnits), ReqrmntSec(..),
ReqsSub(FReqsSub, NonFReqsSub), ScpOfProjSec(ScpOfProjProg), SSDSec(SSDVerb),
StkhldrSec(StkhldrProg2), StkhldrSub(Client, Cstmr),
TraceabilitySec(TraceabilityProg), TSIntro(SymbOrder, TSPurpose), DocDesc,
mkDoc, mkRequirement, tsymb)
import Drasil.DocumentLanguage.Definitions (Field
(DefiningEquation, Description, RefBy, Source), InclUnits(IncludeUnits),
Verbosity(Verbose), Fields)
import Drasil.DocumentLanguage.RefHelpers (cite)
import Data.Drasil.Concepts.Computation (computerApp, inParam,
computerLiteracy, inValue, inQty)
import Data.Drasil.Concepts.Documentation as Doc (analysis, appendix, aspect,
characteristic, class_, code, condition, constraint, content,
datum, definition, description, document, emphasis, endUser, failure,
figure, goal, implementation, information, interface, input_, item,
message, model, organization, output_, problem, purpose,
quantity, reference, reviewer, section_, software, standard,
symbol_, system, template, term_, theory, traceyMatrix, user, value,
physicalSystem, datumConstraint, userInput, assumption, dataDefn,
goalStmt, inModel, likelyChg, physSyst, requirement, srs, thModel,
dataConst, company)
import Data.Drasil.Concepts.Education (secondYear, undergradDegree,
civilEng, structuralEng, scndYrCalculus, structuralMechanics)
import Data.Drasil.Concepts.Math (graph, calculation, probability,
parameter)
import Data.Drasil.Concepts.PhysicalProperties (dimension)
import Data.Drasil.Concepts.Physics (distance)
import Data.Drasil.Concepts.Software (correctness, verifiability,
understandability, reusability, maintainability, portability,
performance, errMsg)
import Data.Drasil.Concepts.Thermodynamics (degree_')
import Data.Drasil.SentenceStructures (acroR, sVersus, sAnd, foldlSP,
foldlSent, foldlSent_, figureLabel, foldlList, showingCxnBw,
foldlsC, sOf, followA, ofThe, sIn, isThe, isExpctdToHv, sOr, underConsidertn,
tAndDWAcc, tAndDOnly, tAndDWSym, andThe)
import Data.Drasil.Software.Products (sciCompS)
import Data.Drasil.Utils (getES, makeTMatrix, makeListRef, itemRefToSent,
refFromType, enumSimple, enumBullet, prodUCTbl)
import Drasil.GlassBR.Assumptions (assumptionConstants, assumptionDescs,
gbRefDB, newAssumptions)
import Drasil.GlassBR.Changes (likelyChanges_SRS, unlikelyChanges_SRS)
import Drasil.GlassBR.Concepts (aR, lShareFac, gLassBR, stdOffDist, glaSlab,
blastRisk, glass, glaPlane, glassBRProg, ptOfExplsn, acronyms)
import Drasil.GlassBR.DataDefs (dataDefns, gbQDefns, hFromt, strDisFac, nonFL,
dimLL, glaTyFac, tolStrDisFac, tolPre, risk)
import Drasil.GlassBR.IMods (iModels, calOfCap, calOfDe, probOfBr)
import Drasil.GlassBR.ModuleDefs (allMods)
import Drasil.GlassBR.References (rbrtsn2012)
import Drasil.GlassBR.Symbols (this_symbols)
import Drasil.GlassBR.TMods (tModels, t1SafetyReq, t2SafetyReq)
import Drasil.GlassBR.Unitals (stressDistFac, aspectR, dimlessLoad,
lateralLoad, sflawParamM, char_weight, sD, demand, demandq,
aspectRWithEqn, aspectR, lRe, wtntWithEqn, sdWithEqn, prob_br, notSafe,
safeMessage, is_safe1, is_safe2, plate_width, plate_len, blast, glassTy,
gbInputDataConstraints, explosion, pb_tol, blast, bomb, blastTy,
glassGeo, glass_type, nom_thick, sdx, sdy, sdz, tNT, gBRSpecParamVals,
loadTypes, load, glassTypes, probBreak, termsWithAccDefn, termsWithDefsOnly,
gbConstants, gbConstrained, gbOutputs, gbInputs, glBreakage, capacity,
constant_LoadDF)
import Drasil.Sections.ReferenceMaterial (intro)
import Drasil.Sections.SpecificSystemDescription (solChSpecF,
inDataConstTbl, outDataConstTbl, dataConstraintUncertainty, goalStmtF,
physSystDesc, termDefnF, probDescF, specSysDesF)
import Drasil.Sections.TraceabilityMandGs (traceGIntro)
import Data.Drasil.Citations (koothoor2013, smithLai2005)
import Data.Drasil.People (spencerSmith, nikitha, mCampidelli)
import Data.Drasil.Phrase(for'')
import Data.Drasil.SI_Units (kilogram, metre, millimetre, newton, pascal,
second)
{--}
gbSymbMap :: ChunkDB
gbSymbMap =
cdb this_symbols (map nw acronyms ++ map nw this_symbols) ([] :: [ConceptChunk])
(map unitWrapper [metre, second, kilogram] ++ map unitWrapper [pascal, newton])
resourcePath :: String
resourcePath = "../../../datafiles/GlassBR/"
glassBR_srs :: Document
glassBR_srs = mkDoc mkSRS (for'' titleize phrase) glassSystInfo
mkSRS :: DocDesc
mkSRS = RefSec (RefProg intro [TUnits, tsymb [TSPurpose, SymbOrder], TAandA]) :
IntroSec (
IntroProg (startIntro software blstRskInvWGlassSlab gLassBR) (short gLassBR)
[IPurpose (purpose_intro_p1 document gLassBR glaSlab),
IScope incScoR endScoR,
IChar (rdrKnldgbleIn glBreakage blastRisk) undIR appStanddIR,
IOrgSec intendedReaderIntro dataDefn SRS.dataDefn intendedReaderIntro_end]):
StkhldrSec (StkhldrProg2 [Client gLassBR (S "a" +:+ phrase company +:+
S "named Entuitive. It is developed by Dr." +:+ (S $ name mCampidelli)),
Cstmr gLassBR]) :
GSDSec (GSDProg2
[UsrChars [user_chars_bullets endUser gLassBR secondYear undergradDegree
civilEng structuralEng glBreakage blastRisk], SystCons [] []]) :
ScpOfProjSec (ScpOfProjProg (short gLassBR) (prod_use_case_table)
(indiv_prod_use_case (glaSlab) (capacity) (demandq) (probability))) :
SSDSec (SSDProg [SSDProblem (
PDProg start gLassBR ending [terms_defs, phys_sys_desc, goals])],
[ SSDSolChSpec (
SCSProg
[ Assumptions
, TMs ([Label] ++ stdFields) [t1IsSafe]
, GDs [] [] HideDerivation -- No Gen Defs for GlassBR
, DDs ([Label, Symbol, Units] ++ stdFields) dataDefns ShowDerivation
, IMs ([Label, Input, Output, InConstraints, OutConstraints] ++
stdFields) [probOfBreak, testIMFromQD] HideDerivation
]
)
]
) :
ReqrmntSec (ReqsProg [
FReqsSub func_reqs_list,
NonFReqsSub [performance] (gBRpriorityNFReqs)
(S "This problem is small in size and relatively simple")
(S "Any reasonable" +:+ phrase implementation +:+.
(S "will be very quick" `sAnd` S "use minimal storage"))]) :
LCsSec (LCsProg likely_change_list) :
UCsSec (UCsProg unlikely_change_list) :
TraceabilitySec
(TraceabilityProg traceyMatrices [trace_table1Desc, trace_table2Desc, trace_table3Desc]
(traceyMatrices ++ trace_intro2 ++ traceyGraphs) []) :
AuxConstntSec (AuxConsProg gLassBR auxiliaryConstants) :
Bibliography :
AppndxSec (AppndxProg [appendix_intro, fig_5, fig_6]) : []
stdFields :: Fields
stdFields = [DefiningEquation, Description Verbose IncludeUnits, Source, RefBy]
glassSystInfo :: SystemInformation
glassSystInfo = SI {
_sys = glassBRProg,
_kind = srs,
_authors = [nikitha, spencerSmith],
_units = map unitWrapper [metre, second, kilogram] ++ map unitWrapper [pascal, newton],
_quants = this_symbols,
_concepts = [] :: [DefinedQuantityDict],
_definitions = dataDefns ++
(map (relToQD gbSymbMap) iModels {-[RelationConcept]-}) ++
(map (relToQD gbSymbMap) tModels {-[RelationConcept]-}) ++
[wtntWithEqn, sdWithEqn], -- wtntWithEqn is defined in Unitals but only appears
-- in the description of the Calculation of Demand instance model;
-- should this be included as a Data Definition?
-- (same for sdWithEqn)
_inputs = map qw gbInputs,
_outputs = map qw gbOutputs,
_defSequence = gbQDefns,
_constraints = gbConstrained,
_constants = gbConstants,
_sysinfodb = gbSymbMap,
_refdb = gbRefDB
}
--FIXME: All named ideas, not just acronyms.
testIMFromQD :: InstanceModel
testIMFromQD = imQD gbSymbMap risk EmptyS [] [] "riskFun" --shortname
glassBR_code :: CodeSpec
glassBR_code = codeSpec glassSystInfo allMods
spec_sys_desc, prob_desc, terms_defs, phys_sys_desc, goals, sol_chars_spec :: Section
prod_use_case_table,
phys_sys_desc_list, sol_chars_spec_intro, data_consts_table1,
data_consts_table2, trace_table1,
trace_table2, trace_table3, appendix_intro,
fig_glassbr, fig_2, fig_3, fig_4, fig_5,
fig_6 :: Contents
func_reqs_list, trace_intro2 :: [Contents]
--------------------------------------------------------------------------------
terms_defs_bullets :: Contents
terms_defs_bullets = Enumeration $ (Numeric $
map tAndDOnly termsWithDefsOnly
++
terms_defs_bullets_glTySubSec
++
terms_defs_bullets_loadSubSec
++
map tAndDWAcc termsWithAccDefn
++
[tAndDWSym probBreak prob_br])
--FIXME: merge? Needs 2 arguments because there is no instance for (SymbolForm ConceptChunk)...
terms_defs_bullets_glTySubSec, terms_defs_bullets_loadSubSec :: [ItemType]
terms_defs_bullets_glTySubSec = [Nested (((titleize glassTy) :+: S ":"))
(Bullet $ map tAndDWAcc glassTypes)]
terms_defs_bullets_loadSubSec = [Nested (((at_start load) :+: S ":"))
(Bullet $ map tAndDWAcc (take 2 loadTypes)
++
map tAndDOnly (drop 2 loadTypes))]
--Used in "Goal Statements" Section--
goals_list :: Contents
goals_list = enumSimple 1 (short goalStmt) goals_list_goalStmt1
--Used in "Traceability Matrices and Graphs" Section--
traceyMatrices, traceyGraphs :: [Contents]
traceyMatrices = [trace_table1, trace_table2, trace_table3]
traceyGraphs = [fig_2, fig_3, fig_4]
solChSpecSubsections :: [CI]
solChSpecSubsections = [thModel, inModel, dataDefn, dataConst]
--Used in "Values of Auxiliary Constants" Section--
auxiliaryConstants :: [QDefinition]
auxiliaryConstants = assumptionConstants ++ gBRSpecParamVals
--Used in "Functional Requirements" Section--
requiredInputs :: [QuantityDict]
requiredInputs = (map qw [plate_len, plate_width, char_weight])
++ (map qw [pb_tol, tNT]) ++ (map qw [sdx, sdy, sdz])
++ (map qw [glass_type, nom_thick])
func_reqs_req6_pulledList :: [QDefinition]
func_reqs_req6_pulledList = [nonFL, glaTyFac, dimLL, tolPre,
tolStrDisFac, strDisFac, hFromt]
--Used in "Non-Functional Requirements" Section--
gBRpriorityNFReqs :: [ConceptChunk]
gBRpriorityNFReqs = [correctness, verifiability, understandability,
reusability, maintainability, portability]
--------------------------------------------------------------------------------
{--INTRODUCTION--}
startIntro :: NamedChunk -> Sentence -> CI -> Sentence
startIntro prgm sfwrPredicts progName = foldlSent [
at_start prgm, S "is helpful to efficiently" `sAnd` S "correctly predict the"
+:+. sfwrPredicts, underConsidertn blast,
S "The", phrase prgm `sC` S "herein called", short progName,
S "aims to predict the", sfwrPredicts, S "using an intuitive",
phrase interface]
rdrKnldgbleIn :: (NamedIdea n, NamedIdea n1) => n1 -> n -> Sentence
rdrKnldgbleIn undrstd1 undrstd2 = (phrase theory +:+ S "behind" +:+
phrase undrstd1 `sAnd` phrase undrstd2)
undIR, appStanddIR, incScoR, endScoR :: Sentence
undIR = foldlList [phrase scndYrCalculus, phrase structuralMechanics,
plural computerApp `sIn` phrase civilEng]
appStanddIR = foldlSent [S "In addition" `sC` plural reviewer,
S "should be familiar with the applicable", plural standard,
S "for constructions using glass from",
sSqBr (S "4-6" {-astm_LR2009, astm_C1036, astm_C1048-}) `sIn`
(makeRef (SRS.reference SRS.missingP []))]
incScoR = foldl (+:+) EmptyS [S "getting all", plural inParam,
S "related to the", phrase glaSlab `sAnd` S "also the", plural parameter,
S "related to", phrase blastTy]
endScoR = foldl (+:+) EmptyS [S "use the", plural datum `sAnd`
S "predict whether the", phrase glaSlab, S "is safe to use" `sOr`
S "not"]
{--Purpose of Document--}
purpose_intro_p1 :: NamedChunk -> CI -> NamedChunk -> Sentence
purpose_intro_p1 typeOf progName gvnVar = foldlSent [S "The main", phrase purpose,
S "of this", phrase typeOf, S "is to predict whether a given", phrase gvnVar,
S "is likely to resist a specified" +:+. phrase blast, S "The", plural Doc.goal
`sAnd` plural thModel, S "used in the", short progName, phrase code,
S "are provided" `sC` S "with an", phrase emphasis,
S "on explicitly identifying", (plural assumption) `sAnd` S "unambiguous" +:+.
plural definition, S "This", phrase typeOf, S "is intended to be used as a",
phrase reference, S "to provide all", phrase information,
S "necessary to understand" `sAnd` S "verify the" +:+. phrase analysis,
S "The", short srs, S "is abstract because the", plural content, S "say what",
phrase problem, S "is being solved" `sC` S "but not how to solve it"]
--FIXME: Last sentence is also present in SWHS and NoPCM... pull out?
{--Scope of Requirements--}
{--Organization of Document--}
intendedReaderIntro_end, intendedReaderIntro :: Sentence
intendedReaderIntro = foldlSent [S "The", phrase organization, S "of this",
phrase document, S "follows the", phrase template, S "for an", short srs,
S "for", phrase sciCompS, S "proposed by" +:+ cite gbRefDB koothoor2013
`sAnd` cite gbRefDB smithLai2005 `sC` S "with some",
plural aspect, S "taken from Volere", phrase template,
S "16", cite gbRefDB rbrtsn2012]
intendedReaderIntro_end = foldl (+:+) EmptyS [(at_start' $ the dataDefn),
S "are used to support", (plural definition `ofThe` S "different"),
plural model]
{--STAKEHOLDERS--}
{--The Client--}
{--The Customer--}
{--GENERAL SYSTEM DESCRIPTION--}
{--User Characteristics--}
user_chars_bullets :: (NamedIdea n1, NamedIdea n, NamedIdea n2, NamedIdea n3,
NamedIdea n4, NamedIdea n5, Idea c, NamedIdea n6) =>
n6 -> c -> n5 -> n4 -> n3 -> n2 -> n1 -> n -> Contents
user_chars_bullets intendedIndvdl progName yr degreeType prog1 prog2 undrstd1 undrstd2
= enumBullet [foldlSent [(phrase intendedIndvdl `sOf` short progName)
`isExpctdToHv` S "completed at least", (S "equivalent" `ofThe` (phrase yr)),
S "of an", phrase degreeType `sIn` phrase prog1 `sOr` phrase prog2],
(phrase intendedIndvdl `isExpctdToHv` S "an understanding of" +:+.
rdrKnldgbleIn (undrstd1) (undrstd2)), foldlSent [phrase intendedIndvdl
`isExpctdToHv` S "basic", phrase computerLiteracy, S "to handle the",
phrase software]]
{--System Constraints--}
{--SCOPE OF THE PROJECT-}
{--Product Use Case Table--}
prod_use_case_table = prodUCTbl [prod_use_case_table_UC1, prod_use_case_table_UC2]
prod_use_case_table_UC1, prod_use_case_table_UC2 :: [Sentence]
prod_use_case_table_UC1 = [titleize user, titleize' characteristic +:+ S "of the"
+:+ phrase glaSlab `sAnd` S "of the" +:+. phrase blast +:+ S "Details in"
+:+ makeRef (SRS.indPRCase SRS.missingP [])]
prod_use_case_table_UC2 = [short gLassBR, S "Whether" `sOr` S "not the" +:+
phrase glaSlab +:+ S "is safe for the" +:+ S "calculated" +:+ phrase load
`sAnd` S "supporting calculated" +:+ plural value]
{--Individual Product Use Case--}
indiv_prod_use_case :: NamedChunk -> ConceptChunk -> ConceptChunk -> ConceptChunk ->
Contents
indiv_prod_use_case mainObj compare1 compare2 factorOfComparison =
foldlSP [S "The", phrase user, S "provides the", plural input_, S "to",
short gLassBR, S "for use within the" +:+. phrase analysis,
S "There are two main", plural class_, S "of" +: plural input_ +:+.
(phrase glassGeo `sAnd` phrase blastTy), S "The", phrase glassGeo, S "based",
plural input_, S "include" +:+. (phrase glassTy `sAnd` plural dimension `ofThe`
phrase glaPlane), blastTy ^. defn, S "These", plural parameter, S "describe"
+:+. (phrase char_weight `sAnd` S "stand off blast"), S "Another",
phrase input_, S "the", phrase user, S "gives is the tolerable" +:+.
(phrase value `sOf` phrase prob_br)
+:+
short gLassBR, plural output_, S "if the", phrase mainObj,
S "will be safe by comparing whether", phrase compare1, S "is greater than"
+:+. phrase compare2, (at_start compare1 `isThe` (compare1 ^. defn))
`sAnd` (phrase compare2 `isThe` phrase requirement) +:+.
(S "which" `isThe` (compare2 ^. defn)), S "The second", phrase condition,
S "is to check whether the calculated", phrase factorOfComparison,
sParen (getES prob_br), S "is less than the tolerable",
phrase factorOfComparison, sParen (getES pb_tol),
S "which is obtained from the", phrase user, S "as an" +:+. phrase input_,
S "If both", plural condition, S "return true then it's shown that the",
phrase mainObj, S "is safe to use" `sC`
S "else if both return false then the", phrase mainObj +:+.
S "is considered unsafe", S "All the supporting calculated", plural value,
S "are also displayed as", phrase output_]
{--SPECIFIC SYSTEM DESCRIPTION--}
spec_sys_desc = specSysDesF (S "and" +:+ plural definition) [prob_desc, sol_chars_spec]
{--PROBLEM DESCRIPTION--}
start, ending :: Sentence
start = foldlSent [S "A", phrase system,
S "is needed to efficiently" `sAnd` S "correctly predict the",
phrase blastRisk +:+ S "involved with the glass"]
ending = foldl (+:+) EmptyS [S "interpret the", plural input_,
S "to give out the", plural output_,
S "which predicts whether the", phrase glaSlab,
S "can withstand the", phrase blast, S "under the",
plural condition]
prob_desc = probDescF start gLassBR ending [terms_defs, phys_sys_desc, goals]
{--Terminology and Definitions--}
terms_defs = termDefnF (Just (S "All" `sOf` S "the" +:+ plural term_ +:+
S "are extracted from" +:+ (sSqBrNum 4 {-astm_LR2009-}) `sIn`
(makeRef (SRS.reference SRS.missingP [])))) [terms_defs_bullets]
{--Physical System Description--}
phys_sys_desc = physSystDesc (short gLassBR) (fig_glassbr) [phys_sys_desc_list, fig_glassbr]
fig_glassbr = fig (at_start $ the physicalSystem) (resourcePath ++ "physicalsystimage.png")
"physSystImage"
phys_sys_desc_list = enumSimple 1 (short physSyst) phys_sys_desc_list_physys
--"Dead" knowledge?
phys_sys_desc_list_physys :: [Sentence]
phys_sys_desc_list_physys1 :: Sentence
phys_sys_desc_list_physys2 :: NamedIdea n => n -> Sentence
phys_sys_desc_list_physys = [phys_sys_desc_list_physys1, phys_sys_desc_list_physys2 (ptOfExplsn)]
phys_sys_desc_list_physys1 = at_start glaSlab
phys_sys_desc_list_physys2 imprtntElem = foldlSent [S "The"
+:+. phrase imprtntElem, S "Where the", phrase bomb `sC`
S "or", (blast ^. defn) `sC` S "is located. The", phrase sD
`isThe` phrase distance, S "between the", phrase imprtntElem `sAnd`
S "the glass"]
{--Goal Statements--}
goals = goalStmtF [foldlList [plural dimension `ofThe` phrase glaPlane,
phrase glassTy, plural characteristic `ofThe` phrase explosion,
S "the" +:+ phrase pb_tol]] [goals_list]
goals_list_goalStmt1 :: [Sentence]
goals_list_goalStmt1 = [foldlSent [S "Analyze" `sAnd` S "predict whether",
S "the", phrase glaSlab, S "under consideration will be able to withstand",
S "the", phrase explosion `sOf` S "a certain", phrase degree_',
S "which is calculated based on", phrase userInput]]
{--SOLUTION CHARACTERISTICS SPECIFICATION--}
sol_chars_spec = solChSpecF gLassBR (prob_desc, (SRS.likeChg SRS.missingP []), (SRS.unlikeChg SRS.missingP [])) EmptyS
(EmptyS, dataConstraintUncertainty, end)
(assumps_list, map reldefn tModels, [], map datadefn dataDefns,
map reldefn iModels,
[data_consts_table1, data_consts_table2]) []
where
end = foldlSent [(makeRef (SRS.valsOfAuxCons SRS.missingP [])),
S "gives", (plural value `ofThe` S "specification"),
plural parameter, S "used in", (makeRef data_consts_table1)]
+:+ data_consts_intro2
sol_chars_spec_intro = foldlSP [S "This", phrase section_, S "explains all the",
plural assumption, S "considered" `sAnd` S "the", plural thModel,
S "which are supported by the", plural dataDefn]
{--Assumptions--}
assumps_list :: [Contents]
assumps_list = assumpList newAssumptions
assumpList :: [AssumpChunk] -> [Contents]
assumpList = map Assumption
assumptions :: [Contents] -- FIXME: Remove this entirely and use new refs + docLang.
assumptions = fst (foldr (\s (ls, n) -> ((Assumption $ assump ("A" ++ show n) s ("A" ++ show n)) : ls, n-1))
([], (length assumptionDescs)::Int) assumptionDescs)
-- These correspond to glassTyAssumps, glassCondition, explsnScenario,
-- standardValues, glassLiteAssmp, bndryConditions, responseTyAssump, ldfConstant
{--Theoretical Models--}
{--Data Definitions--}
{--Data Constraints--}
{-input and output tables-}
data_consts_table1 = inDataConstTbl gbInputDataConstraints
data_consts_table2 = outDataConstTbl [prob_br]
data_consts_intro2 :: Sentence
data_consts_intro2 = foldlSent [makeRef data_consts_table2, S "shows the",
plural constraint, S "that must be satisfied by the", phrase output_]
{--REQUIREMENTS--}
{--Functional Requirements--}
func_reqs_list = (func_reqs_listOfReqs) ++ func_reqs_req6 ++ [func_reqs_req1Table]
func_reqs_req1, func_reqs_req2, func_reqs_req3, func_reqs_req4, func_reqs_req5 :: Contents
req1Desc, req2Desc, req3Desc, req4Desc :: Sentence
req5Desc :: NamedChunk -> Sentence
func_reqs_req6 :: [Contents] --FIXME: Issue #327
func_reqs_listOfReqs :: [Contents]
func_reqs_listOfReqs = [func_reqs_req1, func_reqs_req2, func_reqs_req3, func_reqs_req4, func_reqs_req5]
func_reqs_req1 = mkRequirement "func_reqs_req1" req1Desc "Input-Glass-Props"
func_reqs_req2 = mkRequirement "func_reqs_req2" req2Desc "System-Set-Values-Following-Assumptions"
func_reqs_req3 = mkRequirement "func_reqs_req3" req3Desc "Check-Input-with-Data_Constraints"
func_reqs_req4 = mkRequirement "func_reqs_req4" req4Desc "Output-Values-and-Known-Quantities"
func_reqs_req5 = mkRequirement "func_reqs_req5" (req5Desc (output_)) "Check-Glass-Safety"
req1Desc = foldlSent [at_start input_, S "the", plural quantity, S "from",
makeRef func_reqs_req1Table `sC` S "which define the", phrase glass,
plural dimension `sC` (glassTy ^. defn) `sC` S "tolerable",
phrase probability `sOf` phrase failure, S "and",
(plural characteristic `ofThe` phrase blast), S "Note:",
getES plate_len `sAnd` getES plate_width,
S "will be input in terms of", plural millimetre `sAnd`
S "will be converted to the equivalent value in", plural metre]
func_reqs_req1Table :: Contents
func_reqs_req1Table = Table
[at_start symbol_, at_start description, S "Units"]
(mkTable
[getES,
at_start, unit'2Contents] requiredInputs)
(S "Required Inputs following R1") True "R1ReqInputs"
req2Desc = foldlSent [S "The", phrase system,
S "shall set the known", plural value +: S "as follows",
foldlList [(foldlsC (map getES (take 4 assumptionConstants)) `followA` 4),
((getES constant_LoadDF) `followA` 8), (short lShareFac `followA` 5),
(getES hFromt) +:+ sParen (S "from" +:+ (makeRef hFromt)),
(getES glaTyFac) +:+ sParen (S "from" +:+ (makeRef glaTyFac))]]
--ItemType
{-func_reqs_req2 = (Nested (S "The" +:+ phrase system +:+
S "shall set the known" +:+ plural value +: S "as follows")
(Bullet $ map Flat
[foldlsC (map getS (take 4 assumptionConstants)) `followA` 4,
(getS loadDF) `followA` 8,
short lShareFac `followA` 5]))
-}
--FIXME: should constants, LDF, and LSF have some sort of field that holds
-- the assumption(s) that are being followed? (Issue #349)
req3Desc = foldlSent [S "The", phrase system, S "shall check the entered",
plural inValue, S "to ensure that they do not exceed the",
plural datumConstraint, S "mentioned in" +:+. makeRef
(SRS.datCon SRS.missingP []), S "If any" `sOf` S "the", plural inParam,
S "is out" `sOf` S "bounds" `sC` S "an", phrase errMsg, S "is displayed"
`andThe` plural calculation, S "stop"]
req4Desc = foldlSent [titleize output_, S "the", plural inQty,
S "from", acroR 1 `andThe` S "known", plural quantity,
S "from", acroR 2]
req5Desc cmd = foldlSent_ [S "If", (getES is_safe1) `sAnd` (getES is_safe2),
sParen (S "from" +:+ (makeRef (reldefn t1SafetyReq))
`sAnd` (makeRef (reldefn t2SafetyReq))), S "are true" `sC`
phrase cmd, S "the", phrase message, Quote (safeMessage ^. defn),
S "If the", phrase condition, S "is false, then", phrase cmd,
S "the", phrase message, Quote (notSafe ^. defn)]
testing :: [QuantityDict]
testing = qw prob_br : qw lRe : qw demand : [] -- all different types!
testing1 :: [RelationConcept]
testing1 = [probOfBr, calOfCap, calOfDe]
--FIXME: rename or find better implementation?
func_reqs_req6 = [(Enumeration $ Simple $ [(acroR 6, Nested (titleize output_ +:+
S "the following" +: plural quantity)
(Bullet $
map (\(a, d) -> Flat $ (at_start a) +:+ sParen (getES a) +:+
sParen (makeRef (reldefn d))) (zip testing testing1)
++
map (\d -> Flat $ (at_start d) +:+ sParen (getES d) +:+
sParen (makeRef (datadefn d))) func_reqs_req6_pulledList
++
[Flat $ (titleize aspectR) +:+ sParen (getES aspectR) +:+
E (aspectRWithEqn^.equat)]
))])]
{--Nonfunctional Requirements--}
{--LIKELY CHANGES--}
likely_change_list :: [Contents]
likely_change_list = likelyChanges_SRS
{--UNLIKELY CHANGES--}
unlikely_change_list :: [Contents]
unlikely_change_list = unlikelyChanges_SRS
{--TRACEABLITY MATRICES AND GRAPHS--}
trace_table1Desc :: Sentence
trace_table1Desc = foldlList (map plural (take 3 solChSpecSubsections)) +:+.
S "with each other"
trace_table2Desc :: Sentence
trace_table2Desc = plural requirement +:+ S "on" +:+. foldlList
(map plural solChSpecSubsections)
trace_table3Desc :: Sentence
trace_table3Desc = foldlsC (map plural (take 3 solChSpecSubsections)) `sC`
plural likelyChg `sAnd` plural requirement +:+ S "on the" +:+
plural assumption
trace_theorys, trace_instaModel, trace_dataDef, trace_data, trace_funcReq, trace_assump,
trace_likelyChg :: [String]
trace_theorysRef, trace_instaModelRef, trace_dataDefRef, trace_dataRef, trace_funcReqRef,
trace_assumpRef, trace_likelyChgRef :: [Sentence]
trace_theorys = ["T1", "T2"]
trace_theorysRef = map (refFromType Theory) tModels
trace_instaModel = ["IM1", "IM2", "IM3"]
trace_instaModelRef = map (refFromType Theory) iModels
trace_dataDef = ["DD1", "DD2", "DD3", "DD4", "DD5", "DD6", "DD7", "DD8"]
trace_dataDefRef = map (refFromType Data) dataDefns
trace_data = ["Data Constraints"]
trace_dataRef = [makeRef (SRS.datCon SRS.missingP [])]
trace_funcReq = ["R1", "R2", "R3", "R4", "R5", "R6"]
trace_funcReqRef = makeListRef trace_funcReq (SRS.funcReq SRS.missingP [])
trace_assump = ["A1", "A2", "A3", "A4", "A5", "A6", "A7", "A8"]
trace_assumpRef = makeListRef trace_assump (SRS.assumpt SRS.missingP [])
trace_likelyChg = ["LC1", "LC2", "LC3", "LC4", "LC5"]
trace_likelyChgRef = makeListRef trace_likelyChg (SRS.likeChg SRS.missingP [])
trace_row_t1 :: [String]
trace_row_t1 = trace_theorys ++ trace_instaModel ++ trace_dataDef
-- The headers for the first row and column
trace_row_header_t1 :: [Sentence]
trace_row_header_t1 = zipWith itemRefToSent trace_row_t1 (trace_theorysRef ++
trace_instaModelRef ++ trace_dataDefRef)
-- list of columns and their rows for traceability matrix
trace_columns_t1 :: [[String]]
trace_columns_t1 = [trace_t1_T1, trace_t1_T2, trace_t1_IM1, trace_t1_IM2, trace_t1_IM3,
trace_t1_DD1, trace_t1_DD2, trace_t1_DD3, trace_t1_DD4, trace_t1_DD5, trace_t1_DD6, trace_t1_DD7,
trace_t1_DD8]
trace_t1_T1, trace_t1_T2, trace_t1_IM1, trace_t1_IM2, trace_t1_IM3, trace_t1_DD1, trace_t1_DD2,
trace_t1_DD3, trace_t1_DD4, trace_t1_DD5, trace_t1_DD6, trace_t1_DD7, trace_t1_DD8 :: [String]
-- list of each item that "this" item requires for traceability matrix
trace_t1_T1 = ["T2", "IM1"]
trace_t1_T2 = ["T1", "IM2", "IM3"]
trace_t1_IM1 = ["DD1", "DD2", "DD3"]
trace_t1_IM2 = ["DD4", "DD5"]
trace_t1_IM3 = []
trace_t1_DD1 = []
trace_t1_DD2 = []
trace_t1_DD3 = ["DD6"]
trace_t1_DD4 = ["DD2", "DD6"]
trace_t1_DD5 = []
trace_t1_DD6 = ["IM3", "DD2", "DD5"]
trace_t1_DD7 = ["DD8"]
trace_t1_DD8 = ["DD2"]
trace_table1 = Table (EmptyS:trace_row_header_t1)
(makeTMatrix trace_row_header_t1 trace_columns_t1 trace_row_t1)
(showingCxnBw (traceyMatrix)
(titleize' item +:+ S "of Different" +:+ titleize' section_)) True "TraceyItemSecs"
--
trace_row_t2 :: [String]
trace_row_t2 = trace_row_t1 ++ trace_data ++ trace_funcReq
trace_row_header_t2, trace_col_header_t2 :: [Sentence]
trace_row_header_t2 = trace_row_header_t1 ++
(zipWith itemRefToSent (trace_data ++ trace_funcReq) (trace_dataRef ++ trace_funcReqRef))
trace_col_header_t2 = zipWith (\x y -> (S x) +:+ (sParen (S "in" +:+ y)))
trace_funcReq trace_funcReqRef
trace_t2_r1, trace_t2_r2, trace_t2_r3, trace_t2_r4, trace_t2_r5,
trace_t2_r6 :: [String]
trace_columns_t2 :: [[String]]
trace_columns_t2 = [trace_t2_r1, trace_t2_r2, trace_t2_r3, trace_t2_r4, trace_t2_r5, trace_t2_r6]
trace_t2_r1 = []
trace_t2_r2 = []
trace_t2_r3 = ["Data Constraints"]
trace_t2_r4 = ["R1", "R2"]
trace_t2_r5 = ["T1", "T2"]
trace_t2_r6 = ["IM1", "IM2", "IM3", "DD2", "DD3", "DD4", "DD5", "DD6", "DD7", "DD8"]
trace_table2 = Table (EmptyS:trace_row_header_t2)
(makeTMatrix trace_col_header_t2 trace_columns_t2 trace_row_t2)
(showingCxnBw (traceyMatrix) (titleize' requirement `sAnd` S "Other" +:+
titleize' item)) True "TraceyReqsItems"
--
trace_row_t3 :: [String]
trace_row_t3 = trace_assump
trace_row_header_t3, trace_col_header_t3 :: [Sentence]
trace_row_header_t3 = zipWith itemRefToSent trace_assump trace_assumpRef
trace_col_header_t3 = trace_row_header_t1 ++ (zipWith itemRefToSent
(trace_likelyChg ++ trace_funcReq) (trace_likelyChgRef ++ trace_funcReqRef))
trace_columns_t3 :: [[String]]
trace_columns_t3 = [trace_t3_T1, trace_t3_T2, trace_t3_IM1, trace_t3_IM2, trace_t3_IM3, trace_t3_DD1,
trace_t3_DD2, trace_t3_DD3, trace_t3_DD4, trace_t3_DD5, trace_t3_DD6, trace_t3_DD7, trace_t3_DD8,
trace_t3_lc1, trace_t3_lc2, trace_t3_lc3, trace_t3_lc4, trace_t3_lc5, trace_t3_r1, trace_t3_r2,
trace_t3_r3, trace_t3_r4, trace_t3_r5, trace_t3_r6]
trace_t3_T1, trace_t3_T2, trace_t3_IM1, trace_t3_IM2, trace_t3_IM3, trace_t3_DD1, trace_t3_DD2,
trace_t3_DD3, trace_t3_DD4, trace_t3_DD5, trace_t3_DD6, trace_t3_DD7, trace_t3_DD8,
trace_t3_lc1, trace_t3_lc2, trace_t3_lc3, trace_t3_lc4, trace_t3_lc5, trace_t3_r1,
trace_t3_r2, trace_t3_r3, trace_t3_r4, trace_t3_r5, trace_t3_r6 :: [String]
-- list of each item that "this" item requires for traceability matrix
trace_t3_T1 = []
trace_t3_T2 = []
trace_t3_IM1 = ["A4", "A6", "A7"]
trace_t3_IM2 = ["A1", "A2", "A5"]
trace_t3_IM3 = []
trace_t3_DD1 = []
trace_t3_DD2 = []
trace_t3_DD3 = []
trace_t3_DD4 = ["A4"]
trace_t3_DD5 = []
trace_t3_DD6 = ["A5"]
trace_t3_DD7 = []
trace_t3_DD8 = ["A4"]
trace_t3_lc1 = ["A3"]
trace_t3_lc2 = ["A4", "A8"]
trace_t3_lc3 = ["A5"]
trace_t3_lc4 = ["A6"]
trace_t3_lc5 = ["A7"]
trace_t3_r1 = []
trace_t3_r2 = ["A4", "A5", "A8"]
trace_t3_r3 = []
trace_t3_r4 = []
trace_t3_r5 = []
trace_t3_r6 = []
trace_table3 = Table (EmptyS:trace_row_header_t3)
(makeTMatrix trace_col_header_t3 trace_columns_t3 trace_row_t3)
(showingCxnBw (traceyMatrix) (titleize' assumption `sAnd` S "Other"
+:+ titleize' item)) True "TraceyAssumpsOthers"
--
trace_intro2 = traceGIntro traceyGraphs
[(foldlList (map plural (take 3 solChSpecSubsections)) +:+.
S "on each other"), (plural requirement +:+ S "on" +:+. foldlList
(map plural solChSpecSubsections)),
(foldlList ((map plural (take 3 solChSpecSubsections))++
[plural requirement, plural likelyChg +:+ S "on" +:+ plural assumption]))]
fig_2 = figureLabel 2 (traceyMatrix)
(titleize' item +:+ S "of Different" +:+ titleize' section_)
(resourcePath ++ "Trace.png") "TraceyItemSecs"
fig_3 = figureLabel 3 (traceyMatrix)
(titleize' requirement `sAnd` S "Other" +:+ titleize' item)
(resourcePath ++ "RTrace.png") "TraceyReqsItems"
fig_4 = figureLabel 4 (traceyMatrix)
(titleize' assumption `sAnd` S "Other" +:+ titleize' item)
(resourcePath ++ "ATrace.png") "TraceyAssumpsOthers"
{--VALUES OF AUXILIARY CONSTANTS--}
{--REFERENCES--}
{--APPENDIX--}
appendix_intro = foldlSP [
S "This", phrase appendix, S "holds the", plural graph,
sParen ((makeRef fig_5) `sAnd` (makeRef fig_6)),
S "used for interpolating", plural value, S "needed in the", plural model]
fig_5 = fig (titleize figure +: S "5" +:+ (demandq ^. defn) +:+
sParen (getES demand) `sVersus` at_start sD +:+ sParen (getAcc stdOffDist)
`sVersus` at_start char_weight +:+ sParen (getES sflawParamM))
(resourcePath ++ "ASTM_F2248-09.png") "demandVSsod"
fig_6 = fig (titleize figure +: S "6" +:+ S "Non dimensional" +:+
phrase lateralLoad +:+ sParen (getES dimlessLoad)
`sVersus` titleize aspectR +:+ sParen (getAcc aR)
`sVersus` at_start stressDistFac +:+ sParen (getES stressDistFac))
(resourcePath ++ "ASTM_F2248-09_BeasonEtAl.png") "dimlessloadVSaspect"
blstRskInvWGlassSlab :: Sentence
blstRskInvWGlassSlab = phrase blastRisk +:+ S "involved with the" +:+
phrase glaSlab
| JacquesCarette/literate-scientific-software | People/Dan/Presentations/CommitteeMeeting4/BodyNew.hs | bsd-2-clause | 32,942 | 0 | 26 | 4,930 | 9,169 | 5,188 | 3,981 | 565 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Application.HXournal.NetworkClipboard.Server.Type
import Application.HXournal.NetworkClipboard.Server.Yesod ()
import Yesod
import qualified Data.Map as M
import Data.Acid
-- import Data.Xournal.Simple
-- import Data.Strict.Tuple
-- import Data.UUID.V5
import Application.HXournal.NetworkClipboard.Type
-- teststroke = [Stroke "test" "test" 1.1 [ 0.0 :!: 1.0 ] ]
-- teststrokeinfo = HXournalClipInfo namespaceURL teststroke
main :: IO ()
main = do
putStrLn "hxournalclip-server"
acid <- openLocalState (HXournalClipInfoRepository [] M.empty)
-- (M.insert namespaceURL teststrokeinfo M.empty )
  warpDebug 7800 (HXournalClipServer acid)
| wavewave/hxournalclip-server | exe/hxournalclip-server.hs | bsd-2-clause | 718 | 0 | 11 | 100 | 110 | 66 | 44 | 13 | 1 |
module Text.FastAleck.Text
( module Text.FastAleck
, fastAleck
) where
--------------------------------------------------------------------------------
import Data.Text (Text)
import qualified Data.Text.Encoding as T
--------------------------------------------------------------------------------
import Text.FastAleck
import qualified Text.FastAleck.Internal as I
--------------------------------------------------------------------------------
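-- | Typographically post-process strict 'Text' by round-tripping through
-- UTF-8: encode, run the bytestring-level 'I.fastAleck', then decode.
-- A minimal usage sketch (@cfg@ stands for whatever 'FastAleckConfig' value
-- the caller already has; it is not defined in this module):
--
-- > fastAleck cfg "An -- em dash and \"smart\" quotes"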
fastAleck :: FastAleckConfig -> Text -> Text
fastAleck config = T.decodeUtf8 . I.fastAleck config . T.encodeUtf8
{-# INLINE fastAleck #-}
| jaspervdj/fast-aleck-hs | src/Text/FastAleck/Text.hs | bsd-3-clause | 642 | 0 | 8 | 104 | 93 | 58 | 35 | 10 | 1 |
{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-}
-- | Planning how to build everything in a project.
--
module Distribution.Client.ProjectPlanning (
-- * elaborated install plan types
ElaboratedInstallPlan,
ElaboratedConfiguredPackage(..),
ElaboratedPlanPackage,
ElaboratedSharedConfig(..),
ElaboratedReadyPackage,
BuildStyle(..),
CabalFileText,
--TODO: [code cleanup] these types should live with execution, not with
-- plan definition. Need to better separate InstallPlan definition.
GenericBuildResult(..),
BuildResult,
BuildSuccess(..),
BuildFailure(..),
DocsResult(..),
TestsResult(..),
-- * Producing the elaborated install plan
rebuildInstallPlan,
-- * Build targets
PackageTarget(..),
ComponentTarget(..),
SubComponentTarget(..),
showComponentTarget,
-- * Selecting a plan subset
pruneInstallPlanToTargets,
-- * Utils required for building
pkgHasEphemeralBuildTargets,
pkgBuildTargetWholeComponents,
-- * Setup.hs CLI flags for building
setupHsScriptOptions,
setupHsConfigureFlags,
setupHsBuildFlags,
setupHsBuildArgs,
setupHsReplFlags,
setupHsReplArgs,
setupHsCopyFlags,
setupHsRegisterFlags,
setupHsHaddockFlags,
packageHashInputs,
-- TODO: [code cleanup] utils that should live in some shared place?
createPackageDBIfMissing
) where
import Distribution.Client.ProjectPlanning.Types
import Distribution.Client.PackageHash
import Distribution.Client.RebuildMonad
import Distribution.Client.ProjectConfig
import Distribution.Client.ProjectPlanOutput
import Distribution.Client.Types
hiding ( BuildResult, BuildSuccess(..), BuildFailure(..)
, DocsResult(..), TestsResult(..) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
import qualified Distribution.Client.IndexUtils as IndexUtils
import Distribution.Client.Targets (userToPackageConstraint)
import Distribution.Client.DistDirLayout
import Distribution.Client.SetupWrapper
import Distribution.Client.JobControl
import Distribution.Client.FetchUtils
import qualified Hackage.Security.Client as Sec
import Distribution.Client.Setup hiding (packageName, cabalVersion)
import Distribution.Utils.NubList
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps (ComponentDeps)
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.LabeledPackageConstraint
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageFixedDeps
import Distribution.Solver.Types.PkgConfigDb
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.SolverId
import Distribution.Solver.Types.SolverPackage
import Distribution.Solver.Types.SourcePackage
import Distribution.Package hiding
(InstalledPackageId, installedPackageId)
import Distribution.System
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription as PD
import qualified Distribution.PackageDescription.Configuration as PD
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.Compiler hiding (Flag)
import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate
import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate
import Distribution.Simple.Program
import Distribution.Simple.Program.Db
import Distribution.Simple.Program.Find
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
(Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault)
import qualified Distribution.Simple.Configure as Cabal
import qualified Distribution.Simple.LocalBuildInfo as Cabal
import Distribution.Simple.LocalBuildInfo (ComponentName(..))
import qualified Distribution.Simple.Register as Cabal
import qualified Distribution.Simple.InstallDirs as InstallDirs
import qualified Distribution.Simple.BuildTarget as Cabal
import Distribution.Simple.Utils hiding (matchFileGlob)
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Graph as Graph
import qualified Data.Tree as Tree
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Control.Monad.State as State
import Control.Exception
import Data.List
import Data.Maybe
import Data.Either
import Data.Monoid
import Data.Function
import System.FilePath
import System.Directory (doesDirectoryExist)
------------------------------------------------------------------------------
-- * Elaborated install plan
------------------------------------------------------------------------------
-- "Elaborated" -- worked out with great care and nicety of detail;
-- executed with great minuteness: elaborate preparations;
-- elaborate care.
--
-- So here's the idea:
--
-- Rather than passing in a miscellaneous collection of 'ConfigFlags',
-- 'InstallFlags' etc as separate args, which are then further selected and
-- transformed during the execution of the build, we instead construct
-- an elaborated install plan that includes everything we will need, and then
-- during the execution of the plan we do as little transformation of this
-- info as possible.
--
-- So we're trying to split the work into two phases: construction of the
-- elaborated install plan (which as far as possible should be pure) and
-- then simple execution of that plan without any smarts, just doing what the
-- plan says to do.
--
-- So that means we need a representation of this fully elaborated install
-- plan. The representation consists of two parts:
--
-- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a
-- representation of source packages that includes a lot more detail about
-- that package's individual configuration
--
-- * A 'ElaboratedSharedConfig'. Some package configuration is the same for
-- every package in a plan. Rather than duplicate that info in every entry in
-- the 'GenericInstallPlan' we keep that separately.
--
-- The division between the shared and per-package config is /not set in stone
-- for all time/. For example if we wanted to generalise the install plan to
-- describe a situation where we want to build some packages with GHC and some
-- with GHCJS then the platform and compiler would no longer be shared between
-- all packages but would have to be per-package (probably with some sanity
-- condition on the graph structure).
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- type ElaboratedInstallPlan = ...
-- type ElaboratedPlanPackage = ...
-- data ElaboratedSharedConfig = ...
-- data ElaboratedConfiguredPackage = ...
-- data BuildStyle = ...
-- | Check that an 'ElaboratedConfiguredPackage' actually makes
-- sense under some 'ElaboratedSharedConfig'.
sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig
-> ElaboratedConfiguredPackage
-> a
-> a
sanityCheckElaboratedConfiguredPackage sharedConfig
pkg@ElaboratedConfiguredPackage{..}
ret =
-- we should only have enabled stanzas that actually can be built
-- (according to the solver)
assert (pkgStanzasEnabled `Set.isSubsetOf` pkgStanzasAvailable)
-- the stanzas that the user explicitly requested should be
-- enabled (by the previous test, they are also available)
. assert (Map.keysSet (Map.filter id pkgStanzasRequested)
`Set.isSubsetOf` pkgStanzasEnabled)
-- the stanzas explicitly disabled should not be available
. assert (Set.null (Map.keysSet (Map.filter not pkgStanzasRequested)
`Set.intersection` pkgStanzasAvailable))
-- either a package is being built inplace, or the
-- 'installedPackageId' we assigned is consistent with
-- the 'hashedInstalledPackageId' we would compute from
-- the elaborated configured package
. assert (pkgBuildStyle == BuildInplaceOnly ||
installedPackageId pkg == hashedInstalledPackageId
(packageHashInputs sharedConfig pkg))
-- either a package is built inplace, or we are not attempting to
-- build any test suites or benchmarks (we never build these
-- for remote packages!)
. assert (pkgBuildStyle == BuildInplaceOnly ||
Set.null pkgStanzasAvailable)
$ ret
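-- A sketch of the intended call pattern (hypothetical call site): since the
-- check is written in continuation style ('a -> a'), it simply wraps whatever
-- value the caller was about to return, e.g.
--
-- > sanityCheckElaboratedConfiguredPackage sharedConfig pkg pkg
--
-- which returns the package unchanged when all the assertions hold.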
------------------------------------------------------------------------------
-- * Deciding what to do: making an 'ElaboratedInstallPlan'
------------------------------------------------------------------------------
rebuildInstallPlan :: Verbosity
-> FilePath -> DistDirLayout -> CabalDirLayout
-> ProjectConfig
-> IO ( ElaboratedInstallPlan
, ElaboratedSharedConfig
, ProjectConfig )
rebuildInstallPlan verbosity
projectRootDir
distDirLayout@DistDirLayout {
distDirectory,
distProjectCacheFile,
distProjectCacheDirectory
}
cabalDirLayout@CabalDirLayout {
cabalPackageCacheDirectory,
cabalStoreDirectory,
cabalStorePackageDB
}
cliConfig =
runRebuild projectRootDir $ do
progsearchpath <- liftIO $ getSystemSearchPath
let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty }
-- The overall improved plan is cached
rerunIfChanged verbosity fileMonitorImprovedPlan
-- react to changes in command line args and the path
(cliConfigPersistent, progsearchpath) $ do
      -- And so is the elaborated plan that the improved plan is based on
(elaboratedPlan, elaboratedShared,
projectConfig) <-
rerunIfChanged verbosity fileMonitorElaboratedPlan
(cliConfigPersistent, progsearchpath) $ do
(projectConfig, projectConfigTransient) <- phaseReadProjectConfig
localPackages <- phaseReadLocalPackages projectConfig
compilerEtc <- phaseConfigureCompiler projectConfig
_ <- phaseConfigurePrograms projectConfig compilerEtc
solverPlan <- phaseRunSolver projectConfigTransient
compilerEtc localPackages
(elaboratedPlan,
elaboratedShared) <- phaseElaboratePlan projectConfigTransient
compilerEtc
solverPlan localPackages
phaseMaintainPlanOutputs elaboratedPlan elaboratedShared
return (elaboratedPlan, elaboratedShared,
projectConfig)
-- The improved plan changes each time we install something, whereas
-- the underlying elaborated plan only changes when input config
-- changes, so it's worth caching them separately.
improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared
return (improvedPlan, elaboratedShared, projectConfig)
where
fileMonitorCompiler = newFileMonitorInCacheDir "compiler"
fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan"
fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes"
fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan"
fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan"
newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b
newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile
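    -- For example (sketch): @newFileMonitorInCacheDir "compiler"@ is a monitor
    -- whose cache file is @distProjectCacheFile "compiler"@, i.e. a file under
    -- this project's dist cache directory, so each of the monitors above gets
    -- its own independent cache file.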
-- Read the cabal.project (or implicit config) and combine it with
-- arguments from the command line
--
phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig)
phaseReadProjectConfig = do
liftIO $ do
info verbosity "Project settings changed, reconfiguring..."
createDirectoryIfMissingVerbose verbosity False distDirectory
createDirectoryIfMissingVerbose verbosity False distProjectCacheDirectory
projectConfig <- readProjectConfig verbosity projectRootDir
      -- The project config coming from the command line includes "build only"
      -- flags that we don't cache persistently (because like all "build only"
      -- flags they do not affect the value of the outcome) but that we do
      -- sometimes use during planning (in particular the http transport)
let projectConfigTransient = projectConfig <> cliConfig
projectConfigPersistent = projectConfig
<> cliConfig {
projectConfigBuildOnly = mempty
}
liftIO $ writeProjectConfigFile (distProjectCacheFile "config")
projectConfigPersistent
return (projectConfigPersistent, projectConfigTransient)
-- Look for all the cabal packages in the project
-- some of which may be local src dirs, tarballs etc
--
phaseReadLocalPackages :: ProjectConfig
-> Rebuild [UnresolvedSourcePackage]
phaseReadLocalPackages projectConfig = do
localCabalFiles <- findProjectPackages projectRootDir projectConfig
mapM (readSourcePackage verbosity) localCabalFiles
-- Configure the compiler we're using.
--
-- This is moderately expensive and doesn't change that often so we cache
-- it independently.
--
phaseConfigureCompiler :: ProjectConfig
-> Rebuild (Compiler, Platform, ProgramDb)
phaseConfigureCompiler ProjectConfig {
projectConfigShared = ProjectConfigShared {
projectConfigHcFlavor,
projectConfigHcPath,
projectConfigHcPkg
},
projectConfigLocalPackages = PackageConfig {
packageConfigProgramPaths,
packageConfigProgramArgs,
packageConfigProgramPathExtra
}
} = do
progsearchpath <- liftIO $ getSystemSearchPath
rerunIfChanged verbosity fileMonitorCompiler
(hcFlavor, hcPath, hcPkg, progsearchpath,
packageConfigProgramPaths,
packageConfigProgramArgs,
packageConfigProgramPathExtra) $ do
liftIO $ info verbosity "Compiler settings changed, reconfiguring..."
result@(_, _, progdb') <- liftIO $
Cabal.configCompilerEx
hcFlavor hcPath hcPkg
progdb verbosity
-- Note that we added the user-supplied program locations and args
-- for /all/ programs, not just those for the compiler prog and
-- compiler-related utils. In principle we don't know which programs
-- the compiler will configure (and it does vary between compilers).
-- We do know however that the compiler will only configure the
-- programs it cares about, and those are the ones we monitor here.
monitorFiles (programsMonitorFiles progdb')
return result
where
hcFlavor = flagToMaybe projectConfigHcFlavor
hcPath = flagToMaybe projectConfigHcPath
hcPkg = flagToMaybe projectConfigHcPkg
progdb =
userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths))
. userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs))
. modifyProgramSearchPath
(++ [ ProgramSearchPathDir dir
| dir <- fromNubList packageConfigProgramPathExtra ])
$ defaultProgramDb
-- Configuring other programs.
--
    -- Having configured the compiler, we now configure all the remaining
-- programs. This is to check we can find them, and to monitor them for
-- changes.
--
-- TODO: [required eventually] we don't actually do this yet.
--
-- We rely on the fact that the previous phase added the program config for
-- all local packages, but that all the programs configured so far are the
-- compiler program or related util programs.
--
phaseConfigurePrograms :: ProjectConfig
-> (Compiler, Platform, ProgramDb)
-> Rebuild ()
phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do
-- Users are allowed to specify program locations independently for
-- each package (e.g. to use a particular version of a pre-processor
-- for some packages). However they cannot do this for the compiler
-- itself as that's just not going to work. So we check for this.
liftIO $ checkBadPerPackageCompilerPaths
(configuredPrograms compilerprogdb)
(getMapMappend (projectConfigSpecificPackage projectConfig))
--TODO: [required eventually] find/configure other programs that the
-- user specifies.
--TODO: [required eventually] find/configure all build-tools
-- but note that some of them may be built as part of the plan.
-- Run the solver to get the initial install plan.
-- This is expensive so we cache it independently.
--
phaseRunSolver :: ProjectConfig
-> (Compiler, Platform, ProgramDb)
-> [UnresolvedSourcePackage]
-> Rebuild SolverInstallPlan
phaseRunSolver projectConfig@ProjectConfig {
projectConfigShared,
projectConfigBuildOnly
}
(compiler, platform, progdb)
localPackages =
rerunIfChanged verbosity fileMonitorSolverPlan
(solverSettings, cabalPackageCacheDirectory,
localPackages, localPackagesEnabledStanzas,
compiler, platform, programsDbSignature progdb) $ do
installedPkgIndex <- getInstalledPackages verbosity
compiler progdb platform
corePackageDbs
sourcePkgDb <- getSourcePackages verbosity withRepoCtx
pkgConfigDB <- getPkgConfigDb verbosity progdb
--TODO: [code cleanup] it'd be better if the Compiler contained the
-- ConfiguredPrograms that it needs, rather than relying on the progdb
-- since we don't need to depend on all the programs here, just the
-- ones relevant for the compiler.
liftIO $ do
solver <- chooseSolver verbosity
(solverSettingSolver solverSettings)
(compilerInfo compiler)
notice verbosity "Resolving dependencies..."
foldProgress logMsg die return $
planPackages compiler platform solver solverSettings
installedPkgIndex sourcePkgDb pkgConfigDB
localPackages localPackagesEnabledStanzas
where
corePackageDbs = [GlobalPackageDB]
withRepoCtx = projectConfigWithSolverRepoContext verbosity
cabalPackageCacheDirectory
projectConfigShared
projectConfigBuildOnly
solverSettings = resolveSolverSettings projectConfig
logMsg message rest = debugNoWrap verbosity message >> rest
localPackagesEnabledStanzas =
Map.fromList
[ (pkgname, stanzas)
| pkg <- localPackages
, let pkgname = packageName pkg
testsEnabled = lookupLocalPackageConfig
packageConfigTests
projectConfig pkgname
benchmarksEnabled = lookupLocalPackageConfig
packageConfigBenchmarks
projectConfig pkgname
stanzas =
Map.fromList $
[ (TestStanzas, enabled)
| enabled <- flagToList testsEnabled ]
++ [ (BenchStanzas , enabled)
| enabled <- flagToList benchmarksEnabled ]
]
-- Elaborate the solver's install plan to get a fully detailed plan. This
-- version of the plan has the final nix-style hashed ids.
--
phaseElaboratePlan :: ProjectConfig
-> (Compiler, Platform, ProgramDb)
-> SolverInstallPlan
-> [SourcePackage loc]
-> Rebuild ( ElaboratedInstallPlan
, ElaboratedSharedConfig )
phaseElaboratePlan ProjectConfig {
projectConfigShared,
projectConfigLocalPackages,
projectConfigSpecificPackage,
projectConfigBuildOnly
}
(compiler, platform, progdb)
solverPlan localPackages = do
liftIO $ debug verbosity "Elaborating the install plan..."
sourcePackageHashes <-
rerunIfChanged verbosity fileMonitorSourceHashes
(packageLocationsSignature solverPlan) $
getPackageSourceHashes verbosity withRepoCtx solverPlan
defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler
return $
elaborateInstallPlan
platform compiler progdb
distDirLayout
cabalDirLayout
solverPlan
localPackages
sourcePackageHashes
defaultInstallDirs
projectConfigShared
projectConfigLocalPackages
(getMapMappend projectConfigSpecificPackage)
where
withRepoCtx = projectConfigWithSolverRepoContext verbosity
cabalPackageCacheDirectory
projectConfigShared
projectConfigBuildOnly
-- Update the files we maintain that reflect our current build environment.
-- In particular we maintain a JSON representation of the elaborated
-- install plan.
--
-- TODO: [required eventually] maintain the ghc environment file reflecting
-- the libs available. This will need to be after plan improvement phase.
--
phaseMaintainPlanOutputs :: ElaboratedInstallPlan
-> ElaboratedSharedConfig
-> Rebuild ()
phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = do
liftIO $ debug verbosity "Updating plan.json"
liftIO $ writePlanExternalRepresentation
distDirLayout
elaboratedPlan
elaboratedShared
-- Improve the elaborated install plan. The elaborated plan consists
-- mostly of source packages (with full nix-style hashed ids). Where
-- corresponding installed packages already exist in the store, replace
-- them in the plan.
--
-- Note that we do monitor the store's package db here, so we will redo
-- this improvement phase when the db changes -- including as a result of
-- executing a plan and installing things.
--
phaseImprovePlan :: ElaboratedInstallPlan
-> ElaboratedSharedConfig
-> Rebuild ElaboratedInstallPlan
phaseImprovePlan elaboratedPlan elaboratedShared = do
liftIO $ debug verbosity "Improving the install plan..."
recreateDirectory verbosity True storeDirectory
storePkgIndex <- getPackageDBContents verbosity
compiler progdb platform
storePackageDb
let improvedPlan = improveInstallPlanWithPreExistingPackages
storePkgIndex
elaboratedPlan
return improvedPlan
where
storeDirectory = cabalStoreDirectory (compilerId compiler)
storePackageDb = cabalStorePackageDB (compilerId compiler)
ElaboratedSharedConfig {
pkgConfigPlatform = platform,
pkgConfigCompiler = compiler,
pkgConfigCompilerProgs = progdb
} = elaboratedShared
programsMonitorFiles :: ProgramDb -> [MonitorFilePath]
programsMonitorFiles progdb =
[ monitor
| prog <- configuredPrograms progdb
, monitor <- monitorFileSearchPath (programMonitorFiles prog)
(programPath prog)
]
-- | Select the bits of a 'ProgramDb' to monitor for value changes.
-- Use 'programsMonitorFiles' for the files to monitor.
--
programsDbSignature :: ProgramDb -> [ConfiguredProgram]
programsDbSignature progdb =
[ prog { programMonitorFiles = []
, programOverrideEnv = filter ((/="PATH") . fst)
(programOverrideEnv prog) }
| prog <- configuredPrograms progdb ]
getInstalledPackages :: Verbosity
-> Compiler -> ProgramDb -> Platform
-> PackageDBStack
-> Rebuild InstalledPackageIndex
getInstalledPackages verbosity compiler progdb platform packagedbs = do
monitorFiles . map monitorFileOrDirectory
=<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
verbosity compiler
packagedbs progdb platform)
liftIO $ IndexUtils.getInstalledPackages
verbosity compiler
packagedbs progdb
getPackageDBContents :: Verbosity
-> Compiler -> ProgramDb -> Platform
-> PackageDB
-> Rebuild InstalledPackageIndex
getPackageDBContents verbosity compiler progdb platform packagedb = do
monitorFiles . map monitorFileOrDirectory
=<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
verbosity compiler
[packagedb] progdb platform)
liftIO $ do
createPackageDBIfMissing verbosity compiler
progdb [packagedb]
Cabal.getPackageDBContents verbosity compiler
packagedb progdb
getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a)
-> Rebuild SourcePackageDb
getSourcePackages verbosity withRepoCtx = do
(sourcePkgDb, repos) <-
liftIO $
withRepoCtx $ \repoctx -> do
sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoctx
return (sourcePkgDb, repoContextRepos repoctx)
monitorFiles . map monitorFile
. IndexUtils.getSourcePackagesMonitorFiles
$ repos
return sourcePkgDb
createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb
-> PackageDBStack -> IO ()
createPackageDBIfMissing verbosity compiler progdb packageDbs =
case reverse packageDbs of
SpecificPackageDB dbPath : _ -> do
exists <- liftIO $ Cabal.doesPackageDBExist dbPath
unless exists $ do
createDirectoryIfMissingVerbose verbosity False (takeDirectory dbPath)
Cabal.createPackageDB verbosity compiler progdb False dbPath
_ -> return ()
getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb
getPkgConfigDb verbosity progdb = do
dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb
-- Just monitor the dirs so we'll notice new .pc files.
-- Alternatively we could monitor all the .pc files too.
forM_ dirs $ \dir -> do
dirExists <- liftIO $ doesDirectoryExist dir
-- TODO: turn this into a utility function
monitorFiles [if dirExists
then monitorDirectory dir
else monitorNonExistentDirectory dir]
liftIO $ readPkgConfigDb verbosity progdb
recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild ()
recreateDirectory verbosity createParents dir = do
liftIO $ createDirectoryIfMissingVerbose verbosity createParents dir
monitorFiles [monitorDirectoryExistence dir]
-- | Select the config values to monitor for changes to package source hashes.
packageLocationsSignature :: SolverInstallPlan
-> [(PackageId, PackageLocation (Maybe FilePath))]
packageLocationsSignature solverPlan =
[ (packageId pkg, packageSource pkg)
| InstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
<- InstallPlan.toList solverPlan
]
-- | Get the 'HashValue' for all the source packages where we use hashes,
-- and download any packages required to do so.
--
-- Note that we don't get hashes for local unpacked packages.
--
getPackageSourceHashes :: Verbosity
-> (forall a. (RepoContext -> IO a) -> IO a)
-> SolverInstallPlan
-> Rebuild (Map PackageId PackageSourceHash)
getPackageSourceHashes verbosity withRepoCtx solverPlan = do
-- Determine if and where to get the package's source hash from.
--
let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))]
allPkgLocations =
[ (packageId pkg, packageSource pkg)
| InstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
<- InstallPlan.toList solverPlan ]
-- Tarballs that were local in the first place.
-- We'll hash these tarball files directly.
localTarballPkgs :: [(PackageId, FilePath)]
localTarballPkgs =
[ (pkgid, tarball)
| (pkgid, LocalTarballPackage tarball) <- allPkgLocations ]
-- Tarballs from remote URLs. We must have downloaded these already
-- (since we extracted the .cabal file earlier)
--TODO: [required eventually] finish remote tarball functionality
-- allRemoteTarballPkgs =
-- [ (pkgid, )
-- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ]
-- Tarballs from repositories, either where the repository provides
-- hashes as part of the repo metadata, or where we will have to
-- download and hash the tarball.
repoTarballPkgsWithMetadata :: [(PackageId, Repo)]
repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)]
(repoTarballPkgsWithMetadata,
repoTarballPkgsWithoutMetadata) =
partitionEithers
[ case repo of
RepoSecure{} -> Left (pkgid, repo)
_ -> Right (pkgid, repo)
| (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ]
-- For tarballs from repos that do not have hashes available we now have
-- to check if the packages were downloaded already.
--
(repoTarballPkgsToDownload,
repoTarballPkgsDownloaded)
<- fmap partitionEithers $
liftIO $ sequence
[ do mtarball <- checkRepoTarballFetched repo pkgid
case mtarball of
Nothing -> return (Left (pkgid, repo))
Just tarball -> return (Right (pkgid, tarball))
| (pkgid, repo) <- repoTarballPkgsWithoutMetadata ]
(hashesFromRepoMetadata,
repoTarballPkgsNewlyDownloaded) <-
-- Avoid having to initialise the repository (ie 'withRepoCtx') if we
-- don't have to. (The main cost is configuring the http client.)
if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata
then return (Map.empty, [])
else liftIO $ withRepoCtx $ \repoctx -> do
-- For tarballs from repos that do have hashes available as part of the
-- repo metadata we now load up the index for each repo and retrieve
-- the hashes for the packages
--
hashesFromRepoMetadata <-
Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions
fmap (Map.fromList . concat) $
sequence
-- Reading the repo index is expensive so we group the packages by repo
[ repoContextWithSecureRepo repoctx repo $ \secureRepo ->
Sec.withIndex secureRepo $ \repoIndex ->
sequence
[ do hash <- Sec.trusted <$> -- strip off Trusted tag
Sec.indexLookupHash repoIndex pkgid
-- Note that hackage-security currently uses SHA256
-- but this API could in principle give us some other
-- choice in future.
return (pkgid, hashFromTUF hash)
| pkgid <- pkgids ]
| (repo, pkgids) <-
map (\grp@((_,repo):_) -> (repo, map fst grp))
. groupBy ((==) `on` (remoteRepoName . repoRemote . snd))
. sortBy (compare `on` (remoteRepoName . repoRemote . snd))
$ repoTarballPkgsWithMetadata
]
-- For tarballs from repos that do not have hashes available, download
-- the ones we previously determined we need.
--
repoTarballPkgsNewlyDownloaded <-
sequence
[ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid
return (pkgid, tarball)
| (pkgid, repo) <- repoTarballPkgsToDownload ]
return (hashesFromRepoMetadata,
repoTarballPkgsNewlyDownloaded)
-- Hash tarball files for packages where we have to do that. This includes
-- tarballs that were local in the first place, plus tarballs from repos,
-- either previously cached or freshly downloaded.
--
let allTarballFilePkgs :: [(PackageId, FilePath)]
allTarballFilePkgs = localTarballPkgs
++ repoTarballPkgsDownloaded
++ repoTarballPkgsNewlyDownloaded
hashesFromTarballFiles <- liftIO $
fmap Map.fromList $
sequence
[ do srchash <- readFileHashValue tarball
return (pkgid, srchash)
| (pkgid, tarball) <- allTarballFilePkgs
]
monitorFiles [ monitorFile tarball
| (_pkgid, tarball) <- allTarballFilePkgs ]
-- Return the combination
return $! hashesFromRepoMetadata
<> hashesFromTarballFiles
-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------
planPackages :: Compiler
-> Platform
-> Solver -> SolverSettings
-> InstalledPackageIndex
-> SourcePackageDb
-> PkgConfigDb
-> [UnresolvedSourcePackage]
-> Map PackageName (Map OptionalStanza Bool)
-> Progress String String SolverInstallPlan
planPackages comp platform solver SolverSettings{..}
installedPkgIndex sourcePkgDb pkgConfigDB
localPackages pkgStanzasEnable =
resolveDependencies
platform (compilerInfo comp)
pkgConfigDB solver
resolverParams
where
--TODO: [nice to have] disable multiple instances restriction in the solver, but then
-- make sure we can cope with that in the output.
resolverParams =
setMaxBackjumps solverSettingMaxBackjumps
--TODO: [required eventually] should only be configurable for custom installs
-- . setIndependentGoals solverSettingIndependentGoals
. setReorderGoals solverSettingReorderGoals
--TODO: [required eventually] should only be configurable for custom installs
-- . setAvoidReinstalls solverSettingAvoidReinstalls
--TODO: [required eventually] should only be configurable for custom installs
-- . setShadowPkgs solverSettingShadowPkgs
. setStrongFlags solverSettingStrongFlags
--TODO: [required eventually] decide if we need to prefer installed for
-- global packages, or prefer latest even for global packages. Perhaps
-- should be configurable but with a different name than "upgrade-dependencies".
. setPreferenceDefault PreferLatestForSelected
{-(if solverSettingUpgradeDeps
then PreferAllLatest
else PreferLatestForSelected)-}
. removeUpperBounds solverSettingAllowNewer
. addDefaultSetupDependencies (defaultSetupDeps comp platform
. PD.packageDescription
. packageDescription)
. addPreferences
-- preferences from the config file or command line
[ PackageVersionPreference name ver
| Dependency name ver <- solverSettingPreferences ]
. addConstraints
-- version constraints from the config file or command line
[ LabeledPackageConstraint (userToPackageConstraint pc) src
| (pc, src) <- solverSettingConstraints ]
. addPreferences
-- enable stanza preference where the user did not specify
[ PackageStanzasPreference pkgname stanzas
| pkg <- localPackages
, let pkgname = packageName pkg
stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
stanzas = [ stanza | stanza <- [minBound..maxBound]
, Map.lookup stanza stanzaM == Nothing ]
, not (null stanzas)
]
. addConstraints
-- enable stanza constraints where the user asked to enable
[ LabeledPackageConstraint
(PackageConstraintStanzas pkgname stanzas)
ConstraintSourceConfigFlagOrTarget
| pkg <- localPackages
, let pkgname = packageName pkg
stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
stanzas = [ stanza | stanza <- [minBound..maxBound]
, Map.lookup stanza stanzaM == Just True ]
, not (null stanzas)
]
. addConstraints
--TODO: [nice to have] should have checked at some point that the
-- package in question actually has these flags.
[ LabeledPackageConstraint
(PackageConstraintFlags pkgname flags)
ConstraintSourceConfigFlagOrTarget
| (pkgname, flags) <- Map.toList solverSettingFlagAssignments ]
. addConstraints
--TODO: [nice to have] we have user-supplied flags for unspecified
-- local packages (as well as specific per-package flags). For the
-- former we just apply all these flags to all local targets which
-- is silly. We should check if the flags are appropriate.
[ LabeledPackageConstraint
(PackageConstraintFlags pkgname flags)
ConstraintSourceConfigFlagOrTarget
| let flags = solverSettingFlagAssignment
, not (null flags)
, pkg <- localPackages
, let pkgname = packageName pkg ]
$ stdResolverParams
stdResolverParams =
-- Note: we don't use the standardInstallPolicy here, since that uses
-- its own addDefaultSetupDependencies that is not appropriate for us.
basicInstallPolicy
installedPkgIndex sourcePkgDb
(map SpecificSourcePackage localPackages)
------------------------------------------------------------------------------
-- * Install plan post-processing
------------------------------------------------------------------------------
-- This phase goes from the InstallPlan we get from the solver and has to
-- make an elaborated install plan.
--
-- We go in two steps:
--
-- 1. elaborate all the source packages that the solver has chosen.
-- 2. swap source packages for pre-existing installed packages wherever
-- possible.
--
-- We do it in this order, elaborating and then replacing, because the easiest
-- way to calculate the installed package ids used for the replacement step is
-- from the elaborated configuration for each package.
------------------------------------------------------------------------------
-- * Install plan elaboration
------------------------------------------------------------------------------
-- | Produce an elaborated install plan using the policy for local builds with
-- a nix-style shared store.
--
-- In theory we should be able to make an elaborated install plan with a policy
-- matching that of the classic @cabal install --user@ or @--global@
--
elaborateInstallPlan
:: Platform -> Compiler -> ProgramDb
-> DistDirLayout
-> CabalDirLayout
-> SolverInstallPlan
-> [SourcePackage loc]
-> Map PackageId PackageSourceHash
-> InstallDirs.InstallDirTemplates
-> ProjectConfigShared
-> PackageConfig
-> Map PackageName PackageConfig
-> (ElaboratedInstallPlan, ElaboratedSharedConfig)
elaborateInstallPlan platform compiler compilerprogdb
DistDirLayout{..}
cabalDirLayout@CabalDirLayout{cabalStorePackageDB}
solverPlan localPackages
sourcePackageHashes
defaultInstallDirs
_sharedPackageConfig
localPackagesConfig
perPackageConfig =
(elaboratedInstallPlan, elaboratedSharedConfig)
where
elaboratedSharedConfig =
ElaboratedSharedConfig {
pkgConfigPlatform = platform,
pkgConfigCompiler = compiler,
pkgConfigCompilerProgs = compilerprogdb
}
elaboratedInstallPlan =
flip InstallPlan.mapPreservingGraph solverPlan $ \mapDep planpkg ->
case planpkg of
InstallPlan.PreExisting pkg ->
InstallPlan.PreExisting pkg
InstallPlan.Configured pkg ->
InstallPlan.Configured
(elaborateSolverPackage mapDep pkg)
_ -> error "elaborateInstallPlan: unexpected package state"
elaborateSolverPackage :: (UnitId -> UnitId)
-> SolverPackage UnresolvedPkgLoc
-> ElaboratedConfiguredPackage
elaborateSolverPackage
mapDep
pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride)
flags stanzas deps0) =
elaboratedPackage
where
-- Knot tying: the final elaboratedPackage includes the
-- pkgInstalledId, which is calculated by hashing many
-- of the other fields of the elaboratedPackage.
--
elaboratedPackage = ElaboratedConfiguredPackage {..}
deps = fmap (map elaborateSolverId) deps0
elaborateSolverId sid =
ConfiguredId {
confSrcId = packageId sid,
-- Update the 'UnitId' to the final nix-style hashed ID
confInstId = mapDep (installedPackageId sid)
}
pkgInstalledId
| shouldBuildInplaceOnly pkg
= mkUnitId (display pkgid ++ "-inplace")
          | isJust pkgSourceHash
          = hashedInstalledPackageId
(packageHashInputs
elaboratedSharedConfig
elaboratedPackage) -- recursive use of elaboratedPackage
| otherwise
= error $ "elaborateInstallPlan: non-inplace package "
++ " is missing a source hash: " ++ display pkgid
-- All the other fields of the ElaboratedConfiguredPackage
--
pkgSourceId = pkgid
pkgDescription = let Right (desc, _) =
PD.finalizePackageDescription
flags (const True)
platform (compilerInfo compiler)
[] gdesc
in desc
pkgFlagAssignment = flags
pkgFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag)
| flag <- PD.genPackageFlags gdesc ]
pkgDependencies = deps
pkgStanzasAvailable = Set.fromList stanzas
pkgStanzasRequested =
-- NB: even if a package stanza is requested, if the package
-- doesn't actually have any of that stanza we omit it from
-- the request, to ensure that we don't decide that this
-- package needs to be rebuilt. (It needs to be done here,
-- because the ElaboratedConfiguredPackage is where we test
-- whether or not there have been changes.)
Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests
, _ <- PD.testSuites pkgDescription ]
++ [ (BenchStanzas, v) | v <- maybeToList benchmarks
, _ <- PD.benchmarks pkgDescription ]
where
tests, benchmarks :: Maybe Bool
tests = perPkgOptionMaybe pkgid packageConfigTests
benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks
-- This is a placeholder which will get updated by 'pruneInstallPlanPass1'
-- and 'pruneInstallPlanPass2'. We can't populate it here
-- because whether or not tests/benchmarks should be enabled
-- is heuristically calculated based on whether or not the
-- dependencies of the test suite have already been installed,
-- but this function doesn't know what is installed (since
-- we haven't improved the plan yet), so we do it in another pass.
-- Check the comments of those functions for more details.
pkgStanzasEnabled = Set.empty
pkgBuildTargets = []
pkgReplTarget = Nothing
pkgBuildHaddocks = False
pkgSourceLocation = srcloc
pkgSourceHash = Map.lookup pkgid sourcePackageHashes
pkgBuildStyle = if shouldBuildInplaceOnly pkg
then BuildInplaceOnly else BuildAndInstall
pkgBuildPackageDBStack = buildAndRegisterDbs
pkgRegisterPackageDBStack = buildAndRegisterDbs
pkgRequiresRegistration = PD.hasPublicLib pkgDescription
pkgSetupScriptStyle = packageSetupScriptStyle pkgDescription
pkgSetupScriptCliVersion = packageSetupScriptSpecVersion
pkgSetupScriptStyle pkgDescription deps
pkgSetupPackageDBStack = buildAndRegisterDbs
buildAndRegisterDbs
| shouldBuildInplaceOnly pkg = inplacePackageDbs
| otherwise = storePackageDbs
pkgDescriptionOverride = descOverride
pkgVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively
pkgSharedLib = pkgid `Set.member` pkgsUseSharedLibrary
pkgDynExe = perPkgOptionFlag pkgid False packageConfigDynExe
pkgGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still
pkgProfExe = perPkgOptionFlag pkgid False packageConfigProf
pkgProfLib = pkgid `Set.member` pkgsUseProfilingLibrary
(pkgProfExeDetail,
pkgProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault
packageConfigProfDetail
packageConfigProfLibDetail
pkgCoverage = perPkgOptionFlag pkgid False packageConfigCoverage
pkgOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization
pkgSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs
pkgStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs
pkgStripExes = perPkgOptionFlag pkgid False packageConfigStripExes
pkgDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo
-- Combine the configured compiler prog settings with the user-supplied
-- config. For the compiler progs any user-supplied config was taken
      -- into account earlier when configuring the compiler so it's ok that
-- our configured settings for the compiler override the user-supplied
-- config here.
pkgProgramPaths = Map.fromList
[ (programId prog, programPath prog)
| prog <- configuredPrograms compilerprogdb ]
<> perPkgOptionMapLast pkgid packageConfigProgramPaths
pkgProgramArgs = Map.fromList
[ (programId prog, args)
| prog <- configuredPrograms compilerprogdb
, let args = programOverrideArgs prog
, not (null args)
]
<> perPkgOptionMapMappend pkgid packageConfigProgramArgs
pkgProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra
pkgConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs
pkgExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs
pkgExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs
pkgExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs
pkgProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix
pkgProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix
pkgInstallDirs
| shouldBuildInplaceOnly pkg
-- use the ordinary default install dirs
= (InstallDirs.absoluteInstallDirs
pkgid
(installedUnitId pkg)
(compilerInfo compiler)
InstallDirs.NoCopyDest
platform
defaultInstallDirs) {
InstallDirs.libsubdir = "", -- absoluteInstallDirs sets these as
InstallDirs.datasubdir = "" -- 'undefined' but we have to use
} -- them as "Setup.hs configure" args
| otherwise
-- use special simplified install dirs
= storePackageInstallDirs
cabalDirLayout
(compilerId compiler)
pkgInstalledId
pkgHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle
pkgHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml
pkgHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation
pkgHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables
pkgHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites
pkgHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks
pkgHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal
pkgHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss
pkgHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour
pkgHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss
pkgHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents
perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a
perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a
perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a]
perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f)
perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f)
perPkgOptionList pkgid f = lookupPerPkgOption pkgid f
perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f)
perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f)
perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f)
perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib)
where
exe = fromFlagOrDefault def bothflag
lib = fromFlagOrDefault def (bothflag <> libflag)
bothflag = lookupPerPkgOption pkgid fboth
libflag = lookupPerPkgOption pkgid flib
lookupPerPkgOption :: (Package pkg, Monoid m)
=> pkg -> (PackageConfig -> m) -> m
lookupPerPkgOption pkg f
      -- the project config specifies values that apply to packages local to
      -- the project, and can specify per-package values for any package,
      -- but by default non-local packages get all default config values
| isLocalToProject pkg = local <> perpkg
| otherwise = perpkg
where
local = f localPackagesConfig
perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig)
inplacePackageDbs = storePackageDbs
++ [ distPackageDB (compilerId compiler) ]
storePackageDbs = [ GlobalPackageDB
, cabalStorePackageDB (compilerId compiler) ]
-- For this local build policy, every package that lives in a local source
-- dir (as opposed to a tarball), or depends on such a package, will be
-- built inplace into a shared dist dir. Tarball packages that depend on
-- source dir packages will also get unpacked locally.
shouldBuildInplaceOnly :: HasUnitId pkg => pkg -> Bool
shouldBuildInplaceOnly pkg = Set.member (installedPackageId pkg)
pkgsToBuildInplaceOnly
pkgsToBuildInplaceOnly :: Set InstalledPackageId
pkgsToBuildInplaceOnly =
Set.fromList
$ map installedPackageId
$ InstallPlan.reverseDependencyClosure
solverPlan
[ installedPackageId (PlannedId (packageId pkg))
| pkg <- localPackages ]
isLocalToProject :: Package pkg => pkg -> Bool
isLocalToProject pkg = Set.member (packageId pkg)
pkgsLocalToProject
pkgsLocalToProject :: Set PackageId
pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ]
pkgsUseSharedLibrary :: Set PackageId
pkgsUseSharedLibrary =
packagesWithDownwardClosedProperty needsSharedLib
where
needsSharedLib pkg =
fromMaybe compilerShouldUseSharedLibByDefault
(liftM2 (||) pkgSharedLib pkgDynExe)
where
pkgid = packageId pkg
pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib
pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe
--TODO: [code cleanup] move this into the Cabal lib. It's currently open
-- coded in Distribution.Simple.Configure, but should be made a proper
-- function of the Compiler or CompilerInfo.
compilerShouldUseSharedLibByDefault =
case compilerFlavor compiler of
GHC -> GHC.isDynamic compiler
GHCJS -> GHCJS.isDynamic compiler
_ -> False
pkgsUseProfilingLibrary :: Set PackageId
pkgsUseProfilingLibrary =
packagesWithDownwardClosedProperty needsProfilingLib
where
needsProfilingLib pkg =
fromFlagOrDefault False (profBothFlag <> profLibFlag)
where
pkgid = packageId pkg
profBothFlag = lookupPerPkgOption pkgid packageConfigProf
profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib
--TODO: [code cleanup] unused: the old deprecated packageConfigProfExe
packagesWithDownwardClosedProperty property =
Set.fromList
$ map packageId
$ InstallPlan.dependencyClosure
solverPlan
[ installedPackageId pkg
| pkg <- InstallPlan.toList solverPlan
          , property pkg ]   -- just the packages that satisfy the property
--TODO: [nice to have] this does not check the config consistency,
-- e.g. a package explicitly turning off profiling, but something
-- depending on it that needs profiling. This really needs a separate
-- package config validation/resolution pass.
--TODO: [nice to have] config consistency checking:
-- * profiling libs & exes, exe needs lib, recursive
-- * shared libs & exes, exe needs lib, recursive
-- * vanilla libs & exes, exe needs lib, recursive
-- * ghci or shared lib needed by TH, recursive, ghc version dependent
---------------------------
-- Build targets
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- data PackageTarget = ...
-- data ComponentTarget = ...
-- data SubComponentTarget = ...
--TODO: this needs to report some user target/config errors
elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget]
-> ([ComponentTarget], Maybe ComponentTarget, Bool)
elaboratePackageTargets ElaboratedConfiguredPackage{..} targets =
let buildTargets = nubComponentTargets
. map compatSubComponentTargets
. concatMap elaborateBuildTarget
$ targets
--TODO: instead of listToMaybe we should be reporting an error here
replTargets = listToMaybe
. nubComponentTargets
. map compatSubComponentTargets
. concatMap elaborateReplTarget
$ targets
buildHaddocks = HaddockDefaultComponents `elem` targets
in (buildTargets, replTargets, buildHaddocks)
where
--TODO: need to report an error here if defaultComponents is empty
elaborateBuildTarget BuildDefaultComponents = pkgDefaultComponents
elaborateBuildTarget (BuildSpecificComponent t) = [t]
elaborateBuildTarget _ = []
--TODO: need to report an error here if defaultComponents is empty
elaborateReplTarget ReplDefaultComponent = take 1 pkgDefaultComponents
elaborateReplTarget (ReplSpecificComponent t) = [t]
elaborateReplTarget _ = []
pkgDefaultComponents =
[ ComponentTarget cname WholeComponent
| c <- Cabal.pkgComponents pkgDescription
, PD.buildable (Cabal.componentBuildInfo c)
, let cname = Cabal.componentName c
, enabledOptionalStanza cname
]
where
enabledOptionalStanza cname =
case componentOptionalStanza cname of
Nothing -> True
Just stanza -> Map.lookup stanza pkgStanzasRequested
== Just True
-- Not all Cabal Setup.hs versions support sub-component targets, so switch
-- them over to the whole component
compatSubComponentTargets :: ComponentTarget -> ComponentTarget
compatSubComponentTargets target@(ComponentTarget cname _subtarget)
| not setupHsSupportsSubComponentTargets
= ComponentTarget cname WholeComponent
| otherwise = target
-- Actually the reality is that no current version of Cabal's Setup.hs
-- build command actually support building specific files or modules.
setupHsSupportsSubComponentTargets = False
-- TODO: when that changes, adjust this test, e.g.
-- | pkgSetupScriptCliVersion >= Version [x,y] []
nubComponentTargets :: [ComponentTarget] -> [ComponentTarget]
nubComponentTargets =
concatMap (wholeComponentOverrides . map snd)
. groupBy ((==) `on` fst)
. sortBy (compare `on` fst)
. map (\t@(ComponentTarget cname _) -> (cname, t))
    -- If we're building the whole component then that's the only target we
    -- need; otherwise we can have several targets within the component.
wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget]
wholeComponentOverrides ts =
case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of
(t:_) -> [t]
[] -> ts
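-- A sketch of the intended behaviour of 'nubComponentTargets' (with @cname@
-- a component name and @sub@ some non-'WholeComponent' sub-target, both
-- assumed here purely for illustration):
--
-- > nubComponentTargets [ ComponentTarget cname sub
-- >                     , ComponentTarget cname WholeComponent ]
-- >   == [ ComponentTarget cname WholeComponent ]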
pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool
pkgHasEphemeralBuildTargets pkg =
isJust (pkgReplTarget pkg)
|| (not . null) [ () | ComponentTarget _ subtarget <- pkgBuildTargets pkg
, subtarget /= WholeComponent ]
-- | The components that we'll build all of, meaning that after they're built
-- we can skip building them again (unlike with building just some modules or
-- other files within a component).
--
pkgBuildTargetWholeComponents :: ElaboratedConfiguredPackage
-> Set ComponentName
pkgBuildTargetWholeComponents pkg =
Set.fromList
[ cname | ComponentTarget cname WholeComponent <- pkgBuildTargets pkg ]
------------------------------------------------------------------------------
-- * Install plan pruning
------------------------------------------------------------------------------
-- | Given a set of package targets (and optionally component targets within
-- those packages), take the subset of the install plan needed to build those
-- targets. Also, update the package config to specify which optional stanzas
-- to enable, and which targets within each package to build.
--
pruneInstallPlanToTargets :: Map InstalledPackageId [PackageTarget]
-> ElaboratedInstallPlan -> ElaboratedInstallPlan
pruneInstallPlanToTargets perPkgTargetsMap =
either (\_ -> assert False undefined) id
. InstallPlan.new (IndependentGoals False)
. PackageIndex.fromList
-- We have to do this in two passes
. pruneInstallPlanPass2
. pruneInstallPlanPass1 perPkgTargetsMap
. InstallPlan.toList
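-- A hypothetical call site (sketch) for 'pruneInstallPlanToTargets': prune the
-- plan down to what is needed to build the default components of one package,
-- where @ipkgid@ is assumed to be that package's id as used in the plan:
--
-- > pruneInstallPlanToTargets
-- >   (Map.singleton ipkgid [BuildDefaultComponents])
-- >   elaboratedPlan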
-- | The first pass does three things:
--
-- * Set the build targets based on the user targets (but not rev deps yet).
-- * A first go at determining which optional stanzas (testsuites, benchmarks)
-- are needed. We have a second go in the next pass.
-- * Take the dependency closure using pruned dependencies. We prune deps that
-- are used only by unneeded optional stanzas. These pruned deps are only
-- used for the dependency closure and are not persisted in this pass.
--
pruneInstallPlanPass1 :: Map InstalledPackageId [PackageTarget]
-> [ElaboratedPlanPackage]
-> [ElaboratedPlanPackage]
pruneInstallPlanPass1 perPkgTargetsMap pkgs =
map fst $
dependencyClosure
(installedPackageId . fst) -- the pkg id
snd -- the pruned deps
[ (pkg', pruneOptionalDependencies pkg')
| pkg <- pkgs
, let pkg' = mapConfiguredPackage
(pruneOptionalStanzas . setBuildTargets) pkg
]
(Map.keys perPkgTargetsMap)
where
-- Elaborate and set the targets we'll build for this package. This is just
-- based on the targets from the user, not targets implied by reverse
    -- dependencies. Those come in the second pass once we know the rev deps.
--
setBuildTargets pkg =
pkg {
pkgBuildTargets = buildTargets,
pkgReplTarget = replTarget,
pkgBuildHaddocks = buildHaddocks
}
where
(buildTargets, replTarget, buildHaddocks)
= elaboratePackageTargets pkg targets
targets = fromMaybe []
$ Map.lookup (installedPackageId pkg) perPkgTargetsMap
-- Decide whether or not to enable testsuites and benchmarks
--
-- The testsuite and benchmark targets are somewhat special in that we need
-- to configure the packages with them enabled, and we need to do that even
-- if we only want to build one of several testsuites.
--
-- There are two cases in which we will enable the testsuites (or
-- benchmarks): if one of the targets is a testsuite, or if all of the
-- testsuite dependencies are already cached in the store. The rationale
-- for the latter is to minimise how often we have to reconfigure due to
-- the particular targets we choose to build. Otherwise choosing to build
-- a testsuite target, and then later choosing to build an exe target
-- would involve unnecessarily reconfiguring the package with testsuites
-- disabled. Technically this introduces a little bit of stateful
-- behaviour to make this "sticky", but it should be benign.
--
pruneOptionalStanzas pkg = pkg { pkgStanzasEnabled = stanzas }
where
stanzas :: Set OptionalStanza
stanzas = optionalStanzasRequiredByTargets pkg
<> optionalStanzasRequestedByDefault pkg
<> optionalStanzasWithDepsAvailable availablePkgs pkg
-- Calculate package dependencies but cut out those needed only by
-- optional stanzas that we've determined we will not enable.
-- These pruned deps are not persisted in this pass since they're based on
-- the optional stanzas and we'll make further tweaks to the optional
-- stanzas in the next pass.
--
pruneOptionalDependencies :: ElaboratedPlanPackage -> [InstalledPackageId]
pruneOptionalDependencies (InstallPlan.Configured pkg) =
(CD.flatDeps . CD.filterDeps keepNeeded) (depends pkg)
where
keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas
keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
keepNeeded _ _ = True
stanzas = pkgStanzasEnabled pkg
pruneOptionalDependencies pkg =
CD.flatDeps (depends pkg)
optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage
-> Set OptionalStanza
optionalStanzasRequiredByTargets pkg =
Set.fromList
[ stanza
| ComponentTarget cname _ <- pkgBuildTargets pkg
++ maybeToList (pkgReplTarget pkg)
, stanza <- maybeToList (componentOptionalStanza cname)
]
optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage
-> Set OptionalStanza
optionalStanzasRequestedByDefault =
Map.keysSet
. Map.filter (id :: Bool -> Bool)
. pkgStanzasRequested
availablePkgs =
Set.fromList
[ installedPackageId pkg
| InstallPlan.PreExisting pkg <- pkgs ]
-- | Given a set of already installed packages @availablePkgs@,
-- determine the set of available optional stanzas from @pkg@
-- which have all of their dependencies already installed. This is used
-- to implement "sticky" testsuites, where once we have installed
-- all of the deps needed for the test suite, we go ahead and
-- enable it always.
optionalStanzasWithDepsAvailable :: Set InstalledPackageId
-> ElaboratedConfiguredPackage
-> Set OptionalStanza
optionalStanzasWithDepsAvailable availablePkgs pkg =
Set.fromList
[ stanza
| stanza <- Set.toList (pkgStanzasAvailable pkg)
, let deps :: [InstalledPackageId]
deps = map installedPackageId
$ CD.select (optionalStanzaDeps stanza)
(pkgDependencies pkg)
, all (`Set.member` availablePkgs) deps
]
where
optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True
optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True
optionalStanzaDeps _ _ = False
-- The second pass does three things:
--
-- * A second go at deciding which optional stanzas to enable.
-- * Prune the dependencies based on the final choice of optional stanzas.
-- * Extend the targets within each package to build, now we know the reverse
-- dependencies, ie we know which libs are needed as deps by other packages.
--
-- Achieving sticky behaviour with enabling\/disabling optional stanzas is
-- tricky. The first approximation was handled by the first pass above, but
-- it's not quite enough. That pass will enable stanzas if all of the deps
-- of the optional stanza are already installed /in the store/. That's important
-- but it does not account for dependencies that get built inplace as part of
-- the project. We cannot take those inplace build deps into account in the
-- pruning pass however because we don't yet know which ones we're going to
-- build. Once we do know, we can have another go and enable stanzas that have
-- all their deps available. Now we can consider all packages in the pruned
-- plan to be available, including ones we already decided to build from
-- source.
--
-- Deciding which targets to build depends on knowing which packages have
-- reverse dependencies (ie are needed). This requires the result of the first
-- pass, which is another reason we have to split it into two passes.
--
-- Note that just because we might enable testsuites or benchmarks (in the
-- first or second pass) doesn't mean that we build all (or even any) of them.
-- That depends on which targets we picked in the first pass.
--
pruneInstallPlanPass2 :: [ElaboratedPlanPackage]
-> [ElaboratedPlanPackage]
pruneInstallPlanPass2 pkgs =
map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs
where
setStanzasDepsAndTargets pkg =
pkg {
pkgStanzasEnabled = stanzas,
pkgDependencies = CD.filterDeps keepNeeded (pkgDependencies pkg),
pkgBuildTargets = pkgBuildTargets pkg ++ targetsRequiredForRevDeps
}
where
stanzas :: Set OptionalStanza
stanzas = pkgStanzasEnabled pkg
<> optionalStanzasWithDepsAvailable availablePkgs pkg
keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas
keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
keepNeeded _ _ = True
targetsRequiredForRevDeps =
[ ComponentTarget (Cabal.defaultLibName (pkgSourceId pkg)) WholeComponent
-- if anything needs this pkg, build the library component
| installedPackageId pkg `Set.member` hasReverseLibDeps
]
--TODO: also need to track build-tool rev-deps for exes
availablePkgs :: Set InstalledPackageId
availablePkgs = Set.fromList (map installedPackageId pkgs)
hasReverseLibDeps :: Set InstalledPackageId
hasReverseLibDeps =
Set.fromList [ depid | pkg <- pkgs
, depid <- CD.flatDeps (depends pkg) ]
mapConfiguredPackage :: (ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage)
-> ElaboratedPlanPackage
-> ElaboratedPlanPackage
mapConfiguredPackage f (InstallPlan.Configured pkg) =
InstallPlan.Configured (f pkg)
mapConfiguredPackage _ pkg = pkg
componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza
componentOptionalStanza (Cabal.CTestName _) = Just TestStanzas
componentOptionalStanza (Cabal.CBenchName _) = Just BenchStanzas
componentOptionalStanza _ = Nothing
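-- | Compute the packages reachable from the given roots by following the
-- dependency relation. Informally: with packages A -> B -> C (A depends on B,
-- and B on C), asking for the closure of just B yields [B, C].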
dependencyClosure :: (pkg -> InstalledPackageId)
-> (pkg -> [InstalledPackageId])
-> [pkg]
-> [InstalledPackageId]
-> [pkg]
dependencyClosure pkgid deps allpkgs =
map vertexToPkg
. concatMap Tree.flatten
. Graph.dfs graph
. map pkgidToVertex
where
(graph, vertexToPkg, pkgidToVertex) = dependencyGraph pkgid deps allpkgs
dependencyGraph :: (pkg -> InstalledPackageId)
-> (pkg -> [InstalledPackageId])
-> [pkg]
-> (Graph.Graph,
Graph.Vertex -> pkg,
InstalledPackageId -> Graph.Vertex)
dependencyGraph pkgid deps pkgs =
(graph, vertexToPkg', pkgidToVertex')
where
(graph, vertexToPkg, pkgidToVertex) =
Graph.graphFromEdges [ ( pkg, pkgid pkg, deps pkg )
| pkg <- pkgs ]
vertexToPkg' = (\(pkg,_,_) -> pkg)
. vertexToPkg
pkgidToVertex' = fromMaybe (error "dependencyGraph: lookup failure")
. pkgidToVertex
---------------------------
-- Setup.hs script policy
--
-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the
-- solver phase, and part in the elaboration phase. We keep the helper
-- functions for both phases together here so at least you can see all of it
-- in one place.
--
-- There are four major cases for Setup.hs handling:
--
-- 1. @build-type@ Custom with a @custom-setup@ section
-- 2. @build-type@ Custom without a @custom-setup@ section
-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@
-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
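-- As an informal illustration of case 1, such a package's .cabal file contains
-- something along these lines (a sketch, not taken from any real package):
--
-- > build-type: Custom
-- >
-- > custom-setup
-- >   setup-depends: base, Cabal >= 1.23
--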
-- data SetupScriptStyle = ... -- see ProjectPlanning.Types
-- | Work out the 'SetupScriptStyle' given the package description.
--
packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle
packageSetupScriptStyle pkg
| buildType == PD.Custom
, Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza
, not (PD.defaultSetupDepends setupbi) -- but not one we added internally
= SetupCustomExplicitDeps
| buildType == PD.Custom
, Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as
, PD.defaultSetupDepends setupbi -- the solver fills in the deps
= SetupCustomImplicitDeps
| buildType == PD.Custom
, Nothing <- PD.setupBuildInfo pkg -- we get this case pre-solver
= SetupCustomImplicitDeps
| PD.specVersion pkg > cabalVersion -- one cabal-install is built against
= SetupNonCustomExternalLib
| otherwise
= SetupNonCustomInternalLib
where
buildType = fromMaybe PD.Custom (PD.buildType pkg)
-- | Part of our Setup.hs handling policy is implemented by getting the solver
-- to work out setup dependencies for packages. The solver already handles
-- packages that explicitly specify setup dependencies, but we can also tell
-- the solver to treat other packages as if they had setup dependencies.
-- That's what this function does: it gets called by the solver for all
-- packages that don't already have setup dependencies.
--
-- The dependencies we want to add are different for each 'SetupScriptStyle'.
--
-- Note that adding default deps means these deps are actually /added/ to the
-- packages that we get out of the solver in the 'SolverInstallPlan'. Making
-- implicit setup deps explicit is a problem in the post-solver stages because
-- we still need to distinguish the case of explicit and implicit setup deps.
-- See 'rememberImplicitSetupDeps'.
--
defaultSetupDeps :: Compiler -> Platform
-> PD.PackageDescription
-> Maybe [Dependency]
defaultSetupDeps compiler platform pkg =
case packageSetupScriptStyle pkg of
-- For packages with build type custom that do not specify explicit
-- setup dependencies, we add a dependency on Cabal and a number
-- of other packages.
SetupCustomImplicitDeps ->
Just $
[ Dependency depPkgname anyVersion
| depPkgname <- legacyCustomSetupPkgs compiler platform ] ++
[ Dependency cabalPkgname cabalConstraint
| packageName pkg /= cabalPkgname ]
where
-- The Cabal dep is slightly special:
-- * We omit the dep for the Cabal lib itself, since it bootstraps.
-- * We constrain it to be >= 1.18 < 2
--
cabalConstraint = orLaterVersion cabalCompatMinVer
`intersectVersionRanges`
orLaterVersion (PD.specVersion pkg)
`intersectVersionRanges`
earlierVersion cabalCompatMaxVer
-- The idea here is that at some point we will make significant
-- breaking changes to the Cabal API that Setup.hs scripts use.
-- So for old custom Setup scripts that do not specify explicit
-- constraints, we constrain them to use a compatible Cabal version.
-- The exact version where we'll make this API break has not yet been
-- decided, so for the meantime we guess at 2.x.
cabalCompatMaxVer = Version [2] []
-- In principle we can talk to any old Cabal version, and we need to
-- be able to do that for custom Setup scripts that require older
-- Cabal lib versions. However in practice we currently have
-- problems with Cabal-1.16. (1.16 does not know about build targets)
-- If this is fixed we can relax this constraint.
cabalCompatMinVer = Version [1,18] []
-- For other build types (like Simple) if we still need to compile an
-- external Setup.hs, it'll be one of the simple ones that only depends
-- on Cabal and base.
SetupNonCustomExternalLib ->
Just [ Dependency cabalPkgname cabalConstraint
, Dependency basePkgname anyVersion ]
where
cabalConstraint = orLaterVersion (PD.specVersion pkg)
-- The internal setup wrapper method has no deps at all.
SetupNonCustomInternalLib -> Just []
SetupCustomExplicitDeps ->
error $ "defaultSetupDeps: called for a package with explicit "
++ "setup deps: " ++ display (packageId pkg)
-- | Work out which version of the Cabal spec we will be using to talk to the
-- Setup.hs interface for this package.
--
-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result
-- of what the solver picked for us, based on the explicit setup deps or the
-- ones added implicitly by 'defaultSetupDeps'.
--
packageSetupScriptSpecVersion :: Package pkg
=> SetupScriptStyle
-> PD.PackageDescription
-> ComponentDeps [pkg]
-> Version
-- We're going to be using the internal Cabal library, so the spec version of
-- that is simply the version of the Cabal library that cabal-install has been
-- built with.
packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ =
cabalVersion
-- If we happen to be building the Cabal lib itself then, because it
-- bootstraps itself, we use the version of the lib we're building.
packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _
| packageName pkg == cabalPkgname
= packageVersion pkg
-- In all other cases we have a look at what version of the Cabal lib the
-- solver picked. Or if it didn't depend on Cabal at all (which is very rare)
-- then we look at the .cabal file to see what spec version it declares.
packageSetupScriptSpecVersion _ pkg deps =
case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of
Just dep -> packageVersion dep
Nothing -> PD.specVersion pkg
cabalPkgname, basePkgname :: PackageName
cabalPkgname = PackageName "Cabal"
basePkgname = PackageName "base"
legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName]
legacyCustomSetupPkgs compiler (Platform _ os) =
map PackageName $
[ "array", "base", "binary", "bytestring", "containers"
, "deepseq", "directory", "filepath", "old-time", "pretty"
, "process", "time", "transformers" ]
++ [ "Win32" | os == Windows ]
++ [ "unix" | os /= Windows ]
++ [ "ghc-prim" | isGHC ]
++ [ "template-haskell" | isGHC ]
where
isGHC = compilerCompatFlavor GHC compiler
-- The other aspects of our Setup.hs policy live here, where we decide on
-- the 'SetupScriptOptions'.
--
-- Our current policy for the 'SetupCustomImplicitDeps' case is that we
-- try to make the implicit deps cover everything, and we don't allow the
-- compiler to pick up other deps. This may or may not be sustainable, and
-- we might have to allow the deps to be non-exclusive, but that itself would
-- be tricky since we would have to allow the Setup access to all the packages
-- in the store and local dbs.
setupHsScriptOptions :: ElaboratedReadyPackage
-> ElaboratedSharedConfig
-> FilePath
-> FilePath
-> Bool
-> Lock
-> SetupScriptOptions
setupHsScriptOptions (ReadyPackage ElaboratedConfiguredPackage{..})
ElaboratedSharedConfig{..} srcdir builddir
isParallelBuild cacheLock =
SetupScriptOptions {
useCabalVersion = thisVersion pkgSetupScriptCliVersion,
useCabalSpecVersion = Just pkgSetupScriptCliVersion,
useCompiler = Just pkgConfigCompiler,
usePlatform = Just pkgConfigPlatform,
usePackageDB = pkgSetupPackageDBStack,
usePackageIndex = Nothing,
useDependencies = [ (uid, srcid)
| ConfiguredId srcid uid <- CD.setupDeps pkgDependencies ],
useDependenciesExclusive = True,
useVersionMacros = pkgSetupScriptStyle == SetupCustomExplicitDeps,
useProgramConfig = pkgConfigCompilerProgs,
useDistPref = builddir,
useLoggingHandle = Nothing, -- this gets set later
useWorkingDir = Just srcdir,
useWin32CleanHack = False, --TODO: [required eventually]
forceExternalSetupMethod = isParallelBuild,
setupCacheLock = Just cacheLock
}
-- | To be used for the input for elaborateInstallPlan.
--
-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure.
--
userInstallDirTemplates :: Compiler
-> IO InstallDirs.InstallDirTemplates
userInstallDirTemplates compiler = do
InstallDirs.defaultInstallDirs
(compilerFlavor compiler)
True -- user install
False -- unused
storePackageInstallDirs :: CabalDirLayout
-> CompilerId
-> InstalledPackageId
-> InstallDirs.InstallDirs FilePath
storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory}
compid ipkgid =
InstallDirs.InstallDirs {..}
where
prefix = cabalStorePackageDirectory compid ipkgid
bindir = prefix </> "bin"
libdir = prefix </> "lib"
libsubdir = ""
dynlibdir = libdir
libexecdir = prefix </> "libexec"
includedir = libdir </> "include"
datadir = prefix </> "share"
datasubdir = ""
docdir = datadir </> "doc"
mandir = datadir </> "man"
htmldir = docdir </> "html"
haddockdir = htmldir
sysconfdir = prefix </> "etc"
--TODO: [code cleanup] perhaps reorder this code
-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig,
-- make the various Setup.hs {configure,build,copy} flags
setupHsConfigureFlags :: ElaboratedReadyPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.ConfigFlags
setupHsConfigureFlags (ReadyPackage
pkg@ElaboratedConfiguredPackage{..})
sharedConfig@ElaboratedSharedConfig{..}
verbosity builddir =
sanityCheckElaboratedConfiguredPackage sharedConfig pkg
(Cabal.ConfigFlags {..})
where
configDistPref = toFlag builddir
configVerbosity = toFlag verbosity
configIPID = toFlag (display (installedUnitId pkg))
configProgramPaths = Map.toList pkgProgramPaths
configProgramArgs = Map.toList pkgProgramArgs
configProgramPathExtra = toNubList pkgProgramPathExtra
configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler)
configHcPath = mempty -- we use configProgramPaths instead
configHcPkg = mempty -- we use configProgramPaths instead
configVanillaLib = toFlag pkgVanillaLib
configSharedLib = toFlag pkgSharedLib
configDynExe = toFlag pkgDynExe
configGHCiLib = toFlag pkgGHCiLib
configProfExe = mempty
configProfLib = toFlag pkgProfLib
configProf = toFlag pkgProfExe
-- configProfDetail is for exe+lib, but overridden by configProfLibDetail
-- so we specify both so that they can be set independently
configProfDetail = toFlag pkgProfExeDetail
configProfLibDetail = toFlag pkgProfLibDetail
configCoverage = toFlag pkgCoverage
configLibCoverage = mempty
configOptimization = toFlag pkgOptimization
configSplitObjs = toFlag pkgSplitObjs
configStripExes = toFlag pkgStripExes
configStripLibs = toFlag pkgStripLibs
configDebugInfo = toFlag pkgDebugInfo
configAllowNewer = mempty -- we use configExactConfiguration True
configConfigurationsFlags = pkgFlagAssignment
configConfigureArgs = pkgConfigureScriptArgs
configExtraLibDirs = pkgExtraLibDirs
configExtraFrameworkDirs = pkgExtraFrameworkDirs
configExtraIncludeDirs = pkgExtraIncludeDirs
configProgPrefix = maybe mempty toFlag pkgProgPrefix
configProgSuffix = maybe mempty toFlag pkgProgSuffix
configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate)
pkgInstallDirs
-- we only use configDependencies, unless we're talking to an old Cabal
-- in which case we use configConstraints
configDependencies = [ (packageName srcid, uid)
| ConfiguredId srcid uid <- CD.nonSetupDeps pkgDependencies ]
configConstraints = [ thisPackageVersion srcid
| ConfiguredId srcid _uid <- CD.nonSetupDeps pkgDependencies ]
-- explicitly clear, then our package db stack
-- TODO: [required eventually] have to do this differently for older Cabal versions
configPackageDBs = Nothing : map Just pkgBuildPackageDBStack
configTests = toFlag (TestStanzas `Set.member` pkgStanzasEnabled)
configBenchmarks = toFlag (BenchStanzas `Set.member` pkgStanzasEnabled)
configExactConfiguration = toFlag True
configFlagError = mempty --TODO: [research required] appears not to be implemented
configRelocatable = mempty --TODO: [research required] ???
configScratchDir = mempty -- never use
configUserInstall = mempty -- don't rely on defaults
configPrograms_ = mempty -- never use, shouldn't exist
setupHsBuildFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.BuildFlags
setupHsBuildFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
Cabal.BuildFlags {
buildProgramPaths = mempty, --unused, set at configure time
buildProgramArgs = mempty, --unused, set at configure time
buildVerbosity = toFlag verbosity,
buildDistPref = toFlag builddir,
buildAssumeDepsUpToDate = toFlag False,
buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs),
buildArgs = mempty -- unused, passed via args not flags
}
setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String]
setupHsBuildArgs pkg =
map (showComponentTarget pkg) (pkgBuildTargets pkg)
showComponentTarget :: ElaboratedConfiguredPackage -> ComponentTarget -> String
showComponentTarget _pkg =
showBuildTarget . toBuildTarget
where
showBuildTarget t =
Cabal.showBuildTarget (qlBuildTarget t) t
qlBuildTarget Cabal.BuildTargetComponent{} = Cabal.QL2
qlBuildTarget _ = Cabal.QL3
toBuildTarget :: ComponentTarget -> Cabal.BuildTarget
toBuildTarget (ComponentTarget cname subtarget) =
case subtarget of
WholeComponent -> Cabal.BuildTargetComponent cname
ModuleTarget mname -> Cabal.BuildTargetModule cname mname
FileTarget fname -> Cabal.BuildTargetFile cname fname
setupHsReplFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.ReplFlags
setupHsReplFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
Cabal.ReplFlags {
replProgramPaths = mempty, --unused, set at configure time
replProgramArgs = mempty, --unused, set at configure time
replVerbosity = toFlag verbosity,
replDistPref = toFlag builddir,
replReload = mempty --only used as callback from repl
}
setupHsReplArgs :: ElaboratedConfiguredPackage -> [String]
setupHsReplArgs pkg =
maybe [] (\t -> [showComponentTarget pkg t]) (pkgReplTarget pkg)
--TODO: should be able to give multiple modules in one component
setupHsCopyFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.CopyFlags
setupHsCopyFlags _ _ verbosity builddir =
Cabal.CopyFlags {
--TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right
-- thing, but perhaps we ought really to copy into an image dir and do
-- some sanity checks and move into the final location ourselves
copyArgs = [], -- TODO: could use this to only copy what we enabled
copyDest = toFlag InstallDirs.NoCopyDest,
copyDistPref = toFlag builddir,
copyAssumeDepsUpToDate = toFlag False,
copyVerbosity = toFlag verbosity
}
setupHsRegisterFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> FilePath
-> Cabal.RegisterFlags
setupHsRegisterFlags ElaboratedConfiguredPackage {pkgBuildStyle} _
verbosity builddir pkgConfFile =
Cabal.RegisterFlags {
regPackageDB = mempty, -- misfeature
regGenScript = mempty, -- never use
regGenPkgConf = toFlag (Just pkgConfFile),
regInPlace = case pkgBuildStyle of
BuildInplaceOnly -> toFlag True
_ -> toFlag False,
regPrintId = mempty, -- never use
regDistPref = toFlag builddir,
regVerbosity = toFlag verbosity,
-- Currently not used, because this is per-package.
regAssumeDepsUpToDate = toFlag False,
regArgs = []
}
setupHsHaddockFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.HaddockFlags
setupHsHaddockFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
Cabal.HaddockFlags {
haddockProgramPaths = mempty, --unused, set at configure time
haddockProgramArgs = mempty, --unused, set at configure time
haddockHoogle = toFlag pkgHaddockHoogle,
haddockHtml = toFlag pkgHaddockHtml,
haddockHtmlLocation = maybe mempty toFlag pkgHaddockHtmlLocation,
haddockForHackage = mempty, --TODO: new flag
haddockExecutables = toFlag pkgHaddockExecutables,
haddockTestSuites = toFlag pkgHaddockTestSuites,
haddockBenchmarks = toFlag pkgHaddockBenchmarks,
haddockInternal = toFlag pkgHaddockInternal,
haddockCss = maybe mempty toFlag pkgHaddockCss,
haddockHscolour = toFlag pkgHaddockHscolour,
haddockHscolourCss = maybe mempty toFlag pkgHaddockHscolourCss,
haddockContents = maybe mempty toFlag pkgHaddockContents,
haddockDistPref = toFlag builddir,
haddockKeepTempFiles = mempty, --TODO: from build settings
haddockVerbosity = toFlag verbosity
}
{-
setupHsTestFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.TestFlags
setupHsTestFlags _ _ verbosity builddir =
Cabal.TestFlags {
}
-}
------------------------------------------------------------------------------
-- * Sharing installed packages
------------------------------------------------------------------------------
--
-- Nix style store management for tarball packages
--
-- So here's our strategy:
--
-- We use a per-user nix-style hashed store, but /only/ for tarball packages.
-- So that includes packages from hackage repos (and other http and local
-- tarballs). For packages in local directories we do not register them into
-- the shared store by default, we just build them locally inplace.
--
-- The reason we do it like this is that it's easy to make stable hashes for
-- tarball packages, and these packages benefit most from sharing. By contrast
-- unpacked dir packages are harder to hash and they tend to change more
-- frequently so there's less benefit to sharing them.
--
-- When using the nix store approach we have to run the solver *without*
-- looking at the packages installed in the store, just at the source packages
-- (plus core\/global installed packages). Then we do a post-processing pass
-- to replace configured packages in the plan with pre-existing ones, where
-- possible. Where possible of course means where the nix-style package hash
-- equals one that's already in the store.
--
-- One extra wrinkle is that unless we know package tarball hashes upfront, we
-- will have to download the tarballs to find their hashes. So we have two
-- options: delay replacing source with pre-existing installed packages until
-- the point during the execution of the install plan where we have the
-- tarball, or try to do as much up-front as possible and then check again
-- during plan execution. The former isn't great because we would end up
-- telling users we're going to re-install loads of packages when in fact we
-- would just share them. It'd be better to give as accurate a prediction as
-- we can. The latter is better for users, but we do still have to check
-- during plan execution because it's important that we don't replace existing
-- installed packages even if they have the same package hash, because we
-- don't guarantee ABI stability.
-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but
-- not replace installed packages with ghc-pkg.
packageHashInputs :: ElaboratedSharedConfig
-> ElaboratedConfiguredPackage
-> PackageHashInputs
packageHashInputs
pkgshared
pkg@ElaboratedConfiguredPackage{
pkgSourceId,
pkgSourceHash = Just srchash,
pkgDependencies
} =
PackageHashInputs {
pkgHashPkgId = pkgSourceId,
pkgHashSourceHash = srchash,
pkgHashDirectDeps = Set.fromList
[ installedPackageId dep
| dep <- CD.select relevantDeps pkgDependencies ],
pkgHashOtherConfig = packageHashConfigInputs pkgshared pkg
}
where
-- Obviously the main deps are relevant
relevantDeps (CD.ComponentLib _) = True
relevantDeps (CD.ComponentExe _) = True
-- Setup deps can affect the Setup.hs behaviour and thus what is built
relevantDeps CD.ComponentSetup = True
-- However testsuites and benchmarks do not get installed and should not
-- affect the result, so we do not include them.
relevantDeps (CD.ComponentTest _) = False
relevantDeps (CD.ComponentBench _) = False
packageHashInputs _ pkg =
error $ "packageHashInputs: only for packages with source hashes. "
++ display (packageId pkg)
packageHashConfigInputs :: ElaboratedSharedConfig
-> ElaboratedConfiguredPackage
-> PackageHashConfigInputs
packageHashConfigInputs
ElaboratedSharedConfig{..}
ElaboratedConfiguredPackage{..} =
PackageHashConfigInputs {
pkgHashCompilerId = compilerId pkgConfigCompiler,
pkgHashPlatform = pkgConfigPlatform,
pkgHashFlagAssignment = pkgFlagAssignment,
pkgHashConfigureScriptArgs = pkgConfigureScriptArgs,
pkgHashVanillaLib = pkgVanillaLib,
pkgHashSharedLib = pkgSharedLib,
pkgHashDynExe = pkgDynExe,
pkgHashGHCiLib = pkgGHCiLib,
pkgHashProfLib = pkgProfLib,
pkgHashProfExe = pkgProfExe,
pkgHashProfLibDetail = pkgProfLibDetail,
pkgHashProfExeDetail = pkgProfExeDetail,
pkgHashCoverage = pkgCoverage,
pkgHashOptimization = pkgOptimization,
pkgHashSplitObjs = pkgSplitObjs,
pkgHashStripLibs = pkgStripLibs,
pkgHashStripExes = pkgStripExes,
pkgHashDebugInfo = pkgDebugInfo,
pkgHashExtraLibDirs = pkgExtraLibDirs,
pkgHashExtraFrameworkDirs = pkgExtraFrameworkDirs,
pkgHashExtraIncludeDirs = pkgExtraIncludeDirs,
pkgHashProgPrefix = pkgProgPrefix,
pkgHashProgSuffix = pkgProgSuffix
}
-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an
-- 'ElaboratedInstallPlan', replace configured source packages by pre-existing
-- installed packages whenever they exist.
--
improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex
-> ElaboratedInstallPlan
-> ElaboratedInstallPlan
improveInstallPlanWithPreExistingPackages installedPkgIndex installPlan =
replaceWithPreExisting installPlan
[ ipkg
| InstallPlan.Configured pkg
<- InstallPlan.reverseTopologicalOrder installPlan
, ipkg <- maybeToList (canPackageBeImproved pkg) ]
where
--TODO: sanity checks:
-- * the installed package must have the expected deps etc
-- * the installed package must not be broken, valid dep closure
--TODO: decide what to do if we encounter broken installed packages,
-- since overwriting is never safe.
canPackageBeImproved pkg =
PackageIndex.lookupUnitId
installedPkgIndex (installedPackageId pkg)
replaceWithPreExisting =
foldl' (\plan ipkg -> InstallPlan.preexisting
(installedPackageId ipkg) ipkg plan)
| headprogrammingczar/cabal | cabal-install/Distribution/Client/ProjectPlanning.hs | bsd-3-clause | 100,058 | 0 | 28 | 28,841 | 12,842 | 7,008 | 5,834 | 1,357 | 7 |
module Htads where
import System.IO
import qualified Data.Char as Char
import qualified Data.Map.Strict as Map
import qualified Data.List as List
import qualified Data.Text as Text
import qualified Data.Maybe as Maybe
import qualified Data.Set as Set
import GHC.Generics
-- local module
import Alias
import qualified Util as U
type TextWord = String
type RoomName = TextWord
type ItemName = TextWord
type ItemDesc = String
type WordSet = Set.Set TextWord
data Compass = North | NorthEast | East | SouthEast | South | SouthWest | West | NorthWest
deriving (Ord, Eq, Show, Read, Generic)
data Verb = Look | Go Compass | Examine ItemDesc | Get ItemDesc | Inventory | Quit | Skip RoomName | Error String
deriving (Show)
data Result = Message String | Start | End
deriving (Show, Eq)
data ItemAttribute = Fixed | Bulky | Score Int
deriving (Show, Generic)
type Connection = Map.Map Compass String
type AliasMap = Map.Map String String
type RoomMap = Map.Map RoomName Room
type ItemMap = Map.Map TextWord Item
data Room = Room {
summary :: RoomName
, description :: String
, connections :: Connection
} deriving (Show, Generic)
data Item = Item {
itemId :: TextWord
, nouns :: [TextWord]
, adjectives :: [TextWord]
, itemDescription :: String
, itemAttributes :: [ItemAttribute]
, startLocation :: String
} deriving (Show, Generic)
data Object = ObjectRoom Room | ObjectItem Item
deriving (Show)
itemName :: Item -> ItemName
itemName item = Text.unpack $ Text.strip $ Text.pack $ adjs ++ " " ++ noun
where adjs = if null $ adjectives item
then ""
else List.intercalate " " $ adjectives item
noun = head $ nouns item
data PlayerInfo = PlayerInfo {
currentRoom :: RoomName
, visitedRooms :: WordSet
, inventory :: WordSet
} deriving (Show)
data WorldDefinition = WorldDefinition {
roomMap :: RoomMap
, itemMap :: ItemMap
} deriving (Show)
lookupItem :: WorldDefinition -> ItemName -> Item
lookupItem wd name = maybe (error $ "missing item " ++ name) id $ Map.lookup name $ itemMap wd
lookupRoom :: WorldDefinition -> RoomName -> Room
lookupRoom wd name = maybe (error $ "missing room " ++ name) id $ Map.lookup name $ roomMap wd
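-- | All the phrases that can refer to an item. For example (informal), an item
-- with @adjectives = ["rusty","old"]@ and @nouns = ["key"]@ is matched by
-- "rusty key", "old key", "rusty old key" and plain "key" (the exact ordering
-- depends on 'U.combinations').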
getItemDescriptions :: Item -> [String]
getItemDescriptions item = [ a ++ " " ++ n | a <- adjectPhrases, n <- nounPhrases ] ++ nounPhrases
where adjectPhrases = List.map (List.intercalate " ") $ List.concatMap
genComb [1..(length adjects)]
nounPhrases = nouns item
adjects = adjectives item
genComb n = U.combinations n adjects
itemIsFixed :: Item -> Bool
itemIsFixed = Maybe.isJust . List.find isFixed . itemAttributes
where isFixed Fixed = True
isFixed _ = False
getItemScore :: Item -> Int
getItemScore = maybe 0 exScore . List.find isScore . itemAttributes
where isScore (Score _) = True
isScore _ = False
exScore (Score n) = n
itemByDesc :: [Item] -> String -> Maybe Item
itemByDesc items itemDesc = List.find matches items
where matches item = itemDesc `elem` getItemDescriptions item
data WorldState = WorldState {
worldDefinition :: WorldDefinition
, playerInfo :: PlayerInfo
, roomItemMap :: Map.Map RoomName [ItemName]
, aliases :: AliasMap
} deriving (Show)
makeWorldState :: WorldDefinition -> AliasMap -> WorldState
makeWorldState wd aliasMap = WorldState wd (PlayerInfo "<none>" Set.empty Set.empty) (generateWorldItemMap wd) aliasMap
where
generateWorldItemMap wd =
Map.fromListWith (++) $ map (\pair -> (startLocation $ snd pair, [fst pair])) $ Map.assocs $ itemMap wd
getItemsFromRoom :: WorldState -> RoomName -> [Item]
getItemsFromRoom ws roomName = map (lookupItem $ worldDefinition ws) itemNames
where itemNames = maybe [] id $ Map.lookup roomName $ roomItemMap ws
getRoomDescription :: WorldState -> RoomName -> String
getRoomDescription ws roomName =
"\n_" ++ (summary room) ++ "_\n" ++ (description room) ++ "\n" ++ itemDesc
where room = lookupRoom (worldDefinition ws) roomName
items = getItemsFromRoom ws roomName
itemDesc = if null items
then ""
else "You see a " ++ List.intercalate ", a " (map itemName items) ++ "."
parseCompass :: String -> Maybe Compass
parseCompass dir = case dir of
"north" -> Just North
"northeast" -> Just NorthEast
"northwest" -> Just NorthWest
"east" -> Just East
"west" -> Just West
"south" -> Just South
"southeast" -> Just SouthEast
"southwest" -> Just SouthWest
_ -> Nothing
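-- | Turn a raw command line into a 'Verb'. For example (assuming no alias
-- rewrites the input): "go north" parses to @Go North@, and "get rusty key"
-- parses to @Get "rusty key"@.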
parseCommand :: WorldState -> String -> Verb
parseCommand ws cmdline =
case cmd of
"go" -> maybe (Error $ "Unrecognized direction " ++ head rest) Go $ parseCompass $ head rest
"get" -> Get restStr
"inventory" -> Inventory
"look" -> Look
":j" -> Skip $ head rest
"quit" -> Quit
"examine" -> Examine restStr
_ -> Error $ "Unrecognized command " ++ cmd
where wordList = words $ map Char.toLower $ translateCommand (aliases ws) cmdline
cmd = head wordList
rest = tail wordList
restStr = List.intercalate " " rest
translateCommand aliases cmd = maybe cmd id $ Map.lookup cmd aliases
goToRoom :: WorldState -> RoomName -> (WorldState, Result)
goToRoom ws roomName =
let oldPi = (playerInfo ws)
pi = oldPi { currentRoom = roomName
, visitedRooms = Set.insert roomName (visitedRooms oldPi) }
msg = if roomName `Set.member` (visitedRooms oldPi)
then summary newRoom
else getRoomDescription ws roomName in
(ws { playerInfo = pi }, Message msg)
where newRoom = lookupRoom (worldDefinition ws) roomName
examineItem :: WorldState -> ItemDesc -> (WorldState, Result)
examineItem ws itemDesc =
case maybeItem of
Just item -> (ws, Message $ "It looks like a " ++ itemDesc)
Nothing -> (ws, Message $ "There is no " ++ itemDesc ++ " here.")
where roomName = currentRoom (playerInfo ws)
roomItems = case Map.lookup roomName $ roomItemMap ws of
Just lst -> map (lookupItem $ worldDefinition ws) lst
Nothing -> []
playerItems = map (lookupItem (worldDefinition ws)) $ Set.elems (inventory (playerInfo ws))
maybeItem = itemByDesc (roomItems ++ playerItems) itemDesc
tryPickupItem :: WorldState -> ItemDesc -> (WorldState, Result)
tryPickupItem ws itemDesc =
case maybeItem of
Just item ->
if itemIsFixed item
then (ws, Message $ desc ++ " cannot be picked up.")
else let pi = (playerInfo ws)
newPi = pi { inventory = Set.insert iId (inventory pi) }
newIm = Map.update removeItem roomName (roomItemMap ws) in
(ws { playerInfo = newPi,
roomItemMap = newIm
}, Message $ desc ++ " picked up." )
where removeItem itemList = Just $ List.delete iId itemList
name = itemName item
desc = itemDescription item
iId = itemId item
Nothing -> (ws, Message $ "There is no " ++ itemDesc ++ " here.")
where roomName = currentRoom (playerInfo ws)
items = case Map.lookup roomName $ roomItemMap ws of
Just lst -> map (lookupItem $ worldDefinition ws) lst
Nothing -> []
maybeItem = itemByDesc items itemDesc
showInventory :: WorldState -> (WorldState, Result)
showInventory ws = (ws, Message $ "You are carrying:\n " ++ List.intercalate "\n " items )
where items =
Set.elems $ Set.map
(itemDescription . lookupItem (worldDefinition ws))
(inventory (playerInfo ws))
evalString :: WorldState -> String -> (WorldState, Result)
evalString ws cmdline =
case parseCommand ws cmdline of
Look -> (ws, Message $ getRoomDescription ws roomName)
Go dir -> case Map.lookup dir (connections room) of
Just roomName -> goToRoom ws roomName
Nothing -> (ws, Message "You can't go that direction")
Skip roomName -> goToRoom ws roomName
Examine itemDesc -> examineItem ws itemDesc
Get itemDesc -> tryPickupItem ws itemDesc
Inventory -> showInventory ws
Quit -> (ws, End)
Error msg -> (ws, Message msg)
where roomName = currentRoom $ playerInfo ws
room = lookupRoom (worldDefinition ws) roomName
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
evalAndPrint :: (WorldState, Result) -> String -> IO (WorldState, Result)
evalAndPrint (ws, res) expr =
do let (nws, result) = evalString ws expr
case result of
Message msg -> putStrLn $ U.wrap 60 msg
End -> putStrLn "Exiting the game!"
return (nws, result)
until_ :: Monad m => (s -> Bool) -> m a -> (s -> a -> m s) -> s -> m s
until_ pred prompt action state =
do cmdline <- prompt
res <- action state cmdline
if pred res
then return state
else until_ pred prompt action res
done :: (WorldState, Result) -> Bool
done (ws, res) = res == End
runRepl :: (WorldState, Result) -> IO (WorldState, Result)
runRepl = until_ done (readPrompt "> ") evalAndPrint
getScore :: WorldState -> Int
getScore ws =
sum $ Set.elems $ Set.map (getItemScore . (lookupItem (worldDefinition ws))) (inventory (playerInfo ws))
runAdventure :: RoomMap -> ItemMap -> AliasMap -> IO (WorldState, Result)
runAdventure roomMap itemMap aliasMap =
do let ws = makeWorldState (WorldDefinition roomMap itemMap) aliasMap
evalAndPrint (ws, Start) ":j start" >>= runRepl
| Danl2620/htads | src/Htads.hs | bsd-3-clause | 9,945 | 0 | 17 | 2,673 | 3,143 | 1,643 | 1,500 | 222 | 9 |
module TicTacToe.Game where
import qualified TicTacToe.Data.Board as B
import qualified TicTacToe.Data.Move as M
import qualified TicTacToe.Data.Token as T
import qualified TicTacToe.Internal.Matrix as MX
import qualified TicTacToe.Internal.Move as IM
startingBoard :: B.Board
startingBoard = B.Board $ MX.getCells MX.startingMatrix
startingMoves :: T.Token -> [M.Move]
startingMoves token = map (IM.mkMove token MX.startingMatrix) [1..9]
| rodamber/haskell-tic-tac-toe | src/TicTacToe/Game.hs | bsd-3-clause | 460 | 0 | 8 | 71 | 122 | 76 | 46 | 10 | 1 |
{-
Copyright 2013-2019 Mario Blazevic
License: BSD3 (see BSD3-LICENSE.txt file)
-}
-- | This module defines the monoid transformer data type 'Concat'.
--
{-# LANGUAGE Haskell2010 #-}
module Data.Monoid.Instances.Concat (
Concat, concatenate, extract, force
)
where
import Control.Applicative -- (Applicative(..))
import Control.Arrow (first)
import qualified Data.Foldable as Foldable
import qualified Data.List as List
import Data.String (IsString(..))
import Data.Semigroup (Semigroup(..))
import Data.Monoid (Monoid(..), First(..), Sum(..))
import Data.Semigroup.Cancellative (LeftReductive(..), RightReductive(..))
import Data.Semigroup.Factorial (Factorial(..), StableFactorial)
import Data.Monoid.GCD (LeftGCDMonoid(..), RightGCDMonoid(..))
import Data.Monoid.Null (MonoidNull(null), PositiveMonoid)
import Data.Monoid.Factorial (FactorialMonoid(..))
import Data.Monoid.Textual (TextualMonoid(..))
import qualified Data.Monoid.Factorial as Factorial
import qualified Data.Monoid.Textual as Textual
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
import Prelude hiding (all, any, break, filter, foldl, foldl1, foldr, foldr1, map, concatMap,
length, null, reverse, scanl, scanr, scanl1, scanr1, span, splitAt, pi)
-- | @'Concat'@ is a transparent monoid transformer. The behaviour of the @'Concat' a@ instances of monoid subclasses is
-- identical to the behaviour of their @a@ instances, up to the 'pure' isomorphism.
--
-- The only purpose of 'Concat' then is to change the performance characteristics of various operations. Most
-- importantly, injecting a monoid into 'Concat' has the effect of making 'mappend' a constant-time operation. The
-- `splitPrimePrefix` and `splitPrimeSuffix` operations are amortized to constant time, provided that only one or the
-- other is used. Using both operations alternately will trigger the worst-case behaviour of O(n).
--
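-- For example (a usage sketch; 'String' stands in for any 'PositiveMonoid'):
--
-- > force (pure "ab" <> pure "cd" <> pure "ef") == "abcdef"
--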
data Concat a = Leaf a
| Concat a :<> Concat a
deriving Show
{-# DEPRECATED concatenate, extract "Concat is not wrapping Seq any more, don't use concatenate nor extract." #-}
concatenate :: PositiveMonoid a => Seq a -> Concat a
concatenate q
| Foldable.all null q = mempty
| otherwise = Foldable.foldr (\a c-> if null a then c else Leaf a <> c) mempty q
extract :: Concat a -> Seq a
extract = Seq.fromList . Foldable.toList
force :: Semigroup a => Concat a -> a
force (Leaf x) = x
force (x :<> y) = force x <> force y
instance (Eq a, Semigroup a) => Eq (Concat a) where
x == y = force x == force y
instance (Ord a, Semigroup a) => Ord (Concat a) where
compare x y = compare (force x) (force y)
instance Functor Concat where
fmap f (Leaf x) = Leaf (f x)
fmap f (l :<> r) = fmap f l :<> fmap f r
instance Applicative Concat where
pure = Leaf
Leaf f <*> x = f <$> x
(f1 :<> f2) <*> x = (f1 <*> x) :<> (f2 <*> x)
instance Foldable.Foldable Concat where
fold (Leaf x) = x
fold (x :<> y) = Foldable.fold x `mappend` Foldable.fold y
foldMap f (Leaf x) = f x
foldMap f (x :<> y) = Foldable.foldMap f x `mappend` Foldable.foldMap f y
foldl f a (Leaf x) = f a x
foldl f a (x :<> y) = Foldable.foldl f (Foldable.foldl f a x) y
foldl' f a (Leaf x) = f a x
foldl' f a (x :<> y) = let a' = Foldable.foldl' f a x in a' `seq` Foldable.foldl' f a' y
foldr f a (Leaf x) = f x a
foldr f a (x :<> y) = Foldable.foldr f (Foldable.foldr f a y) x
foldr' f a (Leaf x) = f x a
foldr' f a (x :<> y) = let a' = Foldable.foldr' f a y in Foldable.foldr' f a' x
instance PositiveMonoid a => Semigroup (Concat a) where
x <> y
| null x = y
| null y = x
| otherwise = x :<> y
instance PositiveMonoid a => Monoid (Concat a) where
mempty = Leaf mempty
mappend = (<>)
instance PositiveMonoid a => MonoidNull (Concat a) where
null (Leaf x) = null x
null _ = False
instance PositiveMonoid a => PositiveMonoid (Concat a)
instance (LeftReductive a, StableFactorial a, PositiveMonoid a) => LeftReductive (Concat a) where
stripPrefix (Leaf x) (Leaf y) = Leaf <$> stripPrefix x y
stripPrefix (xp :<> xs) y = stripPrefix xp y >>= stripPrefix xs
stripPrefix x (yp :<> ys) = case (stripPrefix x yp, stripPrefix yp x)
of (Just yps, _) -> Just (yps <> ys)
(Nothing, Nothing) -> Nothing
(Nothing, Just xs) -> stripPrefix xs ys
instance (RightReductive a, StableFactorial a, PositiveMonoid a) => RightReductive (Concat a) where
stripSuffix (Leaf x) (Leaf y) = Leaf <$> stripSuffix x y
stripSuffix (xp :<> xs) y = stripSuffix xs y >>= stripSuffix xp
stripSuffix x (yp :<> ys) = case (stripSuffix x ys, stripSuffix ys x)
of (Just ysp, _) -> Just (yp <> ysp)
(Nothing, Nothing) -> Nothing
(Nothing, Just xp) -> stripSuffix xp yp
instance (LeftGCDMonoid a, StableFactorial a, PositiveMonoid a) => LeftGCDMonoid (Concat a) where
stripCommonPrefix (Leaf x) (Leaf y) = map3 Leaf (stripCommonPrefix x y)
stripCommonPrefix (xp :<> xs) y
| null xps = (xp <> xsp, xss, yss)
| otherwise = (xpp, xps <> xs, ys)
where (xpp, xps, ys) = stripCommonPrefix xp y
(xsp, xss, yss) = stripCommonPrefix xs ys
stripCommonPrefix x (yp :<> ys)
| null yps = (yp <> ysp, xss, yss)
| otherwise = (ypp, xs, yps <> ys)
where (ypp, xs, yps) = stripCommonPrefix x yp
(ysp, xss, yss) = stripCommonPrefix xs ys
instance (RightGCDMonoid a, StableFactorial a, PositiveMonoid a) => RightGCDMonoid (Concat a) where
stripCommonSuffix (Leaf x) (Leaf y) = map3 Leaf (stripCommonSuffix x y)
stripCommonSuffix (xp :<> xs) y
| null xsp = (xpp, ypp, xps <> xs)
| otherwise = (xp <> xsp, yp, xss)
where (xsp, yp, xss) = stripCommonSuffix xs y
(xpp, ypp, xps) = stripCommonSuffix xp yp
stripCommonSuffix x (yp :<> ys)
| null ysp = (xpp, ypp, yps <> ys)
| otherwise = (xp, yp <> ysp, yss)
where (xp, ysp, yss) = stripCommonSuffix x ys
(xpp, ypp, yps) = stripCommonSuffix xp yp
instance (Factorial a, PositiveMonoid a) => Factorial (Concat a) where
factors c = toList c []
where toList (Leaf x) rest
| null x = rest
| otherwise = (Leaf <$> factors x) ++ rest
toList (x :<> y) rest = toList x (toList y rest)
primePrefix (Leaf x) = Leaf (primePrefix x)
primePrefix (x :<> _) = primePrefix x
primeSuffix (Leaf x) = Leaf (primeSuffix x)
primeSuffix (_ :<> y) = primeSuffix y
foldl f = Foldable.foldl g
where g = Factorial.foldl (\a-> f a . Leaf)
foldl' f = Foldable.foldl' g
where g = Factorial.foldl' (\a-> f a . Leaf)
foldr f = Foldable.foldr g
where g a b = Factorial.foldr (f . Leaf) b a
foldMap f = Foldable.foldMap (Factorial.foldMap (f . Leaf))
length x = getSum $ Foldable.foldMap (Sum . length) x
reverse (Leaf x) = Leaf (reverse x)
reverse (x :<> y) = reverse y :<> reverse x
instance (FactorialMonoid a, PositiveMonoid a) => FactorialMonoid (Concat a) where
splitPrimePrefix (Leaf x) = map2 Leaf <$> splitPrimePrefix x
splitPrimePrefix (x :<> y) = ((<> y) <$>) <$> splitPrimePrefix x
splitPrimeSuffix (Leaf x) = map2 Leaf <$> splitPrimeSuffix x
splitPrimeSuffix (x :<> y) = first (x <>) <$> splitPrimeSuffix y
span p (Leaf x) = map2 Leaf (Factorial.span (p . Leaf) x)
span p (x :<> y)
| null xs = (x <> yp, ys)
| otherwise = (xp, xs :<> y)
where (xp, xs) = Factorial.span p x
(yp, ys) = Factorial.span p y
spanMaybe s0 f (Leaf x) = first2 Leaf (Factorial.spanMaybe s0 (\s-> f s . Leaf) x)
spanMaybe s0 f (x :<> y)
| null xs = (x :<> yp, ys, s2)
| otherwise = (xp, xs :<> y, s1)
where (xp, xs, s1) = Factorial.spanMaybe s0 f x
(yp, ys, s2) = Factorial.spanMaybe s1 f y
spanMaybe' s0 f c = seq s0 $
case c
of Leaf x -> first2 Leaf (Factorial.spanMaybe' s0 (\s-> f s . Leaf) x)
x :<> y -> let (xp, xs, s1) = Factorial.spanMaybe' s0 f x
(yp, ys, s2) = Factorial.spanMaybe' s1 f y
in if null xs then (x :<> yp, ys, s2) else (xp, xs :<> y, s1)
split p = Foldable.foldr splitNext [mempty]
where splitNext a ~(xp:xs) =
let as = Leaf <$> Factorial.split (p . Leaf) a
in if null xp
then as ++ xs
else init as ++ (last as <> xp):xs
splitAt 0 c = (mempty, c)
splitAt n (Leaf x) = map2 Leaf (Factorial.splitAt n x)
splitAt n (x :<> y)
| k < n = (x :<> yp, ys)
| k > n = (xp, xs :<> y)
| otherwise = (x, y)
where k = length x
(yp, ys) = splitAt (n - k) y
(xp, xs) = splitAt n x
instance (Factorial a, PositiveMonoid a) => StableFactorial (Concat a)
instance (IsString a) => IsString (Concat a) where
fromString s = Leaf (fromString s)
instance (Eq a, TextualMonoid a, StableFactorial a, PositiveMonoid a) => TextualMonoid (Concat a) where
fromText t = Leaf (fromText t)
singleton = Leaf . singleton
splitCharacterPrefix (Leaf x) = (Leaf <$>) <$> splitCharacterPrefix x
splitCharacterPrefix (x :<> y) = ((<> y) <$>) <$> splitCharacterPrefix x
characterPrefix (Leaf x) = characterPrefix x
characterPrefix (x :<> _) = characterPrefix x
map f x = map f <$> x
toString ft x = List.concatMap (toString $ ft . Leaf) (Foldable.toList x)
toText ft x = Text.concat (toText (ft . Leaf) <$> Foldable.toList x)
foldl ft fc = Foldable.foldl g
where g = Textual.foldl (\a-> ft a . Leaf) fc
foldl' ft fc = Foldable.foldl' g
where g = Textual.foldl' (\a-> ft a . Leaf) fc
foldr ft fc = Foldable.foldr g
where g a b = Textual.foldr (ft . Leaf) fc b a
any p = Foldable.any (any p)
all p = Foldable.all (all p)
span pt pc (Leaf x) = map2 Leaf (Textual.span (pt . Leaf) pc x)
span pt pc (x :<> y)
| null xs = (x <> yp, ys)
| otherwise = (xp, xs :<> y)
where (xp, xs) = Textual.span pt pc x
(yp, ys) = Textual.span pt pc y
span_ bt pc (Leaf x) = map2 Leaf (Textual.span_ bt pc x)
span_ bt pc (x :<> y)
| null xs = (x <> yp, ys)
| otherwise = (xp, xs :<> y)
where (xp, xs) = Textual.span_ bt pc x
(yp, ys) = Textual.span_ bt pc y
break pt pc = Textual.span (not . pt) (not . pc)
takeWhile_ bt pc = fst . span_ bt pc
dropWhile_ bt pc = snd . span_ bt pc
break_ bt pc = span_ (not bt) (not . pc)
spanMaybe s0 ft fc (Leaf x) = first2 Leaf (Textual.spanMaybe s0 (\s-> ft s . Leaf) fc x)
spanMaybe s0 ft fc (x :<> y)
| null xs = (x :<> yp, ys, s2)
| otherwise = (xp, xs :<> y, s1)
where (xp, xs, s1) = Textual.spanMaybe s0 ft fc x
(yp, ys, s2) = Textual.spanMaybe s1 ft fc y
spanMaybe' s0 ft fc c = seq s0 $
case c
of Leaf x -> first2 Leaf (Textual.spanMaybe' s0 (\s-> ft s . Leaf) fc x)
x :<> y -> let (xp, xs, s1) = Textual.spanMaybe' s0 ft fc x
(yp, ys, s2) = Textual.spanMaybe' s1 ft fc y
in if null xs then (x :<> yp, ys, s2) else (xp, xs :<> y, s1)
spanMaybe_ s0 fc (Leaf x) = first2 Leaf (Textual.spanMaybe_ s0 fc x)
spanMaybe_ s0 fc (x :<> y)
| null xs = (x :<> yp, ys, s2)
| otherwise = (xp, xs :<> y, s1)
where (xp, xs, s1) = Textual.spanMaybe_ s0 fc x
(yp, ys, s2) = Textual.spanMaybe_ s1 fc y
spanMaybe_' s0 fc c = seq s0 $
case c
of Leaf x -> first2 Leaf (Textual.spanMaybe_' s0 fc x)
x :<> y -> let (xp, xs, s1) = Textual.spanMaybe_' s0 fc x
(yp, ys, s2) = Textual.spanMaybe_' s1 fc y
in if null xs then (x :<> yp, ys, s2) else (xp, xs :<> y, s1)
split p = Foldable.foldr splitNext [mempty]
where splitNext a ~(xp:xs) =
let as = Leaf <$> Textual.split p a
in if null xp
then as ++ xs
else init as ++ (last as <> xp):xs
find p x = getFirst $ Foldable.foldMap (First . find p) x
elem i = Foldable.any (Textual.elem i)
-- Utility functions
map2 :: (a -> b) -> (a, a) -> (b, b)
map2 f (x, y) = (f x, f y)
map3 :: (a -> b) -> (a, a, a) -> (b, b, b)
map3 f (x, y, z) = (f x, f y, f z)
first2 :: (a -> b) -> (a, a, c) -> (b, b, c)
first2 f (x, y, z) = (f x, f y, z)
| blamario/monoid-subclasses | src/Data/Monoid/Instances/Concat.hs | bsd-3-clause | 12,540 | 0 | 16 | 3,481 | 5,428 | 2,822 | 2,606 | 243 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, ScopedTypeVariables, MagicHash #-}
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.List
-- Copyright : (c) The University of Glasgow 1994-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The List data type and its operations
--
-----------------------------------------------------------------------------
-- NOTE: This code was adapted from the GHC.List module specified above.
-- All licensing remains as specified above.
module MyList (
-- [] (..), -- built-in syntax; can't be used in export list
map, (++), filter, concat,
head, last, tail, init, uncons, null, length, (!!),
intersperse, intercalate, subsequences, permutations,
foldl, foldl1, foldr, foldr1,
iterate, repeat, replicate, cycle,
take, drop, sum, product, maximum, minimum, splitAt, takeWhile, dropWhile,
span, break, reverse, and, or,
elem, notElem, lookup,
concatMap,
zip, zipWith, unzip, sort
) where
import Data.Maybe
import GHC.Base
import GHC.Num (Num(..))
import GHC.Integer (Integer)
infixl 9 !!
infix 4 `elem`, `notElem`
sort :: (Ord a) => [a] -> [a]
sort = foldr (insertBy compare) []
-- | The non-overloaded version of 'insert'.
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy _ x [] = [x]
insertBy cmp x ys@(y:ys')
= case cmp x y of
GT -> y : insertBy cmp x ys'
_ -> x : ys
--------------------------------------------------------------
-- List-manipulation functions
--------------------------------------------------------------
-- | Extract the first element of a list, which must be non-empty.
head :: [a] -> a
head (x:_) = x
head [] = badHead
{-# NOINLINE [1] head #-}
badHead :: a
badHead = errorEmptyList "head"
-- This rule is useful in cases like
-- head [y | (x,y) <- ps, x==t]
{-# RULES
"head/build" forall (g::forall b.(a->b->b)->b->b) .
head (build g) = g (\x _ -> x) badHead
"head/augment" forall xs (g::forall b. (a->b->b) -> b -> b) .
head (augment g xs) = g (\x _ -> x) (head xs)
#-}
-- | Decompose a list into its head and tail. If the list is empty,
-- returns 'Nothing'. If the list is non-empty, returns @'Just' (x, xs)@,
-- where @x@ is the head of the list and @xs@ its tail.
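--
-- For example:
--
-- > uncons []      == Nothing
-- > uncons [1,2,3] == Just (1,[2,3])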
--
-- @since 4.8.0.0
uncons :: [a] -> Maybe (a, [a])
uncons [] = Nothing
uncons (x:xs) = Just (x, xs)
-- | Extract the elements after the head of a list, which must be non-empty.
tail :: [a] -> [a]
tail (_:xs) = xs
tail [] = errorEmptyList "tail"
-- | Extract the last element of a list, which must be finite and non-empty.
last :: [a] -> a
#ifdef USE_REPORT_PRELUDE
last [x] = x
last (_:xs) = last xs
last [] = errorEmptyList "last"
#else
-- use foldl to allow fusion
last = foldl (\_ x -> x) (errorEmptyList "last")
#endif
-- | Return all the elements of a list except the last one.
-- The list must be non-empty.
init :: [a] -> [a]
#ifdef USE_REPORT_PRELUDE
init [x] = []
init (x:xs) = x : init xs
init [] = errorEmptyList "init"
#else
-- eliminate repeated cases
init [] = errorEmptyList "init"
init (x:xs) = init' x xs
where init' _ [] = []
init' y (z:zs) = y : init' z zs
#endif
-- | Test whether a list is empty.
null :: [a] -> Bool
null [] = True
null (_:_) = False
-- | /O(n)/. 'length' returns the length of a finite list as an 'Int'.
-- It is an instance of the more general 'Data.List.genericLength',
-- the result type of which may be any kind of number.
{-# NOINLINE [1] length #-}
length :: [a] -> Int
length xs = lenAcc xs 0
lenAcc :: [a] -> Int -> Int
lenAcc [] n = n
lenAcc (_:ys) n = lenAcc ys (n+1)
-- | 'filter', applied to a predicate and a list, returns the list of
-- those elements that satisfy the predicate; i.e.,
--
-- > filter p xs = [ x | x <- xs, p x]
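--
-- For example:
--
-- > filter even [1,2,3,4] == [2,4]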
{-# NOINLINE [1] filter #-}
filter :: (a -> Bool) -> [a] -> [a]
filter _pred [] = []
filter pred (x:xs)
| pred x = x : filter pred xs
| otherwise = filter pred xs
-- Note the filterFB rule, which has p and q the "wrong way round" in the RHS.
-- filterFB (filterFB c p) q a b
-- = if q a then filterFB c p a b else b
-- = if q a then (if p a then c a b else b) else b
-- = if q a && p a then c a b else b
-- = filterFB c (\x -> q x && p x) a b
-- I originally wrote (\x -> p x && q x), which is wrong, and actually
-- gave rise to a live bug report. SLPJ.
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a list, reduces the list
-- using the binary operator, from left to right:
--
-- > foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
--
-- The list must be finite.
-- We write foldl as a non-recursive thing, so that it
-- can be inlined, and then (often) strictness-analysed,
-- and hence the classic space leak on foldl (+) 0 xs
foldl :: forall a b. (b -> a -> b) -> b -> [a] -> b
{-# INLINE foldl #-}
foldl k z0 xs =
foldr (\(v::a) (fn::b->b) -> (\(z::b) -> fn (k z v))) (id :: b -> b) xs z0
-- See Note [Left folds via right fold]
{-
Note [Left folds via right fold]
Implementing foldl et. al. via foldr is only a good idea if the compiler can
optimize the resulting code (eta-expand the recursive "go"). See #7994.
We hope that one of the two measures kicks in:
* Call Arity (-fcall-arity, enabled by default) eta-expands it if it can see
all calls and determine that the arity is large.
* The oneShot annotation gives a hint to the regular arity analysis that
it may assume that the lambda is called at most once.
See [One-shot lambdas] in CoreArity and especially [Eta expanding thunks]
in CoreArity.
The oneShot annotations used in this module are correct, as we only use them in
arguments to foldr, where we know how the arguments are called.
-}
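{-
As a small worked example of the definition above, expanding a two-element
list shows that it really is a left fold:

    foldl k z0 [a,b]
      = foldr (\v fn -> \z -> fn (k z v)) id [a,b] z0
      = (\z -> (\z' -> id (k z' b)) (k z a)) z0
      = id (k (k z0 a) b)
      = (z0 `k` a) `k` b
-}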
-- ----------------------------------------------------------------------------
-- | 'foldl1' is a variant of 'foldl' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 f (x:xs) = foldl f x xs
foldl1 _ [] = errorEmptyList "foldl1"
-- -----------------------------------------------------------------------------
-- List sum and product
-- | The 'sum' function computes the sum of a finite list of numbers.
sum :: (Num a) => [a] -> a
{-# INLINE sum #-}
sum = foldl (+) 0
-- | The 'product' function computes the product of a finite list of numbers.
product :: (Num a) => [a] -> a
{-# INLINE product #-}
product = foldl (*) 1
-- foldr, foldr1, scanr, and scanr1 are the right-to-left duals of the
-- above functions.
-- | 'foldr1' is a variant of 'foldr' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 _ [x] = x
foldr1 f (x:xs) = f x (foldr1 f xs)
foldr1 _ [] = errorEmptyList "foldr1"
-- | 'maximum' returns the maximum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.maximumBy', which allows the
-- programmer to supply their own comparison function.
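--
-- > maximum [3,1,2] == 3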
maximum :: (Ord a) => [a] -> a
{-# INLINE [1] maximum #-}
maximum [] = errorEmptyList "maximum"
maximum xs = foldl1 max xs
-- | 'minimum' returns the minimum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.minimumBy', which allows the
-- programmer to supply their own comparison function.
minimum :: (Ord a) => [a] -> a
{-# INLINE [1] minimum #-}
minimum [] = errorEmptyList "minimum"
minimum xs = foldl1 min xs
-- | 'iterate' @f x@ returns an infinite list of repeated applications
-- of @f@ to @x@:
--
-- > iterate f x == [x, f x, f (f x), ...]
{-# NOINLINE [1] iterate #-}
iterate :: (a -> a) -> a -> [a]
iterate f x = x : iterate f (f x)
-- | 'repeat' @x@ is an infinite list, with @x@ the value of every element.
repeat :: a -> [a]
{-# INLINE [0] repeat #-}
-- The pragma just gives the rules more chance to fire
repeat x = xs where xs = x : xs
-- | 'replicate' @n x@ is a list of length @n@ with @x@ the value of
-- every element.
-- It is an instance of the more general 'Data.List.genericReplicate',
-- in which @n@ may be of any integral type.
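--
-- > replicate 3 'a' == "aaa"
-- > replicate 0 'a' == ""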
{-# INLINE replicate #-}
replicate :: Int -> a -> [a]
replicate n x = take n (repeat x)
-- | 'cycle' ties a finite list into a circular one, or equivalently,
-- the infinite repetition of the original list. It is the identity
-- on infinite lists.
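--
-- > take 7 (cycle [1,2,3]) == [1,2,3,1,2,3,1]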
cycle :: [a] -> [a]
cycle [] = errorEmptyList "cycle"
cycle xs = xs' where xs' = xs ++ xs'
-- | 'takeWhile', applied to a predicate @p@ and a list @xs@, returns the
-- longest prefix (possibly empty) of @xs@ of elements that satisfy @p@:
--
-- > takeWhile (< 3) [1,2,3,4,1,2,3,4] == [1,2]
-- > takeWhile (< 9) [1,2,3] == [1,2,3]
-- > takeWhile (< 0) [1,2,3] == []
--
{-# NOINLINE [1] takeWhile #-}
takeWhile :: (a -> Bool) -> [a] -> [a]
takeWhile _ [] = []
takeWhile p (x:xs)
| p x = x : takeWhile p xs
| otherwise = []
-- | 'dropWhile' @p xs@ returns the suffix remaining after 'takeWhile' @p xs@:
--
-- > dropWhile (< 3) [1,2,3,4,5,1,2,3] == [3,4,5,1,2,3]
-- > dropWhile (< 9) [1,2,3] == []
-- > dropWhile (< 0) [1,2,3] == [1,2,3]
--
dropWhile :: (a -> Bool) -> [a] -> [a]
dropWhile _ [] = []
dropWhile p xs@(x:xs')
| p x = dropWhile p xs'
| otherwise = xs
-- | 'take' @n@, applied to a list @xs@, returns the prefix of @xs@
-- of length @n@, or @xs@ itself if @n > 'length' xs@:
--
-- > take 5 "Hello World!" == "Hello"
-- > take 3 [1,2,3,4,5] == [1,2,3]
-- > take 3 [1,2] == [1,2]
-- > take 3 [] == []
-- > take (-1) [1,2] == []
-- > take 0 [1,2] == []
--
-- It is an instance of the more general 'Data.List.genericTake',
-- in which @n@ may be of any integral type.
take :: Int -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
take n _ | n <= 0 = []
take _ [] = []
take n (x:xs) = x : take (n-1) xs
#else
{- We always want to inline this to take advantage of a known sign of the length
argument. Note, however, that it's important for the RULES to grab take, rather
than trying to INLINE take immediately and then letting the RULES grab
unsafeTake. Presumably the latter approach doesn't grab it early enough; it led
to an allocation regression in nofib/fft2. -}
{-# INLINE [1] take #-}
take n xs | 0 < n = unsafeTake n xs
| otherwise = []
-- A version of take that takes the whole list if it's given an argument less
-- than 1.
{-# NOINLINE [1] unsafeTake #-}
unsafeTake :: Int -> [a] -> [a]
unsafeTake !_ [] = []
unsafeTake 1 (x: _) = [x]
unsafeTake m (x:xs) = x : unsafeTake (m - 1) xs
#endif
-- | 'drop' @n xs@ returns the suffix of @xs@
-- after the first @n@ elements, or @[]@ if @n > 'length' xs@:
--
-- > drop 6 "Hello World!" == "World!"
-- > drop 3 [1,2,3,4,5] == [4,5]
-- > drop 3 [1,2] == []
-- > drop 3 [] == []
-- > drop (-1) [1,2] == [1,2]
-- > drop 0 [1,2] == [1,2]
--
-- It is an instance of the more general 'Data.List.genericDrop',
-- in which @n@ may be of any integral type.
drop :: Int -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
drop n xs | n <= 0 = xs
drop _ [] = []
drop n (_:xs) = drop (n-1) xs
#else /* hack away */
{-# INLINE drop #-}
drop n ls
| n <= 0 = ls
| otherwise = unsafeDrop n ls
where
-- A version of drop that drops the whole list if given an argument
-- less than 1
unsafeDrop :: Int -> [a] -> [a]
unsafeDrop !_ [] = []
unsafeDrop 1 (_:xs) = xs
unsafeDrop m (_:xs) = unsafeDrop (m - 1) xs
#endif
-- | 'splitAt' @n xs@ returns a tuple where the first element is the prefix of
-- @xs@ of length @n@ and the second element is the remainder of the list:
--
-- > splitAt 6 "Hello World!" == ("Hello ","World!")
-- > splitAt 3 [1,2,3,4,5] == ([1,2,3],[4,5])
-- > splitAt 1 [1,2,3] == ([1],[2,3])
-- > splitAt 3 [1,2,3] == ([1,2,3],[])
-- > splitAt 4 [1,2,3] == ([1,2,3],[])
-- > splitAt 0 [1,2,3] == ([],[1,2,3])
-- > splitAt (-1) [1,2,3] == ([],[1,2,3])
--
-- It is equivalent to @('take' n xs, 'drop' n xs)@ when @n@ is not @_|_@
-- (@splitAt _|_ xs = _|_@).
-- 'splitAt' is an instance of the more general 'Data.List.genericSplitAt',
-- in which @n@ may be of any integral type.
splitAt :: Int -> [a] -> ([a],[a])
#ifdef USE_REPORT_PRELUDE
splitAt n xs = (take n xs, drop n xs)
#else
splitAt n ls
| n <= 0 = ([], ls)
| otherwise = splitAt' n ls
where
splitAt' :: Int -> [a] -> ([a], [a])
splitAt' _ [] = ([], [])
splitAt' 1 (x:xs) = ([x], xs)
splitAt' m (x:xs) = (x:xs', xs'')
where
(xs', xs'') = splitAt' (m - 1) xs
#endif /* USE_REPORT_PRELUDE */
-- | 'span', applied to a predicate @p@ and a list @xs@, returns a tuple whose
-- first element is the longest prefix (possibly empty) of @xs@ of elements that
-- satisfy @p@ and whose second element is the remainder of the list:
--
-- > span (< 3) [1,2,3,4,1,2,3,4] == ([1,2],[3,4,1,2,3,4])
-- > span (< 9) [1,2,3] == ([1,2,3],[])
-- > span (< 0) [1,2,3] == ([],[1,2,3])
--
-- 'span' @p xs@ is equivalent to @('takeWhile' p xs, 'dropWhile' p xs)@
span :: (a -> Bool) -> [a] -> ([a],[a])
span _ xs@[] = (xs, xs)
span p xs@(x:xs')
| p x = let (ys,zs) = span p xs' in (x:ys,zs)
| otherwise = ([],xs)
-- | 'break', applied to a predicate @p@ and a list @xs@, returns a tuple whose
-- first element is the longest prefix (possibly empty) of @xs@ of elements that
-- /do not satisfy/ @p@ and whose second element is the remainder of the list:
--
-- > break (> 3) [1,2,3,4,1,2,3,4] == ([1,2,3],[4,1,2,3,4])
-- > break (< 9) [1,2,3] == ([],[1,2,3])
-- > break (> 9) [1,2,3] == ([1,2,3],[])
--
-- 'break' @p@ is equivalent to @'span' ('not' . p)@.
break :: (a -> Bool) -> [a] -> ([a],[a])
#ifdef USE_REPORT_PRELUDE
break p = span (not . p)
#else
-- HBC version (stolen)
break _ xs@[] = (xs, xs)
break p xs@(x:xs')
| p x = ([],xs)
| otherwise = let (ys,zs) = break p xs' in (x:ys,zs)
#endif
-- | 'reverse' @xs@ returns the elements of @xs@ in reverse order.
-- @xs@ must be finite.
reverse :: [a] -> [a]
#ifdef USE_REPORT_PRELUDE
reverse = foldl (flip (:)) []
#else
reverse l = rev l []
where
rev [] a = a
rev (x:xs) a = rev xs (x:a)
#endif
-- | The 'subsequences' function returns the list of all subsequences of the argument.
--
-- > subsequences "abc" == ["","a","b","ab","c","ac","bc","abc"]
subsequences :: [a] -> [[a]]
subsequences xs = [] : nonEmptySubsequences xs
-- | The 'nonEmptySubsequences' function returns the list of all subsequences of the argument,
-- except for the empty list.
--
-- > nonEmptySubsequences "abc" == ["a","b","ab","c","ac","bc","abc"]
nonEmptySubsequences :: [a] -> [[a]]
nonEmptySubsequences [] = []
nonEmptySubsequences (x:xs) = [x] : foldr f [] (nonEmptySubsequences xs)
where f ys r = ys : (x : ys) : r
-- | The 'permutations' function returns the list of all permutations of the argument.
--
-- > permutations "abc" == ["abc","bac","cba","bca","cab","acb"]
permutations :: [a] -> [[a]]
permutations xs0 = xs0 : perms xs0 []
where
perms [] _ = []
perms (t:ts) is = foldr interleave (perms ts (t:is)) (permutations is)
where interleave xs r = let (_,zs) = interleave' id xs r in zs
interleave' _ [] r = (ts, r)
interleave' f (y:ys) r = let (us,zs) = interleave' (f . (y:)) ys r
in (y:us, f (t:y:us) : zs)
-- | The 'intersperse' function takes an element and a list and
-- \`intersperses\' that element between the elements of the list.
-- For example,
--
-- > intersperse ',' "abcde" == "a,b,c,d,e"
intersperse :: a -> [a] -> [a]
intersperse _ [] = []
intersperse sep (x:xs) = x : prependToAll sep xs
-- Not exported:
-- We want to make every element in the 'intersperse'd list available
-- as soon as possible to avoid space leaks. Experiments suggested that
-- a separate top-level helper is more efficient than a local worker.
prependToAll :: a -> [a] -> [a]
prependToAll _ [] = []
prependToAll sep (x:xs) = sep : x : prependToAll sep xs
-- | 'intercalate' @xs xss@ is equivalent to @('concat' ('intersperse' xs xss))@.
-- It inserts the list @xs@ in between the lists in @xss@ and concatenates the
-- result.
intercalate :: [a] -> [[a]] -> [a]
intercalate xs xss = concat (intersperse xs xss)
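-- For example:
--
-- > intercalate ", " ["one","two","three"] == "one, two, three"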
-- | 'and' returns the conjunction of a Boolean list. For the result to be
-- 'True', the list must be finite; 'False', however, results from a 'False'
-- value at a finite index of a finite or infinite list.
and :: [Bool] -> Bool
#ifdef USE_REPORT_PRELUDE
and = foldr (&&) True
#else
and [] = True
and (x:xs) = x && and xs
{-# NOINLINE [1] and #-}
{-# RULES
"and/build" forall (g::forall b.(Bool->b->b)->b->b) .
and (build g) = g (&&) True
#-}
#endif
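-- For illustration ('and' can return 'False' even for an infinite list):
--
-- > and [True, True]                 == True
-- > and (True : False : repeat True) == False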
-- | 'or' returns the disjunction of a Boolean list. For the result to be
-- 'False', the list must be finite; 'True', however, results from a 'True'
-- value at a finite index of a finite or infinite list.
or :: [Bool] -> Bool
#ifdef USE_REPORT_PRELUDE
or = foldr (||) False
#else
or [] = False
or (x:xs) = x || or xs
{-# NOINLINE [1] or #-}
{-# RULES
"or/build" forall (g::forall b.(Bool->b->b)->b->b) .
or (build g) = g (||) False
#-}
#endif
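-- For illustration ('or' can return 'True' even for an infinite list):
--
-- > or [False, True, False]          == True
-- > or (False : True : repeat False) == True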
-- | 'elem' is the list membership predicate, usually written in infix form,
-- e.g., @x \`elem\` xs@. For the result to be
-- 'False', the list must be finite; 'True', however, results from an element
-- equal to @x@ found at a finite index of a finite or infinite list.
elem :: (Eq a) => a -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
elem x = any (== x)
#else
elem _ [] = False
elem x (y:ys) = x==y || elem x ys
{-# NOINLINE [1] elem #-}
{-# RULES
"elem/build" forall x (g :: forall b . Eq a => (a -> b -> b) -> b -> b)
. elem x (build g) = g (\ y r -> (x == y) || r) False
#-}
#endif
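-- For example:
--
-- > 3 `elem` [1,2,3] == True
-- > 0 `elem` [1,2,3] == False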
-- | 'notElem' is the negation of 'elem'.
notElem :: (Eq a) => a -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
notElem x = all (/= x)
#else
notElem _ [] = True
notElem x (y:ys)= x /= y && notElem x ys
{-# NOINLINE [1] notElem #-}
{-# RULES
"notElem/build" forall x (g :: forall b . Eq a => (a -> b -> b) -> b -> b)
. notElem x (build g) = g (\ y r -> (x /= y) && r) True
#-}
#endif
-- | 'lookup' @key assocs@ looks up a key in an association list.
lookup :: (Eq a) => a -> [(a,b)] -> Maybe b
lookup _key [] = Nothing
lookup key ((x,y):xys)
| key == x = Just y
| otherwise = lookup key xys
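-- For example:
--
-- > lookup 'b' [('a',1),('b',2)] == Just 2
-- > lookup 'z' [('a',1),('b',2)] == Nothing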
-- | Map a function over a list and concatenate the results.
concatMap :: (a -> [b]) -> [a] -> [b]
concatMap f = foldr ((++) . f) []
{-# NOINLINE [1] concatMap #-}
{-# RULES
"concatMap" forall f xs . concatMap f xs =
build (\c n -> foldr (\x b -> foldr c b (f x)) n xs)
#-}
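-- For example:
--
-- > concatMap (\x -> [x, x]) [1,2,3] == [1,1,2,2,3,3]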
-- | Concatenate a list of lists.
concat :: [[a]] -> [a]
concat = foldr (++) []
{-# NOINLINE [1] concat #-}
{-# RULES
"concat" forall xs. concat xs =
build (\c n -> foldr (\x y -> foldr c y x) n xs)
-- We don't bother to turn non-fusible applications of concat back into concat
#-}
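-- For example:
--
-- > concat [[1,2],[],[3,4,5]] == [1,2,3,4,5]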
-- | List index (subscript) operator, starting from 0.
-- It is an instance of the more general 'Data.List.genericIndex',
-- which takes an index of any integral type.
(!!) :: [a] -> Int -> a
#ifdef USE_REPORT_PRELUDE
xs !! n | n < 0 = error "Prelude.!!: negative index"
[] !! _ = error "Prelude.!!: index too large"
(x:_) !! 0 = x
(_:xs) !! n = xs !! (n-1)
#else
-- We don't really want the errors to inline with (!!).
-- We may want to fuss around a bit with NOINLINE, and
-- if so we should be careful not to trip up known-bottom
-- optimizations.
tooLarge :: Int -> a
tooLarge _ = error (prel_list_str ++ "!!: index too large")
negIndex :: a
negIndex = error $ prel_list_str ++ "!!: negative index"
{-# INLINABLE (!!) #-}
xs !! n
| n < 0 = negIndex
| otherwise = foldr (\x r k -> case k of
0 -> x
_ -> r (k-1)) tooLarge xs n
#endif
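-- For example:
--
-- > [10,20,30] !! 1 == 20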
--------------------------------------------------------------
-- The zip family
--------------------------------------------------------------
----------------------------------------------
-- | 'zip' takes two lists and returns a list of corresponding pairs.
-- If one input list is short, excess elements of the longer list are
-- discarded.
--
-- 'zip' is right-lazy:
--
-- > zip [] _|_ = []
{-# NOINLINE [1] zip #-}
zip :: [a] -> [b] -> [(a,b)]
zip [] _bs = []
zip _as [] = []
zip (a:as) (b:bs) = (a,b) : zip as bs
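-- For illustration (the longer list is truncated):
--
-- > zip [1,2,3] "ab" == [(1,'a'),(2,'b')]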
-- The zipWith family generalises the zip family by zipping with the
-- function given as the first argument, instead of a tupling function.
----------------------------------------------
-- | 'zipWith' generalises 'zip' by zipping with the function given
-- as the first argument, instead of a tupling function.
-- For example, @'zipWith' (+)@ is applied to two lists to produce the
-- list of corresponding sums.
--
-- 'zipWith' is right-lazy:
--
-- > zipWith f [] _|_ = []
{-# NOINLINE [1] zipWith #-}
zipWith :: (a->b->c) -> [a]->[b]->[c]
zipWith _f [] _bs = []
zipWith _f _as [] = []
zipWith f (a:as) (b:bs) = f a b : zipWith f as bs
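-- For example:
--
-- > zipWith (+) [1,2,3] [10,20,30] == [11,22,33]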
-- | 'unzip' transforms a list of pairs into a list of first components
-- and a list of second components.
unzip :: [(a,b)] -> ([a],[b])
{-# INLINE unzip #-}
unzip = foldr (\(a,b) ~(as,bs) -> (a:as,b:bs)) ([],[])
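-- For example:
--
-- > unzip [(1,'a'),(2,'b')] == ([1,2],"ab")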
--------------------------------------------------------------
-- Error code
--------------------------------------------------------------
-- Common up near-identical calls to `error' to reduce the number of
-- constant strings created when compiled:
errorEmptyList :: String -> a
errorEmptyList fun =
error (prel_list_str ++ fun ++ ": empty list")
prel_list_str :: String
prel_list_str = "Prelude."
| sdzivanovich/cs1699_deliverable5 | MyList.hs | bsd-3-clause | 23,442 | 0 | 16 | 6,338 | 3,613 | 2,085 | 1,528 | 252 | 3 |
module Lava.Signal where
import Lava.Ref
import Lava.Sequent
import Lava.Error
import Data.List
( transpose
)
----------------------------------------------------------------
-- Signal, Symbol, S
newtype Signal a
= Signal Symbol
newtype Symbol
= Symbol (Ref (S Symbol))
data S s
= Bool Bool
| Inv s
| And [s]
| Or [s]
| Xor [s]
| VarBool String
| DelayBool s s
| Int Int
| Neg s
| Div s s
| Mod s s
| Plus [s]
| Times [s]
| Gte s s
| Equal [s]
| If s s s
| VarInt String
| DelayInt s s
symbol :: S Symbol -> Symbol
symbol = Symbol . ref
unsymbol :: Symbol -> S Symbol
unsymbol (Symbol r) = deref r
instance Eq (Signal a) where
Signal (Symbol r1) == Signal (Symbol r2) = r1 == r2
----------------------------------------------------------------
-- operations
-- on bits
bool :: Bool -> Signal Bool
bool b = lift0 (Bool b)
low, high :: Signal Bool
low = bool False
high = bool True
inv :: Signal Bool -> Signal Bool
inv = lift1 Inv
andl, orl, xorl :: [Signal Bool] -> Signal Bool
andl = liftl And
orl = liftl Or
xorl = liftl Xor
equalBool :: Signal Bool -> Signal Bool -> Signal Bool
equalBool x y = inv (xorl [x,y])
ifBool :: Signal Bool -> (Signal Bool, Signal Bool) -> Signal Bool
ifBool c (x,y) = orl[andl[c,x],andl[inv c,y]]
delayBool :: Signal Bool -> Signal Bool -> Signal Bool
delayBool = lift2 DelayBool
varBool :: String -> Signal Bool
varBool s = lift0 (VarBool s)
-- on ints
int :: Int -> Signal Int
int n = lift0 (Int n)
neg :: Signal Int -> Signal Int
neg = lift1 Neg
divide, modulo :: Signal Int -> Signal Int -> Signal Int
divide = lift2 Div
modulo = lift2 Mod
plusl, timesl :: [Signal Int] -> Signal Int
plusl = liftl Plus
timesl = liftl Times
equall :: [Signal Int] -> Signal Bool
equall = liftl Equal
gteInt :: Signal Int -> Signal Int -> Signal Bool
gteInt = lift2 Gte
equalInt :: Signal Int -> Signal Int -> Signal Bool
equalInt x y = equall [x,y]
ifInt :: Signal Bool -> (Signal Int, Signal Int) -> Signal a
ifInt c (x,y) = lift3 If c x y
delayInt :: Signal Int -> Signal Int -> Signal Int
delayInt = lift2 DelayInt
varInt :: String -> Signal Int
varInt s = lift0 (VarInt s)
-- liftings
lift0 :: S Symbol -> Signal a
lift0 oper = Signal (symbol oper)
lift1 :: (Symbol -> S Symbol) -> Signal a -> Signal b
lift1 oper (Signal a) = Signal (symbol (oper a))
lift2 :: (Symbol -> Symbol -> S Symbol) -> Signal a -> Signal b -> Signal c
lift2 oper (Signal a) (Signal b) = Signal (symbol (oper a b))
lift3 :: (Symbol -> Symbol -> Symbol -> S Symbol)
-> Signal a -> Signal b -> Signal c -> Signal d
lift3 oper (Signal a) (Signal b) (Signal c) = Signal (symbol (oper a b c))
liftl :: ([Symbol] -> S Symbol) -> [Signal a] -> Signal c
liftl oper sigas = Signal (symbol (oper (map (\(Signal a) -> a) sigas)))
----------------------------------------------------------------
-- evaluate
eval :: S (S a) -> S a
eval s =
case s of
Bool b -> Bool b
Inv (Bool b) -> Bool (not b)
And xs -> Bool . all bval $ xs
Or xs -> Bool . any bval $ xs
Xor xs -> Bool . (1 ==) . length . filter bval $ xs
Int n -> Int n
Neg (Int n) -> Int (-n)
Div (Int n1) (Int n2) -> Int (n1 `div` n2)
Mod (Int n1) (Int n2) -> Int (n1 `mod` n2)
Plus xs -> Int . sum . map nval $ xs
Times xs -> Int . product . map nval $ xs
Gte (Int n1) (Int n2) -> Bool (n1 >= n2)
Equal xs -> Bool . equal . map nval $ xs
If (Bool c) x y -> if c then x else y
DelayBool s s' -> wrong Lava.Error.DelayEval
DelayInt s s' -> wrong Lava.Error.DelayEval
VarBool s -> wrong Lava.Error.VarEval
VarInt s -> wrong Lava.Error.VarEval
where
bval (Bool b) = b
nval (Int n) = n
equal (x:y:xs) = x == y && equal (y:xs)
equal _ = True
evalLazy :: S (Maybe (S a)) -> Maybe (S a)
evalLazy s =
case s of
-- lazy
And xs
| any (`bval` False) xs -> bans False
Or xs
| any (`bval` True) xs -> bans True
Xor xs
| number (`bval` True) xs >= 2 -> bans False
-- strict
_ -> eval `fmap` sequent s
where
bans = Just . Bool
bval (Just (Bool b)) b' = b == b'
bval _ _ = False
number p = length . filter p
arguments :: S a -> [a]
arguments s =
case s of
Bool b -> []
Inv s -> [s]
And xs -> xs
Or xs -> xs
Xor xs -> xs
Int n -> []
Neg s -> [s]
Div s1 s2 -> [s1,s2]
Mod s1 s2 -> [s1,s2]
Plus xs -> xs
Times xs -> xs
Gte x y -> [x,y]
Equal xs -> xs
If x y z -> [x,y,z]
DelayBool s s' -> [s,s']
DelayInt s s' -> [s,s']
VarBool s -> []
VarInt s -> []
zips :: S [a] -> [S a]
zips s =
case s of
Bool b -> [Bool b]
Inv s -> map Inv s
And xs -> map And (transpose xs)
Or xs -> map Or (transpose xs)
Xor xs -> map Xor (transpose xs)
Int n -> [Int n]
Neg s -> map Neg s
Div s1 s2 -> zipWith Div s1 s2
Mod s1 s2 -> zipWith Mod s1 s2
Plus xs -> map Plus (transpose xs)
Times xs -> map Times (transpose xs)
Gte x y -> zipWith Gte x y
Equal xs -> map Equal (transpose xs)
If x y z -> zipWith3 If x y z
DelayBool s s' -> zipWith DelayBool s s'
DelayInt s s' -> zipWith DelayInt s s'
VarBool s -> [VarBool s]
VarInt s -> [VarInt s]
----------------------------------------------------------------
-- properties of S
instance Functor S where
fmap f s =
case s of
Bool b -> Bool b
Inv x -> Inv (f x)
And xs -> And (map f xs)
Or xs -> Or (map f xs)
Xor xs -> Xor (map f xs)
Int n -> Int n
Neg x -> Neg (f x)
Div x y -> Div (f x) (f y)
Mod x y -> Mod (f x) (f y)
Plus xs -> Plus (map f xs)
Times xs -> Times (map f xs)
Gte x y -> Gte (f x) (f y)
Equal xs -> Equal (map f xs)
If x y z -> If (f x) (f y) (f z)
DelayBool x y -> DelayBool (f x) (f y)
DelayInt x y -> DelayInt (f x) (f y)
VarBool v -> VarBool v
VarInt v -> VarInt v
instance Sequent S where
sequent s =
case s of
Bool b -> lift0 (Bool b)
Inv x -> lift1 Inv x
And xs -> liftl And xs
Or xs -> liftl Or xs
Xor xs -> liftl Xor xs
Int n -> lift0 (Int n)
Neg x -> lift1 Neg x
Div x y -> lift2 Div x y
Mod x y -> lift2 Mod x y
Plus xs -> liftl Plus xs
Times xs -> liftl Times xs
Gte x y -> lift2 Gte x y
Equal xs -> liftl Equal xs
If x y z -> lift3 If x y z
DelayBool x y -> lift2 DelayBool x y
DelayInt x y -> lift2 DelayInt x y
VarBool v -> lift0 (VarBool v)
VarInt v -> lift0 (VarInt v)
where
lift0 op =
do return op
lift1 op x =
do x' <- x
return (op x')
lift2 op x y =
do x' <- x
y' <- y
return (op x' y')
lift3 op x y z =
do x' <- x
y' <- y
z' <- z
return (op x' y' z')
liftl op xs =
do xs' <- sequence xs
return (op xs')
instance Show (Signal a) where
showsPrec n (Signal s) =
showsPrec n s
instance Show Symbol where
showsPrec n sym =
showsPrec n (unsymbol sym)
instance Show a => Show (S a) where
showsPrec n s =
case s of
Bool True -> showString "high"
Bool False -> showString "low"
Inv x -> showString "inv" . showList [x]
And xs -> showString "andl" . showList xs
Or xs -> showString "orl" . showList xs
Xor xs -> showString "xorl" . showList xs
Int i -> showsPrec n i
Neg x -> showString "-" . showsPrec n x
Div x y -> showString "idiv" . showList [x,y]
Mod x y -> showString "imod" . showList [x,y]
Plus xs -> showString "plusl" . showList xs
Times xs -> showString "timesl" . showList xs
Gte x y -> showString "gte" . showList [x,y]
Equal xs -> showString "equall" . showList xs
If x y z -> showString "ifThenElse" . showList [x,y,z]
DelayBool x y -> showString "delay" . showList [x,y]
DelayInt x y -> showString "delay" . showList [x,y]
VarBool s -> showString s
VarInt s -> showString s
_ -> showString "<<symbol>>"
----------------------------------------------------------------
-- the end.
| dfordivam/lava | Lava/Signal.hs | bsd-3-clause | 8,701 | 0 | 15 | 3,009 | 4,024 | 1,937 | 2,087 | 256 | 20 |
module Lib where
someFunc :: IO ()
someFunc = putStrLn "someFunc"
data CatA = AA
| AB
deriving (Eq, Show)
type FA = (CatA -> CatA)
data CatB = BA
| BB
deriving (Eq, Show)
type GB = (CatB -> CatB)
f :: CatA -> CatA
f AA = AB
f AB = AA
g :: CatB -> CatB
g BA = BB
g BB = BA
functor :: CatA -> CatB
functor AA = BA
functor AB = BB
functor' :: FA -> GB
functor' f = functor . f . inverse
where inverse :: CatB -> CatA
inverse BA = AA
inverse BB = AB
| matthewfranglen/category-set-graph | src/Lib.hs | bsd-3-clause | 523 | 0 | 7 | 184 | 217 | 118 | 99 | 25 | 2 |
import System.Environment
import System.Directory
import System.IO
import qualified Data.Text as T
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.IO as LIO
import qualified Data.Map as M
import qualified Data.List as DL
import Control.Monad
import Parser
import ParserTypes
import Interpretor
import Environment
import Builtins
main = do
arguments <- getArgs
handle arguments
handle :: [String] -> IO ()
handle ("-i":_) = repl newSymT
handle (f:_) = parse f
handle [] = putStrLn "usage: am [-i] [<file.am>]"
parse :: String -> IO ()
parse filePath = do
result <- parseFile filePath
case result of
Left err -> print err
Right prog -> do
interpret prog
interpret :: Program -> IO ()
interpret prog = do
syms <- return $ harvestSymbols (programFunctions prog) (SymbolTable M.empty)
mMain <- return $ mainExpr syms
case mMain of
Just e -> do
--putStrLn $ "interpret " ++ (show e)
out <- return $ eval syms e
case out of
Right out -> do
putStrLn $ "evaluated " ++ (show out)
Left msg -> do
putStrLn $ show msg
Nothing -> putStrLn "could not find main!"
repl :: Env -> IO ()
repl env = do
putStr ") "
hFlush stdout
line <- getLine
if DL.isInfixOf "fn " line
then replAddFunc env [line]
else
if DL.isInfixOf "import " line
then importFile env line
else apply env line
importFile :: Env -> String -> IO ()
importFile env importstmt =
let fileName = (unwords . (drop 1) . words) importstmt
in do
exists <- doesFileExist fileName
if exists
then do
mProgram <- parseFile fileName
case mProgram of
Left err -> do
print err
repl env
Right program -> do
syms <- return $ harvestSymbols (programFunctions program) env
putStrLn $ "imported file '" ++ fileName ++ "'"
repl syms
else do
putStrLn $ "file '" ++ fileName ++ "' does not exist"
repl env
apply :: Env -> String -> IO ()
apply env line = do
result <- parseExprFromStr line
case result of
Left err -> do
print err
repl env
Right expr -> do
out <- return $ eval env expr
case out of
Right out -> do
putStrLn $ "=> " ++ (show out)
repl env
Left msg -> do
putStrLn $ show msg
repl env
replAddFunc :: Env -> [String] -> IO ()
replAddFunc env lines = do
putStr "+ "
hFlush stdout
line <- getLine
if length line == 0
then do
result <- parseFuncFromStr (unwords lines)
case result of
Left err2 -> do
putStrLn "neither an expression nor a function given!"
repl env
Right func -> repl (defineSym env (funcName func) (SymFunc func))
else
replAddFunc env (lines ++ [line])
| planrich/abstractmachines | src/Main.hs | bsd-3-clause | 3,221 | 0 | 21 | 1,230 | 988 | 468 | 520 | 100 | 3 |
module Dang where
| elliottt/dang | src/Dang.hs | bsd-3-clause | 18 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# LANGUAGE ViewPatterns #-}
module Day1 (run) where
import Data.Char
import Data.List.Split
dirs :: [(Int, Int)]
dirs = [(0, 1), (1, 0), (0, -1), (-1, 0)]
start = ([(0, 0)], 0)
parse :: String -> (Int, Int)
parse = head . parseSaving
parseSaving :: String -> [(Int, Int)]
parseSaving = fst . foldl (flip move) start . splitOn ", "
firstRepeated :: Eq a => [a] -> a
firstRepeated (x:xs) =
case elem x xs of
True -> x
False -> firstRepeated xs
move :: String -> ([(Int, Int)], Int) -> ([(Int, Int)], Int)
move ('R':(read -> x)) = walk x . turn True
move ('L':(read -> x)) = walk x . turn False
turn :: Bool -> ([(Int, Int)], Int) -> ([(Int, Int)], Int)
turn True (pos, d) = (pos, (d + 1) `mod` length dirs)
turn False (pos, d) = (pos, (d - 1) `mod` length dirs)
walk :: Int -> ([(Int, Int)], Int) -> ([(Int, Int)], Int)
walk l ((px, py):ps, d)
| l > 0 = walk (l - 1) ((px + dx, py + dy):(px, py):ps, d)
| l <= 0 = ((px, py):ps, d)
where
(dx, dy) = dirs !! d
numBlocks :: (Int, Int) -> Int
numBlocks (x, y) = abs x + abs y
run :: IO ()
run = print $ numBlocks . firstRepeated . reverse $ parseSaving input
where parsed = parse input
input = "R3, L5, R2, L1, L2, R5, L2, R2, L2, L2, L1, R2, L2, R4, R4, R1, L2, L3, R3, L1, R2, L2, L4, R4, R5, L3, R3, L3, L3, R4, R5, L3, R3, L5, L1, L2, R2, L1, R3, R1, L1, R187, L1, R2, R47, L5, L1, L2, R4, R3, L3, R3, R4, R1, R3, L1, L4, L1, R2, L1, R4, R5, L1, R77, L5, L4, R3, L2, R4, R5, R5, L2, L2, R2, R5, L2, R194, R5, L2, R4, L5, L4, L2, R5, L3, L2, L5, R5, R2, L3, R3, R1, L4, R2, L1, R5, L1, R5, L1, L1, R3, L1, R5, R2, R5, R5, L4, L5, L5, L5, R3, L2, L5, L4, R3, R1, R1, R4, L2, L4, R5, R5, R4, L2, L2, R5, R5, L5, L2, R4, R4, L4, R1, L3, R1, L1, L1, L1, L4, R5, R4, L4, L4, R5, R3, L2, L2, R3, R1, R4, L3, R1, L4, R3, L3, L2, R2, R2, R2, L1, L4, R3, R2, R2, L3, R2, L3, L2, R4, L2, R3, L4, R5, R4, R1, R5, R3"
| ulyssesp/AoC | src/day1.hs | bsd-3-clause | 1,907 | 0 | 11 | 490 | 712 | 405 | 307 | -1 | -1 |
{-# LANGUAGE PolyKinds #-}
module ODE where
import FAD
import Taylor
import Data.Number.IReal -- .Rounded
import Data.Number.IReal(VarPrec(..))
import Data.Number.IReal.Auxiliary
import Data.Maybe
import Plot
{-
A solver for the initial value problem
x' = f t x, t >= t0
x t0 = x0
using a Taylor method, combined with Picard-Lindelöf's method to
ensure a validated enclosure.
The function solve below employs user-determined step-sizes and
number of steps to illustrate the method. Also, some tolerances
are chosen in an ad hoc way.
-}
-- Solver ----------------------------------------------------------------------
odeDerivs :: (Num a, Num b) => (Dif a -> Dif b -> Dif b) -> a -> b -> [b]
odeDerivs f t0 x0 = fromDif x
where xder = f (var t0) x
x = mkDif x0 xder
--solve :: (Dif IReal -> Dif IReal -> Dif IReal) -> (IReal,IReal) -> Int -> [(IReal,Int)] -> [(IReal,IReal)]
solve f p _ [] = [p]
solve f p n ((h,s):ps) = rs ++ solve f (head us) n ps
where (rs,us) = splitAt s (iterate (g h) p)
g h (t,x) = (t+h, prec 14 (hull [step (t, lower x), step (t, upper x)]))
hs = generalTerms h
step (t,x) = sum (take n $ zipWith (*) (odeDerivs f t x) hs) + err
where ti = t -+- (t+h)
xi = bound 0.1 Nothing
err = odeDerivs f ti xi !! n * hs !! n
bound rad maybeAns
| rad > 100 = error "Cannot verify existence"
| i1 `containedIn` i0 `atDecimals` (12+2) = bound (rad/2) (Just i0)
| otherwise = maybe (bound (2*rad) Nothing) id maybeAns
where i0 = (x - rad) -+- (x + rad)
i1 = x + h * unDif (f (var ti)) i0
{-
Examples from Tucker:
6.4.2 solve (\t x -> -t*x) (0,0+-1) 3 [(0.1,60),(0.05,100)]
6.4.3 solve (\t x -> x^2) (0,1 -+- 1.25) 3 [(0.05,8),(0.01,30),(0.001,50)]
6.4.4 solve (\t x -> -x^3) (0,1) 4 [(0.1,10),(0.2,15),(0.8,10)]
6.4.5 solve (\t x -> x*(x-1)) (0,1) 7 [(0.1,100)]
6.4.6 solve (\t x -> 5+sin t - x) (1,5+-1) 6 [(0.3,30)]
6.4.7 solve (\t x -> (exp (exp (-t*x)) + 0.01*x^3 + 0.1*x + 2 + 10*cos x+4*sin t - log x)/(0.02*x^3+4*x^2+3*x+4+(x+1)**0.75*0.001*sin (1.5*t*x)+0.001*cos(3.14*t))) (0,3 -+- (3+recip(2^52))) 3 [(0.25,40)]
-}
{-
If you have gnuplot, you may consider uncommenting this function and the import of module Plot,
to visualize the solutions. This gnuplot connection is, however, only a hack which should be
redone properly.
-}
psolve f (t0,x0) n hns = plotData [fu, fl] (sh t0) (sh xs)
where ss = solve f (t0,x0) n hns
sh x = showIReal 10 x
xs = t0 + sum (map (\(h,n) -> h * fromIntegral n) hns)
fu = map (\(t,x) -> sh (mid t) ++ " " ++ sh (upper x)) ss
fl = map (\(t,x) -> sh (mid t) ++ " " ++ sh (lower x)) ss
| sydow/ireal | applications/ODE.hs | bsd-3-clause | 2,803 | 0 | 16 | 745 | 802 | 426 | 376 | 34 | 1 |
-- HilbertCurve.hs
{-# OPTIONS_GHC -Wall #-}
module Lab2.HilbertCurve( makeHilbertCurve
, drawHilbertCurve
, xy2d
, d2xy
) where
import Graphics.Gloss
import Data.Bits
import Lab2.HilbertCoord
drawHilbertCurve :: Int -> Int -> IO ()
drawHilbertCurve imageSize n = do
let hCoords = makeHilbertCurve n
let maxCoord = fromIntegral $ maximum $ map hcX hCoords ++ map hcY hCoords
scaleCoord :: Int -> Float
scaleCoord ui = 0.4*(fromIntegral imageSize)*(-1.0 + 2.0*(fromIntegral ui)/maxCoord)
coords = map (\hc -> (scaleCoord (hcX hc), scaleCoord (hcY hc))) hCoords
myPicture :: Picture
myPicture = color aquamarine $ line coords
display
(InWindow
"hilbert curve" -- window title
(imageSize, imageSize) -- window size
(20, 600)) -- window position
black -- background color
myPicture -- picture to display
makeHilbertCurve :: Int -> [HilbertCoord]
makeHilbertCurve n = map f [0..2^(2*n)-1]
where
f d = HilbertCoord { hcX = fst (d2xy n d)
, hcY = snd (d2xy n d)
, hcV = d
}
xy2d :: Int -> (Int, Int) -> Int
xy2d n (x,y)
| x >= 2^n || y >= 2^n || x < 0 || y < 0 = error $ "xy2d can't compute "++show (x,y)++
", range is between 0 and "++show (2^n-1 :: Int)
| otherwise = f (x,y) 0 ((2^n) `div` 2)
where
f :: (Int, Int) -> Int -> Int -> Int
f _ d 0 = d
f (x',y') d s = f (rot s (x',y') (rx, ry)) newD (s `div` 2)
where
newD = d + s*s*((3*rx) `xor` ry)
rx
| (x' .&. s) > 0 = 1
| otherwise = 0
ry
| (y' .&. s) > 0 = 1
| otherwise = 0
d2xy :: Int -> Int -> (Int, Int)
d2xy n d'
| d' >= 2^(2*n) || d' < 0 = error $ "d2xy can't compute "++show d'++
", range is between 0 and "++show (2^(2*n)-1 :: Int)
| otherwise = f (0,0) d' 1
where
f :: (Int, Int) -> Int -> Int -> (Int, Int)
f (x',y') d s
| s >= (2^n) = (x', y')
| otherwise = f (x'' + s*rx, y'' + s*ry) (d `div` 4) (2*s)
where
rx = 1 .&. (d `div` 2)
ry = 1 .&. (d `xor` rx)
(x'', y'') = rot s (x',y') (rx, ry)
rot :: Int -> (Int, Int) -> (Int, Int) -> (Int, Int)
rot n (x, y) (rx, ry)
| (ry == 0) && (rx == 1) = (n-1-y, n-1-x)
| (ry == 0) = (y, x)
| otherwise = (x, y)
| ghorn/cs240h-class | Lab2/HilbertCurve.hs | bsd-3-clause | 2,600 | 0 | 16 | 1,006 | 1,165 | 628 | 537 | 61 | 2 |
{-# LANGUAGE DeriveGeneric #-}
module Tutorial.Chapter10.Martian (Martian(..), run) where
import Tutorial.Chapter10.Rock (Rock)
import Tutorial.Chapter10.Plant (Plant)
import qualified Tutorial.Chapter10.Plant as P (tryMating)
import Tutorial.Chapter10.Bug (Bug)
import qualified Tutorial.Chapter10.Bug as B (tryMating)
import ALife.Creatur (Agent, agentId, isAlive)
import ALife.Creatur.Database (Record, key)
import ALife.Creatur.Universe (SimpleUniverse, writeToLog)
import Control.Monad.State (StateT)
import Data.Serialize (Serialize)
import GHC.Generics (Generic)
data Martian = FromRock Rock | FromPlant Plant | FromBug Bug
deriving (Show, Generic)
instance Serialize Martian
instance Agent Martian where
agentId (FromRock x) = agentId x
agentId (FromPlant x) = agentId x
agentId (FromBug x) = agentId x
isAlive (FromRock x) = isAlive x
isAlive (FromPlant x) = isAlive x
isAlive (FromBug x) = isAlive x
instance Record Martian where
key = agentId
run :: [Martian] -> StateT (SimpleUniverse Martian) IO [Martian]
run xs@(me:_) = do
writeToLog $ agentId me ++ "'s turn"
tryMating xs
run [] = error "empty agent list"
tryMating :: [Martian] -> StateT (SimpleUniverse Martian) IO [Martian]
tryMating (FromPlant me:FromPlant other:_) = do
xs <- P.tryMating [me, other]
return $ map FromPlant xs
tryMating (FromBug me:FromBug other:_) = do
xs <- B.tryMating [me, other]
return $ map FromBug xs
tryMating xs = return xs -- can't mate rocks or mismatched species
| mhwombat/creatur-examples | src/Tutorial/Chapter10/Martian.hs | bsd-3-clause | 1,506 | 0 | 9 | 242 | 545 | 293 | 252 | 38 | 1 |
module EFS(EFS,
Proof,
ProofLine,
Formula,
JustificationType(..),
LineNo,
sProof,
proof,
isEmpty,
justification,
sentence,
rest,
firstLine,
letter,
var,
axiomStep,
substitutionStep,
sFormula,
formula,
atom,
term,
efs,
isAxiom) where
import Data.List as L
import Data.Set as S
type LineNo = Int
data EFS = EFS {
letters :: [Symbol],
variables :: [Symbol],
predicates :: [(String, Int)],
axioms :: Set Formula
} deriving (Show)
efs :: [String] ->
[String] ->
[(String, Int)] ->
[Formula] ->
EFS
efs letterNames varNames predicates axioms =
EFS (L.map Letter letterNames) (L.map Variable varNames) predicates (S.fromList axioms)
isAxiom :: EFS -> Formula -> Bool
isAxiom efs f = S.member f (axioms efs)
data Formula
= Formula [Atom]
deriving (Eq, Ord, Show)
formula = Formula
sFormula predName degree terms = Formula [atom predName degree terms]
data Atom
= Atom String Int [Term]
deriving (Eq, Ord, Show)
atom predicateName degree arguments =
Atom predicateName degree arguments
data Term
= Term [Symbol]
deriving (Eq, Ord, Show)
term = Term
data Symbol
= Letter String
| Variable String
deriving (Eq, Ord, Show)
letter = Letter
var = Variable
data Proof = Proof [ProofLine]
deriving (Show)
proof = Proof
sProof x = Proof [x]
isEmpty :: Proof -> Bool
isEmpty (Proof []) = True
isEmpty _ = False
firstLine :: Proof -> ProofLine
firstLine (Proof (l:ls)) = l
rest :: Proof -> Proof
rest (Proof (l:ls)) = Proof ls
data ProofLine
= ProofLine Formula Justification
deriving (Show)
axiomStep f = ProofLine f Axiom
substitutionStep f lineNo var@(Variable x) term =
ProofLine f (Substitution lineNo var term)
substitutionStep _ _ s _ = error $ show s ++ " is not a variable"
sentence (ProofLine f _) = f
data Justification
= Axiom
| Substitution LineNo Symbol Term
deriving (Show)
data JustificationType
= AXIOM
| SUBSTITUTION
justification :: ProofLine -> JustificationType
justification (ProofLine _ Axiom) = AXIOM
justification (ProofLine _ (Substitution _ _ _)) = SUBSTITUTION
| dillonhuff/EFSChecker | src/EFS.hs | bsd-3-clause | 2,343 | 0 | 10 | 683 | 789 | 436 | 353 | 90 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module V1.Api (Api, server) where
import Servant
import qualified V1.Projects.Api as Projects (Api, server)
type Api = "projects" :> Projects.Api
server :: Server Api
server = Projects.server
| thiagorp/deployments-web | app/v1/Api.hs | bsd-3-clause | 254 | 0 | 6 | 39 | 63 | 40 | 23 | 8 | 1 |
module Day17 where
import Data.List
partOne = length filtered
partTwo = length $ takeWhile (\c -> length c == minimumNum) sorted
where
sorted = sortOn length filtered
minimumNum = length $ head sorted
filtered = filter (\i -> sum i == 150) $ subsequences input
input :: [Int]
input = map read $ words "50 44 11 49 42 46 18 32 26 40 21 7 18 43 10 47 36 24 22 40 "
| z0isch/advent-of-code | src/Day17.hs | bsd-3-clause | 387 | 0 | 10 | 99 | 123 | 64 | 59 | 9 | 1 |
{-# LANGUAGE Rank2Types #-}
module STPermute (
tests_STPermute,
smoke_STPermute
) where
import Control.Monad
import Control.Monad.ST
import Data.Permute
import Data.Permute.ST
import Driver
import Debug.Trace
import Test.QuickCheck
import Text.Printf
import Test.Permute()
import qualified Test.Permute as Test
newPermute_S n = permute n
prop_NewPermute (Nat n) =
newPermute n `equivalent` newPermute_S n
newListPermute_S n is = listPermute n is
prop_NewListPermute (ListPermute n is) =
newListPermute n is `equivalent` newListPermute_S n is
newSwapsPermute_S n ss = swapsPermute n ss
prop_NewSwapsPermute (SwapsPermute n ss) =
newSwapsPermute n ss `equivalent` newSwapsPermute_S n ss
prop_UnsafeNewSwapsPermute (SwapsPermute n ss) =
unsafeNewSwapsPermute n ss `equivalent` newSwapsPermute_S n ss
newCyclesPermute_S n cs = cyclesPermute n cs
prop_NewCyclesPermute (CyclesPermute n cs) =
newCyclesPermute n cs `equivalent` newCyclesPermute_S n cs
prop_UnsafeNewCyclesPermute (CyclesPermute n cs) =
unsafeNewCyclesPermute n cs `equivalent` newCyclesPermute_S n cs
newCopyPermute_S p = (p, p)
prop_NewCopyPermute =
implements
(\p -> newCopyPermute p >>= unsafeFreeze)
(\p -> newCopyPermute_S p)
copyPermute_S p q = ((), q, q)
prop_CopyPermute =
copyPermute `implements2` copyPermute_S
setIdentity_S p = ((), permute (size p))
prop_SetIdentity =
setIdentity `implements` setIdentity_S
getElem_S p i = ((elems p) !! i, p)
prop_GetElem (Index n i) =
implementsFor n
(\p -> getElem p i)
(\p -> getElem_S p i)
prop_UnsafeGetElem (Index n i) =
implementsFor n
(\p -> unsafeGetElem p i)
(\p -> getElem_S p i)
swapElems_S p i j = ((), p')
where
(n,is) = (size p, elems p)
at k | k == i = is !! j
| k == j = is !! i
| otherwise = is !! k
p' = listPermute n $ map at [0..(n-1)]
prop_SwapElems (Swap n i j) =
implementsFor n
(\p -> swapElems p i j)
(\p -> swapElems_S p i j)
prop_UnsafeSwapElems (Swap n i j) =
implementsFor n
(\p -> unsafeSwapElems p i j)
(\p -> swapElems_S p i j)
getSize_S p = (length (elems p), p)
prop_GetSize = getSize `implements` getSize_S
getElems_S p = (elems p, p)
prop_GetElems = getElems `implements` getElems_S
prop_IsValid_Strict = runST $ do
p <- newPermute 10
setElem p 0 1
valid <- isValid p
setElem p 0 0
return $ valid == False
prop_GetSwaps_Lazy1 = runST $ do
p <- newPermute 10
ss <- getSwaps p
swapElems p 0 1
return $ length ss == 1
prop_GetSwaps_Lazy2 = runST $ do
p <- newPermute 10
ss <- getSwaps p
swapElems p 0 1
swapElems p 3 4
head ss `seq` swapElems p 3 4
return $ length ss == 1
tests_STPermute =
[ ("newPermute" , mytest prop_NewPermute)
, ("newListPermute" , mytest prop_NewListPermute)
, ("newSwapsPermute" , mytest prop_NewSwapsPermute)
, ("unsafeNewSwapsPermute" , mytest prop_UnsafeNewSwapsPermute)
, ("newCyclesPermute" , mytest prop_NewCyclesPermute)
, ("unsafeNewCyclesPermute" , mytest prop_UnsafeNewCyclesPermute)
, ("newCopyPermute" , mytest prop_NewCopyPermute)
, ("copyPermute" , mytest prop_CopyPermute)
, ("setIdentity" , mytest prop_SetIdentity)
, ("getElem" , mytest prop_GetElem)
, ("unsafeGetElem" , mytest prop_UnsafeGetElem)
, ("swapElems" , mytest prop_SwapElems)
, ("unsafeSwapElems" , mytest prop_UnsafeSwapElems)
, ("getSize" , mytest prop_GetSize)
, ("getElems" , mytest prop_GetElems)
]
smoke_STPermute =
[ ("isValid is strict" , mytest prop_IsValid_Strict)
, ("getSwaps is lazy (test 1)" , mytest prop_GetSwaps_Lazy1)
, ("getSwaps is lazy (test 2)" , mytest prop_GetSwaps_Lazy2)
]
------------------------------------------------------------------------
--
-- The specification language
--
abstract :: STPermute s -> ST s Permute
abstract = freeze
commutes :: (Eq a, Show a) =>
STPermute s -> (STPermute s -> ST s a) ->
(Permute -> (a,Permute)) -> ST s Bool
commutes p a f = do
old <- abstract p
r <- a p
new <- abstract p
let s = f old
s' = (r,new)
passed = s == s'
when (not passed) $
trace (printf ("expected `%s' but got `%s'") (show s) (show s'))
return ()
return passed
equivalent :: (forall s . ST s (STPermute s)) -> Permute -> Bool
equivalent p s = runST $ do
p' <- (p >>= abstract)
when (not $ p' == s) $
trace (printf ("expected `%s' but got `%s'") (show s) (show p'))
return ()
return (p' == s)
implements :: (Eq a, Show a) =>
(forall s . STPermute s -> ST s a) ->
(Permute -> (a,Permute)) ->
Property
a `implements` f =
forAll arbitrary $ \(Nat n) ->
implementsFor n a f
implementsFor :: (Eq a, Show a) =>
Int ->
(forall s . STPermute s -> ST s a) ->
(Permute -> (a,Permute)) ->
Property
implementsFor n a f =
forAll (Test.permute n) $ \p ->
runST $ do
p' <- unsafeThaw p
commutes p' a f
implementsIf :: (Eq a, Show a) =>
(forall s . STPermute s -> ST s Bool) ->
(forall s . STPermute s -> ST s a) ->
(Permute -> (a, Permute)) ->
Property
implementsIf pre a f =
forAll arbitrary $ \p ->
runST ( do
p' <- thaw p
pre p') ==>
runST ( do
p' <- unsafeThaw p
commutes p' a f )
commutes2 :: (Eq a, Show a) =>
STPermute s -> STPermute s -> (STPermute s -> STPermute s -> ST s a) ->
(Permute -> Permute -> (a,Permute,Permute)) -> ST s Bool
commutes2 p q a f = do
oldp <- abstract p
oldq <- abstract q
r <- a p q
newp <- abstract p
newq <- abstract q
let s = f oldp oldq
s' = (r,newp,newq)
passed = s == s'
when (not passed) $
trace (printf ("expected `%s' but got `%s'") (show s) (show s'))
return ()
return passed
implements2 :: (Eq a, Show a) =>
(forall s . STPermute s -> STPermute s -> ST s a) ->
(Permute -> Permute -> (a,Permute,Permute)) ->
Property
implements2 a f =
forAll arbitrary $ \(Nat n) ->
forAll (Test.permute n) $ \p ->
forAll (Test.permute n) $ \q ->
runST $ do
p' <- unsafeThaw p
q' <- unsafeThaw q
commutes2 p' q' a f
| patperry/permutation | tests/STPermute.hs | bsd-3-clause | 6,700 | 0 | 15 | 2,050 | 2,433 | 1,241 | 1,192 | 187 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.CodeGeneration.GCD
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.CodeGeneration.GCD
-----------------------------------------------------------------------------
module TestSuite.CodeGeneration.GCD(tests) where
import Data.SBV.Internals
import Data.SBV.Examples.CodeGeneration.GCD
import Utils.SBVTestFramework
-- Test suite
tests :: TestTree
tests = testGroup "CodeGeneration.GCD" [
goldenVsStringShow "gcd" gcdC
]
where gcdC = compileToC' "sgcd" $ do
cgSetDriverValues [55,154]
x <- cgInput "x"
y <- cgInput "y"
cgReturn $ sgcd x y
| josefs/sbv | SBVTestSuite/TestSuite/CodeGeneration/GCD.hs | bsd-3-clause | 835 | 0 | 11 | 170 | 123 | 70 | 53 | 12 | 1 |
{-# LANGUAGE CPP #-}
#ifdef FILE_EMBED
{-# LANGUAGE TemplateHaskell #-}
#endif
-- | The information from Paths_shake cleaned up
module Development.Shake.Internal.Paths(
shakeVersionString,
initDataDirectory,
hasManualData, copyManualData,
readDataFileHTML
) where
import Control.Monad.Extra
import Data.Version
import System.FilePath
import General.Extra
import qualified Data.ByteString.Lazy as LBS
import Paths_shake
#ifdef FILE_EMBED
import qualified Data.ByteString as BS
import Data.FileEmbed
#else
import Control.Exception
import System.Directory
import System.Info.Extra
import System.IO.Unsafe
import System.Environment
#endif
shakeVersionString :: String
shakeVersionString = showVersion version
#ifdef FILE_EMBED
initDataDirectory :: IO ()
initDataDirectory = pure ()
htmlDataFiles :: [(FilePath, BS.ByteString)]
htmlDataFiles =
[ ("profile.html", $(embedFile "html/profile.html"))
, ("progress.html", $(embedFile "html/progress.html"))
, ("shake.js", $(embedFile "html/shake.js"))
]
readDataFileHTML :: FilePath -> IO LBS.ByteString
readDataFileHTML file = do
case lookup file htmlDataFiles of
Nothing -> fail $ "Could not find data file " ++ file ++ " in embedded data files!"
Just x -> pure (LBS.fromStrict x)
manualDirData :: [(FilePath, BS.ByteString)]
manualDirData = $(embedDir "docs/manual")
hasManualData :: IO Bool
hasManualData = pure True
copyManualData :: FilePath -> IO ()
copyManualData dest = do
createDirectoryRecursive dest
forM_ manualDirData $ \(file, bs) -> do
BS.writeFile (dest </> file) bs
#else
-- We want getDataFileName to be relative to the current directory on program startup,
-- even if we issue a change directory command. Therefore, the first call caches the
-- result, and future calls read it.
{-# NOINLINE dataDirs #-}
dataDirs :: [String]
dataDirs = unsafePerformIO $ do
datdir <- getDataDir
exedir <- takeDirectory <$> getExecutablePath `catchIO` \_ -> pure ""
curdir <- getCurrentDirectory
pure $ [datdir] ++ [exedir | exedir /= ""] ++ [curdir]
-- The data files may be located relative to the current directory; if so, cache that location in advance.
initDataDirectory :: IO ()
initDataDirectory = void $ evaluate dataDirs
getDataFile :: FilePath -> IO FilePath
getDataFile file = do
let poss = map (</> file) dataDirs
res <- filterM doesFileExist_ poss
case res of
[] -> fail $ unlines $ ("Could not find data file " ++ file ++ ", looked in:") : map (" " ++) poss
x:_ -> pure x
hasDataFile :: FilePath -> IO Bool
hasDataFile file = anyM (\dir -> doesFileExist_ $ dir </> file) dataDirs
readDataFileHTML :: FilePath -> IO LBS.ByteString
readDataFileHTML file = LBS.readFile =<< getDataFile ("html" </> file)
manualFiles :: [FilePath]
manualFiles = map ("docs/manual" </>) ["Shakefile.hs","main.c","constants.c","constants.h","build" <.> if isWindows then "bat" else "sh"]
hasManualData :: IO Bool
hasManualData = allM hasDataFile manualFiles
copyManualData :: FilePath -> IO ()
copyManualData dest = do
createDirectoryRecursive dest
forM_ manualFiles $ \file -> do
src <- getDataFile file
copyFile src (dest </> takeFileName file)
#endif
| ndmitchell/shake | src/Development/Shake/Internal/Paths.hs | bsd-3-clause | 3,205 | 0 | 13 | 573 | 366 | 206 | 160 | 49 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
-- |
--
-- Functions for generic traversals across Futhark syntax trees. The
-- motivation for this module came from dissatisfaction with rewriting
-- the same trivial tree recursions for every module. A possible
-- alternative would be to use normal \"Scrap your
-- boilerplate\"-techniques, but these are rejected for two reasons:
--
-- * They are too slow.
--
-- * More importantly, they do not tell you whether you have missed
-- some cases.
--
-- Instead, this module defines various traversals of the Futhark syntax
-- tree. The implementation is rather tedious, but the interface is
-- easy to use.
--
-- A traversal of the Futhark syntax tree is expressed as a record of
-- functions expressing the operations to be performed on the various
-- types of nodes.
module Language.Futhark.Traversals
( ASTMapper (..),
ASTMappable (..),
identityMapper,
bareExp,
)
where
import qualified Data.List.NonEmpty as NE
import qualified Data.Set as S
import Language.Futhark.Syntax
-- | Express a monad mapping operation on a syntax node. Each element
-- of this structure expresses the operation to be performed on a
-- given child.
data ASTMapper m = ASTMapper
{ mapOnExp :: ExpBase Info VName -> m (ExpBase Info VName),
mapOnName :: VName -> m VName,
mapOnQualName :: QualName VName -> m (QualName VName),
mapOnStructType :: StructType -> m StructType,
mapOnPatType :: PatType -> m PatType,
mapOnStructRetType :: StructRetType -> m StructRetType,
mapOnPatRetType :: PatRetType -> m PatRetType
}
-- | An 'ASTMapper' that just leaves its input unchanged.
identityMapper :: Monad m => ASTMapper m
identityMapper =
ASTMapper
{ mapOnExp = return,
mapOnName = return,
mapOnQualName = return,
mapOnStructType = return,
mapOnPatType = return,
mapOnStructRetType = return,
mapOnPatRetType = return
}
-- | The class of things that we can map an 'ASTMapper' across.
class ASTMappable x where
-- | Map a monadic action across the immediate children of an
-- object. Importantly, the 'astMap' action is not invoked for
-- the object itself, and the mapping does not descend recursively
-- into subexpressions. The mapping is done left-to-right.
astMap :: Monad m => ASTMapper m -> x -> m x
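-- For illustration, a minimal sketch (not part of this module) of the usual
-- idiom: a recursive traversal is obtained by tying the knot through
-- 'mapOnExp', here over the 'Identity' monad. The name 'deepIdentity' is
-- hypothetical; the traversal changes nothing, but shows how the pieces fit.
--
-- > import Data.Functor.Identity (runIdentity)
-- >
-- > deepIdentity :: ExpBase Info VName -> ExpBase Info VName
-- > deepIdentity = runIdentity . astMap tv
-- >   where tv = identityMapper { mapOnExp = astMap tv }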
instance ASTMappable (AppExpBase Info VName) where
astMap tv (Range start next end loc) =
Range <$> mapOnExp tv start <*> traverse (mapOnExp tv) next
<*> traverse (mapOnExp tv) end
<*> pure loc
astMap tv (If c texp fexp loc) =
If <$> mapOnExp tv c <*> mapOnExp tv texp <*> mapOnExp tv fexp <*> pure loc
astMap tv (Match e cases loc) =
Match <$> mapOnExp tv e <*> astMap tv cases <*> pure loc
astMap tv (Apply f arg d loc) =
Apply <$> mapOnExp tv f <*> mapOnExp tv arg <*> pure d <*> pure loc
astMap tv (LetPat sizes pat e body loc) =
LetPat <$> astMap tv sizes <*> astMap tv pat <*> mapOnExp tv e <*> mapOnExp tv body <*> pure loc
astMap tv (LetFun name (fparams, params, ret, t, e) body loc) =
LetFun <$> mapOnName tv name
<*> ( (,,,,) <$> mapM (astMap tv) fparams <*> mapM (astMap tv) params
<*> traverse (astMap tv) ret
<*> traverse (mapOnStructRetType tv) t
<*> mapOnExp tv e
)
<*> mapOnExp tv body
<*> pure loc
astMap tv (LetWith dest src idxexps vexp body loc) =
LetWith
<$> astMap tv dest
<*> astMap tv src
<*> mapM (astMap tv) idxexps
<*> mapOnExp tv vexp
<*> mapOnExp tv body
<*> pure loc
astMap tv (Coerce e tdecl loc) =
Coerce <$> mapOnExp tv e <*> astMap tv tdecl <*> pure loc
astMap tv (BinOp (fname, fname_loc) t (x, Info (xt, xext)) (y, Info (yt, yext)) loc) =
BinOp <$> ((,) <$> mapOnQualName tv fname <*> pure fname_loc)
<*> traverse (mapOnPatType tv) t
<*> ( (,) <$> mapOnExp tv x
<*> (Info <$> ((,) <$> mapOnStructType tv xt <*> pure xext))
)
<*> ( (,) <$> mapOnExp tv y
<*> (Info <$> ((,) <$> mapOnStructType tv yt <*> pure yext))
)
<*> pure loc
astMap tv (DoLoop sparams mergepat mergeexp form loopbody loc) =
DoLoop <$> mapM (mapOnName tv) sparams <*> astMap tv mergepat
<*> mapOnExp tv mergeexp
<*> astMap tv form
<*> mapOnExp tv loopbody
<*> pure loc
astMap tv (Index arr idxexps loc) =
Index <$> mapOnExp tv arr <*> mapM (astMap tv) idxexps <*> pure loc
instance ASTMappable (ExpBase Info VName) where
astMap tv (Var name t loc) =
Var <$> mapOnQualName tv name <*> traverse (mapOnPatType tv) t
<*> pure loc
astMap _ (Literal val loc) =
pure $ Literal val loc
astMap _ (StringLit vs loc) =
pure $ StringLit vs loc
astMap tv (IntLit val t loc) =
IntLit val <$> traverse (mapOnPatType tv) t <*> pure loc
astMap tv (FloatLit val t loc) =
FloatLit val <$> traverse (mapOnPatType tv) t <*> pure loc
astMap tv (Parens e loc) =
Parens <$> mapOnExp tv e <*> pure loc
astMap tv (QualParens (name, nameloc) e loc) =
QualParens <$> ((,) <$> mapOnQualName tv name <*> pure nameloc)
<*> mapOnExp tv e
<*> pure loc
astMap tv (TupLit els loc) =
TupLit <$> mapM (mapOnExp tv) els <*> pure loc
astMap tv (RecordLit fields loc) =
RecordLit <$> astMap tv fields <*> pure loc
astMap tv (ArrayLit els t loc) =
ArrayLit <$> mapM (mapOnExp tv) els <*> traverse (mapOnPatType tv) t <*> pure loc
astMap tv (Ascript e tdecl loc) =
Ascript <$> mapOnExp tv e <*> astMap tv tdecl <*> pure loc
astMap tv (Negate x loc) =
Negate <$> mapOnExp tv x <*> pure loc
astMap tv (Not x loc) =
Not <$> mapOnExp tv x <*> pure loc
astMap tv (Update src slice v loc) =
Update <$> mapOnExp tv src <*> mapM (astMap tv) slice
<*> mapOnExp tv v
<*> pure loc
astMap tv (RecordUpdate src fs v (Info t) loc) =
RecordUpdate <$> mapOnExp tv src <*> pure fs
<*> mapOnExp tv v
<*> (Info <$> mapOnPatType tv t)
<*> pure loc
astMap tv (Project field e t loc) =
Project field <$> mapOnExp tv e <*> traverse (mapOnPatType tv) t <*> pure loc
astMap tv (Assert e1 e2 desc loc) =
Assert <$> mapOnExp tv e1 <*> mapOnExp tv e2 <*> pure desc <*> pure loc
astMap tv (Lambda params body ret t loc) =
Lambda <$> mapM (astMap tv) params
<*> mapOnExp tv body
<*> traverse (astMap tv) ret
<*> traverse (traverse $ mapOnStructRetType tv) t
<*> pure loc
astMap tv (OpSection name t loc) =
OpSection <$> mapOnQualName tv name
<*> traverse (mapOnPatType tv) t
<*> pure loc
astMap tv (OpSectionLeft name t arg (Info (pa, t1a, argext), Info (pb, t1b)) (ret, retext) loc) =
OpSectionLeft <$> mapOnQualName tv name
<*> traverse (mapOnPatType tv) t
<*> mapOnExp tv arg
<*> ( (,)
<$> (Info <$> ((pa,,) <$> mapOnStructType tv t1a <*> pure argext))
<*> (Info <$> ((pb,) <$> mapOnStructType tv t1b))
)
<*> ((,) <$> traverse (mapOnPatRetType tv) ret <*> traverse (mapM (mapOnName tv)) retext)
<*> pure loc
astMap tv (OpSectionRight name t arg (Info (pa, t1a), Info (pb, t1b, argext)) t2 loc) =
OpSectionRight <$> mapOnQualName tv name
<*> traverse (mapOnPatType tv) t
<*> mapOnExp tv arg
<*> ( (,)
<$> (Info <$> ((pa,) <$> mapOnStructType tv t1a))
<*> (Info <$> ((pb,,) <$> mapOnStructType tv t1b <*> pure argext))
)
<*> traverse (mapOnPatRetType tv) t2
<*> pure loc
astMap tv (ProjectSection fields t loc) =
ProjectSection fields <$> traverse (mapOnPatType tv) t <*> pure loc
astMap tv (IndexSection idxs t loc) =
IndexSection <$> mapM (astMap tv) idxs
<*> traverse (mapOnPatType tv) t
<*> pure loc
astMap tv (Constr name es ts loc) =
Constr name <$> traverse (mapOnExp tv) es <*> traverse (mapOnPatType tv) ts <*> pure loc
astMap tv (Attr attr e loc) =
Attr attr <$> mapOnExp tv e <*> pure loc
astMap tv (AppExp e res) =
AppExp <$> astMap tv e <*> astMap tv res
instance ASTMappable (LoopFormBase Info VName) where
astMap tv (For i bound) = For <$> astMap tv i <*> mapOnExp tv bound
astMap tv (ForIn pat e) = ForIn <$> astMap tv pat <*> mapOnExp tv e
astMap tv (While e) = While <$> mapOnExp tv e
instance ASTMappable (TypeExp VName) where
astMap tv (TEVar qn loc) = TEVar <$> mapOnQualName tv qn <*> pure loc
astMap tv (TETuple ts loc) = TETuple <$> traverse (astMap tv) ts <*> pure loc
astMap tv (TERecord ts loc) =
TERecord <$> traverse (traverse $ astMap tv) ts <*> pure loc
astMap tv (TEArray te dim loc) =
TEArray <$> astMap tv te <*> astMap tv dim <*> pure loc
astMap tv (TEUnique t loc) = TEUnique <$> astMap tv t <*> pure loc
astMap tv (TEApply t1 t2 loc) =
TEApply <$> astMap tv t1 <*> astMap tv t2 <*> pure loc
astMap tv (TEArrow v t1 t2 loc) =
TEArrow v <$> astMap tv t1 <*> astMap tv t2 <*> pure loc
astMap tv (TESum cs loc) =
TESum <$> traverse (traverse $ astMap tv) cs <*> pure loc
astMap tv (TEDim dims t loc) =
TEDim dims <$> astMap tv t <*> pure loc
instance ASTMappable (TypeArgExp VName) where
astMap tv (TypeArgExpDim dim loc) =
TypeArgExpDim <$> astMap tv dim <*> pure loc
astMap tv (TypeArgExpType te) =
TypeArgExpType <$> astMap tv te
instance ASTMappable (DimExp VName) where
astMap tv (DimExpNamed vn loc) =
DimExpNamed <$> mapOnQualName tv vn <*> pure loc
astMap _ (DimExpConst k loc) = pure $ DimExpConst k loc
astMap _ DimExpAny = pure DimExpAny
instance ASTMappable (DimDecl VName) where
astMap tv (NamedDim vn) = NamedDim <$> mapOnQualName tv vn
astMap _ (ConstDim k) = pure $ ConstDim k
astMap tv (AnyDim vn) = AnyDim <$> traverse (mapOnName tv) vn
instance ASTMappable (TypeParamBase VName) where
astMap = traverse . mapOnName
instance ASTMappable (DimIndexBase Info VName) where
astMap tv (DimFix j) = DimFix <$> mapOnExp tv j
astMap tv (DimSlice i j stride) =
DimSlice
<$> maybe (return Nothing) (fmap Just . mapOnExp tv) i
<*> maybe (return Nothing) (fmap Just . mapOnExp tv) j
<*> maybe (return Nothing) (fmap Just . mapOnExp tv) stride
instance ASTMappable Alias where
astMap tv (AliasBound v) = AliasBound <$> mapOnName tv v
astMap tv (AliasFree v) = AliasFree <$> mapOnName tv v
instance ASTMappable Aliasing where
astMap tv = fmap S.fromList . traverse (astMap tv) . S.toList
instance ASTMappable AppRes where
astMap tv (AppRes t ext) =
AppRes <$> mapOnPatType tv t <*> pure ext
type TypeTraverser f t dim1 als1 dim2 als2 =
(TypeName -> f TypeName) ->
(dim1 -> f dim2) ->
(als1 -> f als2) ->
t dim1 als1 ->
f (t dim2 als2)
traverseScalarType ::
Applicative f =>
TypeTraverser f ScalarTypeBase dim1 als1 dims als2
traverseScalarType _ _ _ (Prim t) = pure $ Prim t
traverseScalarType f g h (Record fs) = Record <$> traverse (traverseType f g h) fs
traverseScalarType f g h (TypeVar als u t args) =
TypeVar <$> h als <*> pure u <*> f t <*> traverse (traverseTypeArg f g) args
traverseScalarType f g h (Arrow als v t1 (RetType dims t2)) =
Arrow <$> h als <*> pure v <*> traverseType f g h t1
<*> (RetType dims <$> traverseType f g h t2)
traverseScalarType f g h (Sum cs) =
Sum <$> (traverse . traverse) (traverseType f g h) cs
traverseType ::
Applicative f =>
TypeTraverser f TypeBase dim1 als1 dims als2
traverseType f g h (Array als u et shape) =
Array <$> h als <*> pure u
<*> traverseScalarType f g pure et
<*> traverse g shape
traverseType f g h (Scalar t) =
Scalar <$> traverseScalarType f g h t
traverseTypeArg ::
Applicative f =>
(TypeName -> f TypeName) ->
(dim1 -> f dim2) ->
TypeArg dim1 ->
f (TypeArg dim2)
traverseTypeArg _ g (TypeArgDim d loc) =
TypeArgDim <$> g d <*> pure loc
traverseTypeArg f g (TypeArgType t loc) =
TypeArgType <$> traverseType f g pure t <*> pure loc
instance ASTMappable StructType where
astMap tv = traverseType f (astMap tv) pure
where
f = fmap typeNameFromQualName . mapOnQualName tv . qualNameFromTypeName
instance ASTMappable PatType where
astMap tv = traverseType f (astMap tv) (astMap tv)
where
f = fmap typeNameFromQualName . mapOnQualName tv . qualNameFromTypeName
instance ASTMappable StructRetType where
astMap tv (RetType ext t) = RetType ext <$> astMap tv t
instance ASTMappable PatRetType where
astMap tv (RetType ext t) = RetType ext <$> astMap tv t
instance ASTMappable (TypeDeclBase Info VName) where
astMap tv (TypeDecl dt (Info et)) =
TypeDecl <$> astMap tv dt <*> (Info <$> mapOnStructType tv et)
instance ASTMappable (IdentBase Info VName) where
astMap tv (Ident name (Info t) loc) =
Ident <$> mapOnName tv name <*> (Info <$> mapOnPatType tv t) <*> pure loc
instance ASTMappable (SizeBinder VName) where
astMap tv (SizeBinder name loc) =
SizeBinder <$> mapOnName tv name <*> pure loc
instance ASTMappable (PatBase Info VName) where
astMap tv (Id name (Info t) loc) =
Id <$> mapOnName tv name <*> (Info <$> mapOnPatType tv t) <*> pure loc
astMap tv (TuplePat pats loc) =
TuplePat <$> mapM (astMap tv) pats <*> pure loc
astMap tv (RecordPat fields loc) =
RecordPat <$> mapM (traverse $ astMap tv) fields <*> pure loc
astMap tv (PatParens pat loc) =
PatParens <$> astMap tv pat <*> pure loc
astMap tv (PatAscription pat t loc) =
PatAscription <$> astMap tv pat <*> astMap tv t <*> pure loc
astMap tv (Wildcard (Info t) loc) =
Wildcard <$> (Info <$> mapOnPatType tv t) <*> pure loc
astMap tv (PatLit v (Info t) loc) =
PatLit v <$> (Info <$> mapOnPatType tv t) <*> pure loc
astMap tv (PatConstr n (Info t) ps loc) =
PatConstr n <$> (Info <$> mapOnPatType tv t) <*> mapM (astMap tv) ps <*> pure loc
astMap tv (PatAttr attr p loc) =
PatAttr attr <$> astMap tv p <*> pure loc
instance ASTMappable (FieldBase Info VName) where
astMap tv (RecordFieldExplicit name e loc) =
RecordFieldExplicit name <$> mapOnExp tv e <*> pure loc
astMap tv (RecordFieldImplicit name t loc) =
RecordFieldImplicit <$> mapOnName tv name
<*> traverse (mapOnPatType tv) t
<*> pure loc
instance ASTMappable (CaseBase Info VName) where
astMap tv (CasePat pat e loc) =
CasePat <$> astMap tv pat <*> mapOnExp tv e <*> pure loc
instance ASTMappable a => ASTMappable (Info a) where
astMap tv = traverse $ astMap tv
instance ASTMappable a => ASTMappable [a] where
astMap tv = traverse $ astMap tv
instance ASTMappable a => ASTMappable (NE.NonEmpty a) where
astMap tv = traverse $ astMap tv
instance (ASTMappable a, ASTMappable b) => ASTMappable (a, b) where
astMap tv (x, y) = (,) <$> astMap tv x <*> astMap tv y
instance (ASTMappable a, ASTMappable b, ASTMappable c) => ASTMappable (a, b, c) where
astMap tv (x, y, z) = (,,) <$> astMap tv x <*> astMap tv y <*> astMap tv z
-- It would be lovely if the following code could be written in terms
-- of ASTMappable, but unfortunately it involves changing the Info
-- functor. For simplicity, the general traversals do not support
-- that. Sometimes a little duplication is better than an overly
-- complex abstraction. The types ensure that this will be correct
-- anyway, so it's just tedious, and not actually fragile.
bareTypeDecl :: TypeDeclBase Info VName -> TypeDeclBase NoInfo VName
bareTypeDecl (TypeDecl te _) = TypeDecl te NoInfo
bareField :: FieldBase Info VName -> FieldBase NoInfo VName
bareField (RecordFieldExplicit name e loc) =
RecordFieldExplicit name (bareExp e) loc
bareField (RecordFieldImplicit name _ loc) =
RecordFieldImplicit name NoInfo loc
barePat :: PatBase Info VName -> PatBase NoInfo VName
barePat (TuplePat ps loc) = TuplePat (map barePat ps) loc
barePat (RecordPat fs loc) = RecordPat (map (fmap barePat) fs) loc
barePat (PatParens p loc) = PatParens (barePat p) loc
barePat (Id v _ loc) = Id v NoInfo loc
barePat (Wildcard _ loc) = Wildcard NoInfo loc
barePat (PatAscription pat (TypeDecl t _) loc) =
PatAscription (barePat pat) (TypeDecl t NoInfo) loc
barePat (PatLit v _ loc) = PatLit v NoInfo loc
barePat (PatConstr c _ ps loc) = PatConstr c NoInfo (map barePat ps) loc
barePat (PatAttr attr p loc) = PatAttr attr (barePat p) loc
bareDimIndex :: DimIndexBase Info VName -> DimIndexBase NoInfo VName
bareDimIndex (DimFix e) =
DimFix $ bareExp e
bareDimIndex (DimSlice x y z) =
DimSlice (bareExp <$> x) (bareExp <$> y) (bareExp <$> z)
bareLoopForm :: LoopFormBase Info VName -> LoopFormBase NoInfo VName
bareLoopForm (For (Ident i _ loc) e) = For (Ident i NoInfo loc) (bareExp e)
bareLoopForm (ForIn pat e) = ForIn (barePat pat) (bareExp e)
bareLoopForm (While e) = While (bareExp e)
bareCase :: CaseBase Info VName -> CaseBase NoInfo VName
bareCase (CasePat pat e loc) = CasePat (barePat pat) (bareExp e) loc
-- | Remove all annotations from an expression, but retain the
-- name/scope information.
bareExp :: ExpBase Info VName -> ExpBase NoInfo VName
bareExp (Var name _ loc) = Var name NoInfo loc
bareExp (Literal v loc) = Literal v loc
bareExp (IntLit val _ loc) = IntLit val NoInfo loc
bareExp (FloatLit val _ loc) = FloatLit val NoInfo loc
bareExp (Parens e loc) = Parens (bareExp e) loc
bareExp (QualParens name e loc) = QualParens name (bareExp e) loc
bareExp (TupLit els loc) = TupLit (map bareExp els) loc
bareExp (StringLit vs loc) = StringLit vs loc
bareExp (RecordLit fields loc) = RecordLit (map bareField fields) loc
bareExp (ArrayLit els _ loc) = ArrayLit (map bareExp els) NoInfo loc
bareExp (Ascript e tdecl loc) =
Ascript (bareExp e) (bareTypeDecl tdecl) loc
bareExp (Negate x loc) = Negate (bareExp x) loc
bareExp (Not x loc) = Not (bareExp x) loc
bareExp (Update src slice v loc) =
Update (bareExp src) (map bareDimIndex slice) (bareExp v) loc
bareExp (RecordUpdate src fs v _ loc) =
RecordUpdate (bareExp src) fs (bareExp v) NoInfo loc
bareExp (Project field e _ loc) =
Project field (bareExp e) NoInfo loc
bareExp (Assert e1 e2 _ loc) = Assert (bareExp e1) (bareExp e2) NoInfo loc
bareExp (Lambda params body ret _ loc) =
Lambda (map barePat params) (bareExp body) ret NoInfo loc
bareExp (OpSection name _ loc) = OpSection name NoInfo loc
bareExp (OpSectionLeft name _ arg _ _ loc) =
OpSectionLeft name NoInfo (bareExp arg) (NoInfo, NoInfo) (NoInfo, NoInfo) loc
bareExp (OpSectionRight name _ arg _ _ loc) =
OpSectionRight name NoInfo (bareExp arg) (NoInfo, NoInfo) NoInfo loc
bareExp (ProjectSection fields _ loc) = ProjectSection fields NoInfo loc
bareExp (IndexSection slice _ loc) =
IndexSection (map bareDimIndex slice) NoInfo loc
bareExp (Constr name es _ loc) =
Constr name (map bareExp es) NoInfo loc
bareExp (AppExp appexp _) =
AppExp appexp' NoInfo
where
appexp' =
case appexp of
Match e cases loc ->
Match (bareExp e) (fmap bareCase cases) loc
DoLoop _ mergepat mergeexp form loopbody loc ->
DoLoop
[]
(barePat mergepat)
(bareExp mergeexp)
(bareLoopForm form)
(bareExp loopbody)
loc
LetWith (Ident dest _ destloc) (Ident src _ srcloc) idxexps vexp body loc ->
LetWith
(Ident dest NoInfo destloc)
(Ident src NoInfo srcloc)
(map bareDimIndex idxexps)
(bareExp vexp)
(bareExp body)
loc
BinOp fname _ (x, _) (y, _) loc ->
BinOp fname NoInfo (bareExp x, NoInfo) (bareExp y, NoInfo) loc
If c texp fexp loc ->
If (bareExp c) (bareExp texp) (bareExp fexp) loc
Apply f arg _ loc ->
Apply (bareExp f) (bareExp arg) NoInfo loc
LetPat sizes pat e body loc ->
LetPat sizes (barePat pat) (bareExp e) (bareExp body) loc
LetFun name (fparams, params, ret, _, e) body loc ->
LetFun name (fparams, map barePat params, ret, NoInfo, bareExp e) (bareExp body) loc
Range start next end loc ->
Range (bareExp start) (fmap bareExp next) (fmap bareExp end) loc
Coerce e tdecl loc ->
Coerce (bareExp e) (bareTypeDecl tdecl) loc
Index arr slice loc ->
Index (bareExp arr) (map bareDimIndex slice) loc
bareExp (Attr attr e loc) =
Attr attr (bareExp e) loc
| diku-dk/futhark | src/Language/Futhark/Traversals.hs | isc | 20,310 | 0 | 17 | 4,841 | 7,844 | 3,849 | 3,995 | 409 | 11 |
{-# OPTIONS_GHC -Wno-deprecations #-}
module Main (main) where
--------------------------------------------------------------------------------
import Data.IORef
import Numeric.Natural
import Prelude
import System.Exit
import System.IO.Unsafe
import Test.QuickCheck
import Control.Monad (when)
import Data.ByteString (ByteString)
import Data.Vector.Primitive (Prim, Vector)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Unsafe as BS
import qualified Data.Vector.Primitive as VP
import qualified Urbit.Atom.Fast as F
import qualified Urbit.Atom.Slow as S
-- Instances -------------------------------------------------------------------
instance Arbitrary Natural where
arbitrary = fromInteger . abs <$> arbitrary
instance Arbitrary ByteString where
arbitrary = BS.pack <$> arbitrary
instance (Prim a, Arbitrary a) => Arbitrary (Vector a) where
arbitrary = VP.fromList <$> arbitrary
-- Utils -----------------------------------------------------------------------
stripBytes :: ByteString -> ByteString
stripBytes buf = BS.take (len - go 0 (len - 1)) buf
where
len = BS.length buf
go n i | i < 0 = n
| 0 == BS.unsafeIndex buf i = go (n + 1) (i - 1)
| otherwise = n
stripWords :: Vector Word -> Vector Word
stripWords vec = VP.take (len - go 0 (len - 1)) vec
where
len = VP.length vec
go n i | i < 0 = n
| 0 == VP.unsafeIndex vec i = go (n + 1) (i - 1)
| otherwise = n
dumpLoad :: Eq i => (i -> o) -> (o -> i) -> (i -> Bool)
dumpLoad dump load x = x == load (dump x)
loadDump :: Eq o => (o -> i) -> (i -> o) -> (o -> o) -> (o -> Bool)
loadDump load dump norm x = norm x == dump (load x)
-- Test Reference Implementation -----------------------------------------------
prop_atom_bytes_roundtrip :: Natural -> Bool
prop_atom_bytes_roundtrip = dumpLoad S.atomBytes S.bytesAtom
prop_atom_words_roundtrip :: Natural -> Bool
prop_atom_words_roundtrip = dumpLoad S.atomWords S.wordsAtom
prop_bytes_atom_roundtrip :: ByteString -> Bool
prop_bytes_atom_roundtrip = loadDump S.bytesAtom S.atomBytes stripBytes
prop_words_atom_roundtrip :: Vector Word -> Bool
prop_words_atom_roundtrip = loadDump S.wordsAtom S.atomWords stripWords
-- Test Fast Implementation ----------------------------------------------------
prop_fast_atom_bytes_roundtrip :: Natural -> Bool
prop_fast_atom_bytes_roundtrip = dumpLoad F.atomBytes F.bytesAtom
prop_fast_atom_words_roundtrip :: Natural -> Bool
prop_fast_atom_words_roundtrip = dumpLoad F.atomWords F.wordsAtom
prop_fast_bytes_atom_roundtrip :: ByteString -> Bool
prop_fast_bytes_atom_roundtrip = loadDump F.bytesAtom F.atomBytes stripBytes
prop_fast_words_atom_roundtrip :: Vector Word -> Bool
prop_fast_words_atom_roundtrip = loadDump F.wordsAtom F.atomWords stripWords
-- Fast and Reference Implementations are the Same -----------------------------
prop_fast_words_atom_correct :: Vector Word -> Bool
prop_fast_words_atom_correct x = F.wordsAtom x == S.wordsAtom x
prop_fast_atom_words_correct :: Natural -> Bool
prop_fast_atom_words_correct x = F.atomWords x == S.atomWords x
prop_fast_bytes_atom_correct :: ByteString -> Bool
prop_fast_bytes_atom_correct x = F.bytesAtom x == S.bytesAtom x
prop_fast_atom_import_correct :: ByteString -> Bool
prop_fast_atom_import_correct x = F.importBytes x == S.bytesAtom x
prop_fast_atom_bytes_correct :: Natural -> Bool
prop_fast_atom_bytes_correct x = F.atomBytes x == S.atomBytes x
prop_fast_atom_export_correct :: Natural -> Bool
prop_fast_atom_export_correct x = F.exportBytes x == S.atomBytes x
--------------------------------------------------------------------------------
failed :: IORef Int
failed = unsafePerformIO (newIORef 0)
checkProp :: Testable prop => String -> prop -> IO ()
checkProp nm chk = do
putStrLn nm
res <- quickCheckResult chk
putStrLn ""
case res of
Success{} -> pure ()
_ -> modifyIORef' failed succ
main :: IO ()
main = do
checkProp "Reference: Atom <-> ByteString roundtrip"
prop_atom_bytes_roundtrip
checkProp "Reference: Atom <-> Vector Word roundtrip"
prop_atom_words_roundtrip
checkProp "Reference: ByteString <-> Atom roundtrip"
prop_bytes_atom_roundtrip
checkProp "Reference: Vector Word <-> Atom roundtrip"
prop_words_atom_roundtrip
checkProp "Fast: Atom <-> ByteString roundtrip"
prop_fast_atom_bytes_roundtrip
checkProp "Fast: Atom <-> Vector Word roundtrip"
prop_fast_atom_words_roundtrip
checkProp "Fast: Bytestring <-> Atom roundtrip"
prop_fast_bytes_atom_roundtrip
checkProp "Fast: Export->Import roundtrip" $ do
withMaxSuccess 100000 (dumpLoad F.exportBytes F.importBytes)
checkProp "Fast: Import->Export roundtrip" $ do
withMaxSuccess 10000 (loadDump F.importBytes F.exportBytes stripBytes)
checkProp "Fast: Vector Word <-> Atom roundtrip"
prop_fast_words_atom_roundtrip
checkProp "Fast matches reference: Vector Words -> Atom"
(withMaxSuccess 10000 prop_fast_words_atom_correct)
checkProp "Fast matches reference: Atom -> Vector Word"
(withMaxSuccess 10000 prop_fast_atom_words_correct)
checkProp "Fast matches reference: ByteString -> Atom"
(withMaxSuccess 10000 prop_fast_bytes_atom_correct)
checkProp "Fast matches reference: Atom -> ByteString"
(withMaxSuccess 10000 prop_fast_atom_bytes_correct)
checkProp "Fast matches reference: Atom Import"
(withMaxSuccess 10000 prop_fast_atom_import_correct)
checkProp "Fast matches reference: Atom Export"
(withMaxSuccess 10000 prop_fast_atom_export_correct)
res <- readIORef failed
when (res /= 0) $ do
putStrLn $ "FAILURE: " <> show res <> " tests failed."
exitWith (ExitFailure 1)
putStrLn $ "SUCCESS: All tests passed"
| jfranklin9000/urbit | pkg/hs/urbit-atom/test/Main.hs | mit | 5,857 | 0 | 13 | 1,018 | 1,404 | 700 | 704 | 115 | 2 |
{-# LANGUAGE CPP #-}
-- Copyright (c) 2010, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Test.Yql.Core.Types where
#define eq assertEqual (__FILE__ ++":"++ show __LINE__)
#define ok assertBool (__FILE__ ++":"++ show __LINE__)
import Data.Maybe
import qualified Data.Map as M
import Yql.Core.Types
import Yql.Core.LocalFunction
import Yql.Data.Xml
import Control.Monad
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit (assertBool, assertEqual)
eqM aM bM = do { a <- aM
; b <- bM
; eq a b
}
suite :: [Test]
suite = [ testGroup "Types.hs" [ test0
, test1
, test2
, test3
, test4
, test5
, test6
, test7
, test8
, test9
, test10
, test11
, test12
, test13
, test14
, test15
, test16
, test17
, test18
, test19
, test20
, test21
, test22
, test23
, test24
, test25
, test26
, test27
, test28
, test29
, test30
, test31
, test32
, test33
, test34
, test35
, test36
, test37
, test38
, test39
, test40
, test41
, test42
, test43
, test44
, test45
, test46
, test47
, test48
]
]
test0 = testCase "show select * without where produces correct stmt" $
eq "SELECT * FROM iyql;" (show $ SELECT ["*"] "iyql" Nothing Nothing Nothing [])
test1 = testCase "show select foo,bar without where produces correct stmt" $
eq "SELECT foo,bar FROM iyql;" (show $ SELECT ["foo","bar"] "iyql" Nothing Nothing Nothing [])
test2 = testCase "show select foo with single where clause [txt]" $
eq "SELECT foo FROM iyql WHERE foo = \"bar\";" (show $ SELECT ["foo"] "iyql" (Just $ "foo" `OpEq` TxtValue "bar") Nothing Nothing[])
test3 = testCase "show select pi with single where clause [num]" $
eq "SELECT pi FROM iyql WHERE pi = 3.14;" (show $ SELECT ["pi"] "iyql" (Just $ "pi" `OpEq` NumValue "3.14") Nothing Nothing [])
test4 = testCase "show select foo with single IN clause" $
eq "SELECT foo FROM iyql WHERE foo IN (\"b\",\"a\",\"r\");" (show $ SELECT ["foo"] "iyql" (Just $ "foo" `OpIn` [TxtValue "b",TxtValue "a",TxtValue "r"]) Nothing Nothing [])
test5 = testCase "show select with where expression with and/or" $
do eq "SELECT foo FROM iyql WHERE foo = \"bar\" OR bar = \"foo\" AND pi = 3.14;" (show $ SELECT ["foo"] "iyql" (Just $ ("foo" `OpEq` TxtValue "bar") `OpOr` ("bar" `OpEq` TxtValue "foo") `OpAnd` ("pi" `OpEq` NumValue "3.14")) Nothing Nothing [])
test6 = testCase "read string creates correct type" $
do eq (SELECT ["foo","bar"] "iyql" (Just $ ("pi" `OpEq` NumValue "3.14") `OpOr` ("foo" `OpIn` [TxtValue "b",TxtValue "a",TxtValue "r"])) Nothing Nothing []) (read "select foo,bar from iyql where pi=3.14 or foo in (\"b\",\"a\",\"r\");")
test7 = testCase "show select escapes strings" $
do eq "SELECT * FROM iyql WHERE foo = \"foo\\\"bar\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpEq` TxtValue "foo\"bar") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo = \"foo'bar\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpEq` TxtValue "foo'bar") Nothing Nothing [])
test8 = testCase "show select with remote functions" $
do eq "SELECT * FROM iyql | iyql() | iyql(a=1) | iyql(a=1,b=\"2\");" (show $ SELECT ["*"] "iyql" Nothing Nothing Nothing [Remote "iyql" [],Remote "iyql" [("a",NumValue "1")],Remote "iyql" [("a",NumValue "1"),("b",TxtValue "2")]])
eq "SELECT * FROM iyql WHERE foo = \"bar\" | iyql() | iyql(a=1) | iyql(a=1,b=\"2\");" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpEq` TxtValue "bar") Nothing Nothing [Remote "iyql" [],Remote "iyql" [("a",NumValue "1")],Remote "iyql" [("a",NumValue "1"),("b",TxtValue "2")]])
test9 = testCase "show select with local functions" $
do eq "SELECT * FROM iyql | .iyql() | .iyql(a=1) | .iyql(a=1,b=\"2\");" (show $ SELECT ["*"] "iyql" Nothing Nothing Nothing [Local "iyql" [],Local "iyql" [("a",NumValue "1")],Local "iyql" [("a",NumValue "1"),("b",TxtValue "2")]])
eq "SELECT * FROM iyql WHERE foo = \"bar\" | .iyql() | .iyql(a=1) | .iyql(a=1,b=\"2\");" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpEq` TxtValue "bar") Nothing Nothing [Local "iyql" [],Local "iyql" [("a",NumValue "1")],Local "iyql" [("a",NumValue "1"),("b",TxtValue "2")]])
test10 = testCase "pipeline [execTransform] is in correct order" $
do { eqM (return $ ("bar"++).("foo"++) $ ">") (join $ fmap (flip execTransformM_ ">") (pipeline myLinker [Local "foo" [],Local "bar" []]))
; eqM (return $ ("bar"++).("foo"++) $ ">") (join $ fmap (flip execTransformM_ ">") (pipeline myLinker [Local "foobar1" []]))
; eqM (return $ ("bar"++).(++"foo") $ ">") (join $ fmap (flip execTransformM_ ">") (pipeline myLinker [Local "foobar2" []]))
; eqM (return $ (++"bar").(++"foo") $ ">") (join $ fmap (flip execTransformM_ ">") (pipeline myLinker [Local "foobar3" []]))
; eqM (return $ (++"bar").("foo"++) $ ">") (join $ fmap (flip execTransformM_ ">") (pipeline myLinker [Local "foobar4" []]))
}
where myLinker = M.fromList [ ("foo", Transform id (const ("foo"++)))
, ("bar", Transform id (const ("bar"++)))
, ("foobar1", Transform id (const ("foo"++)) `Seq` Transform id (const ("bar"++)))
, ("foobar2", Transform id (const (++"foo")) `Seq` Transform id (const ("bar"++)))
, ("foobar3", Transform id (const (++"foo")) `Seq` Transform id (const (++"bar")))
, ("foobar4", Transform id (const ("foo"++)) `Seq` Transform id (const (++"bar")))
]
test11 = testCase "pipeline generates error when function is not found" $
do ok (isNothing $ pipeline M.empty [Local "foo" []])
ok (isJust $ pipeline M.empty [])
test12 = testCase "ld [transform] is in correct order" $
do eqM (return $ ("bar"++).("foo"++) $ ">") (join $ fmap (flip execTransformM_ ">") (ld' myLinker (SELECT ["*"] "foobar" Nothing Nothing Nothing [Local "foo" [],Local "bar" []])))
where myLinker = M.fromList [ ("foo", Transform id (const ("foo"++)))
, ("bar", Transform id (const ("bar"++)))
]
test13 = testCase "ld generates error when function is not found" $
do ok (isNothing $ ld' M.empty (SELECT ["*"] "foobar" Nothing Nothing Nothing [Local "foo" []]))
ok (isJust $ ld' M.empty (SELECT ["*"] "foobar" Nothing Nothing Nothing []))
test14 = testCase "show desc produces correct result" $
do eq ("DESC foobar;") (show $ DESC "foobar" [])
eq ("DESC a;") (show $ DESC "a" [])
eq ("DESC a | .yql();") (show $ DESC "a" [Local "yql" []])
test15 = testCase "read desc statements produces correct type" $
do eq (DESC "foobar" []) (read "desc foobar;")
eq (DESC "foobar" [Local "tables" []]) (read "desc foobar | .tables();")
test16 = testCase "test ord implementation of security level [User > App > Any]" $
do ok (Any < App)
ok (Any < User)
ok (App < User)
ok (App > Any)
ok (User > App)
ok (User > Any)
ok (User /= App)
ok (User /= Any)
ok (User == User)
ok (App == App)
ok (Any == Any)
test17 = testCase "test readDescXml extracts attributes" $
do ok $ (Just (Table "meme.info" Any False)) == (join $ fmap readDescXml (xmlParse xml0))
ok $ (Just (Table "meme.info" App False)) == (join $ fmap readDescXml (xmlParse xml1))
ok $ (Just (Table "meme.info" User False)) == (join $ fmap readDescXml (xmlParse xml2))
ok $ (Just (Table "meme.info" Any True)) == (join $ fmap readDescXml (xmlParse xml3))
where xml0 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><query xmlns:yahoo=\"http://www.yahooapis.com/v1/base.rng\" yahoo:count=\"1\" yahoo:created=\"2010-08-09T04:08:39Z\" yahoo:lang=\"en-US\"> <results> <table name=\"meme.info\" security=\"ANY\"> <meta> <author>Yahoo! Inc.</author> <documentationURL>http://developer.yahoo.com/meme/</documentationURL> <sampleQuery>SELECT * FROM meme.info WHERE owner_guid=me</sampleQuery> </meta> <request> <select> <key name=\"owner_guid\" type=\"xs:string\"/> <key name=\"name\" type=\"xs:string\"/> </select> </request> </table> </results></query>"
xml1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><query xmlns:yahoo=\"http://www.yahooapis.com/v1/base.rng\" yahoo:count=\"1\" yahoo:created=\"2010-08-09T04:08:39Z\" yahoo:lang=\"en-US\"> <results> <table name=\"meme.info\" security=\"APP\"> <meta> <author>Yahoo! Inc.</author> <documentationURL>http://developer.yahoo.com/meme/</documentationURL> <sampleQuery>SELECT * FROM meme.info WHERE owner_guid=me</sampleQuery> </meta> <request> <select> <key name=\"owner_guid\" type=\"xs:string\"/> <key name=\"name\" type=\"xs:string\"/> </select> </request> </table> </results></query>"
xml2 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><query xmlns:yahoo=\"http://www.yahooapis.com/v1/base.rng\" yahoo:count=\"1\" yahoo:created=\"2010-08-09T04:08:39Z\" yahoo:lang=\"en-US\"> <results> <table name=\"meme.info\" security=\"USER\"> <meta> <author>Yahoo! Inc.</author> <documentationURL>http://developer.yahoo.com/meme/</documentationURL> <sampleQuery>SELECT * FROM meme.info WHERE owner_guid=me</sampleQuery> </meta> <request> <select> <key name=\"owner_guid\" type=\"xs:string\"/> <key name=\"name\" type=\"xs:string\"/> </select> </request> </table> </results></query>"
xml3 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><query xmlns:yahoo=\"http://www.yahooapis.com/v1/base.rng\" yahoo:count=\"1\" yahoo:created=\"2010-08-09T04:08:39Z\" yahoo:lang=\"en-US\"> <results> <table name=\"meme.info\" security=\"ANY\" https=\"true\"> <meta> <author>Yahoo! Inc.</author> <documentationURL>http://developer.yahoo.com/meme/</documentationURL> <sampleQuery>SELECT * FROM meme.info WHERE owner_guid=me</sampleQuery> </meta> <request> <select> <key name=\"owner_guid\" type=\"xs:string\"/> <key name=\"name\" type=\"xs:string\"/> </select> </request> </table> </results></query>"
test18 = testCase "show produces the correct stmt for updates" $
do eq ("UPDATE foobar SET foo=\"bar\";") (show $ UPDATE [("foo",TxtValue "bar")] "foobar" Nothing [])
eq ("UPDATE foobar SET f=0,o=2,o=3 WHERE a = 0 AND b = 1;") (show $ UPDATE [("f",NumValue "0"),("o",NumValue "2"),("o",NumValue "3")] "foobar" (Just $ ("a" `OpEq` NumValue "0") `OpAnd` ("b" `OpEq` NumValue "1")) [])
eq ("UPDATE foobar SET foo=\"bar\" WHERE bar = \"foo\" | .json();") (show $ UPDATE [("foo",TxtValue "bar")] "foobar" (Just $ "bar" `OpEq` TxtValue "foo") [Local "json" []])
test19 = testCase "read update statements produces the correct type" $
do eq (UPDATE [("foo",TxtValue "bar")] "foobar" Nothing []) (read "update foobar set foo='bar';")
eq (UPDATE [("foo",TxtValue "bar"),("bar",TxtValue "foo")] "foobar" (Just $ "guid" `OpEq` MeValue) []) (read "update foobar set foo='bar', bar='foo' where guid=me;")
eq (UPDATE [("foo",TxtValue "bar")] "foobar" (Just $ "guid" `OpEq` MeValue) [Local "json" []]) (read "update foobar set foo='bar' where guid=me | .json();")
test20 = testCase "update returns true for update stmts" $
do ok (update $ UPDATE [] "" Nothing [])
ok (update $ USE "" "" (UPDATE [] "" Nothing []))
test21 = testCase "read insert statements produces the correct type" $
do eq (INSERT [("foo",TxtValue "bar")] "foobar" []) (read "insert into foobar (foo) VALUES ('bar');")
eq (INSERT [("foo",NumValue "7")] "foobar" [Remote "iyql" []]) (read "insert into foobar (foo) values (7) | iyql();")
eq (INSERT [("a",TxtValue "0"),("b",TxtValue "1")] "foobar" []) (read "insert into foobar (a,b) values ('0','1');")
test22 = testCase "show produces the correct stmt for inserts" $
do eq ("INSERT INTO foobar (foo) VALUES (\"bar\");") (show $ INSERT [("foo",TxtValue "bar")] "foobar" [])
eq ("INSERT INTO foobar (a,b) VALUES (0,1);") (show $ INSERT [("a",NumValue "0"),("b",NumValue "1")] "foobar" [])
test23 = testCase "show produces the correct stmt for deletes" $
do eq ("DELETE FROM foobar WHERE guid = me;") (show $ DELETE "foobar" (Just $ "guid" `OpEq` MeValue) [])
eq ("DELETE FROM foobar;") (show $ DELETE "foobar" Nothing [])
eq ("DELETE FROM foobar WHERE guid = me | .diagnostics();") (show $ DELETE "foobar" (Just $ "guid" `OpEq` MeValue) [Local "diagnostics" []])
test24 = testCase "read delete statements produces the correct type" $
do eq (DELETE "foobar" Nothing []) (read "delete from foobar;")
eq (DELETE "foobar" (Just $ "guid" `OpEq` MeValue) []) (read "delete from foobar where guid=me;")
eq (DELETE "foobar" (Just $ "guid" `OpEq` MeValue) [Local "diagnostics" []]) (read "delete from foobar where guid=me | .diagnostics();")
test25 = testCase "delete returns true for delete stmts" $
do ok (delete $ DELETE "" Nothing [])
ok (delete $ USE "" "" (DELETE "" Nothing []))
test26 = testCase "read show tables statements produces the correct type" $
do eq (SHOWTABLES []) (read "show tables;")
eq (SHOWTABLES [Local "iyql" []]) (read "show tables | .iyql();")
eq (SHOWTABLES [Local "iyql" [],Remote "iyql" []]) (read "show tables | .iyql() | iyql();")
test27 = testCase "show produces the correct stmt for show tables" $
do eq ("SHOW TABLES;") (show $ SHOWTABLES [])
eq ("SHOW TABLES | .iyql();") (show $ SHOWTABLES [Local "iyql" []])
test28 = testCase "show produces the correct stmt using local filter [like]" $
do eq "SELECT * FROM iyql WHERE foo LIKE \"iyql%\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpLike` TxtValue "iyql%") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo NOT LIKE \"iyql%\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpNotLike` TxtValue "iyql%") Nothing Nothing [])
test29 = testCase "show produces the correct stmt using local filter [matches]" $
do eq "SELECT * FROM iyql WHERE foo MATCHES \"iyql%\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpMatches` TxtValue "iyql%") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo NOT MATCHES \"iyql%\";" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpNotMatches` TxtValue "iyql%") Nothing Nothing [])
test30 = testCase "show produces the correct stmt using local filters [is null]" $
do eq "SELECT * FROM iyql WHERE foo IS NULL;" (show $ SELECT ["*"] "iyql" (Just $ OpIsNull "foo") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo IS NOT NULL;" (show $ SELECT ["*"] "iyql" (Just $ OpIsNotNull "foo") Nothing Nothing [])
test31 = testCase "show produces the correct stmt using local filters [!=,<,>,<=,>=]" $
do eq "SELECT * FROM iyql WHERE foo != 7;" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpNe` NumValue "7") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo > 7;" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpGt` NumValue "7") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo < 7;" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpLt` NumValue "7") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo >= 7;" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpGe` NumValue "7") Nothing Nothing [])
eq "SELECT * FROM iyql WHERE foo <= 7;" (show $ SELECT ["*"] "iyql" (Just $ "foo" `OpLe` NumValue "7") Nothing Nothing [])
test32 = testCase "read parses the correct statement using local filters [like]" $
do eq (SELECT ["*"] "iyql" (Just $ "foo" `OpLike` TxtValue "iyql%") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo LIKE \"iyql%\";")
eq (SELECT ["*"] "iyql" (Just $ "foo" `OpNotLike` TxtValue "iyql%") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo NOT LIKE \"iyql%\";" )
test33 = testCase "read parses the correct stmt using local filters [is null]" $
do eq (SELECT ["*"] "iyql" (Just $ OpIsNull "foo") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo IS NULL;")
eq (SELECT ["*"] "iyql" (Just $ OpIsNotNull "foo") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo IS NOT NULL;")
test34 = testCase "read parses the correct stmt using local filters [!=,<,>,<=,>=]" $
do eq (SELECT ["*"] "iyql" (Just $ "foo" `OpNe` NumValue "7") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo != 7;")
eq (SELECT ["*"] "iyql" (Just $ "foo" `OpGt` NumValue "7") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo > 7;")
eq (SELECT ["*"] "iyql" (Just $ "foo" `OpLt` NumValue "7") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo < 7;")
eq (SELECT ["*"] "iyql" (Just $ "foo" `OpGe` NumValue "7") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo >= 7;")
eq (SELECT ["*"] "iyql" (Just $ "foo" `OpLe` NumValue "7") Nothing Nothing []) (read "SELECT * FROM iyql WHERE foo <= 7;")
test35 = testCase "show functions with remote limits produces the proper stmt" $
do eq ("SELECT * FROM iyql (0,10);") (show $ SELECT ["*"] "iyql" Nothing (Just (0,10)) Nothing [])
test36 = testCase "show functions with local limits produces the proper stmt" $
do eq ("SELECT * FROM iyql LIMIT 10 OFFSET 0;") (show $ SELECT ["*"] "iyql" Nothing Nothing (Just (0,10)) [])
test37 = testCase "read parses the correct stmt using remote limits" $
do eq (SELECT ["*"] "iyql" Nothing (Just (0,10)) Nothing []) (read "SELECT * FROM iyql (0,10);")
eq (SELECT ["*"] "iyql" Nothing (Just (0,10)) Nothing []) (read "SELECT * FROM iyql (10);")
test38 = testCase "read parses the correct stmt using local limits" $
do eq (SELECT ["*"] "iyql" Nothing Nothing (Just (13,7)) []) (read "SELECT * FROM iyql LIMIT 7 OFFSET 13;")
eq (SELECT ["*"] "iyql" Nothing Nothing (Just (0,17)) []) (read "SELECT * FROM iyql LIMIT 17;");
test39 = testCase "read parses the correct stmt using remote limits" $
do eq (SELECT ["*"] "iyql" Nothing (Just (0,10)) Nothing []) (read "SELECT * FROM iyql(0,10);")
eq (SELECT ["*"] "iyql" Nothing (Just (0,17)) Nothing []) (read "SELECT * FROM iyql (17);")
test40 = testCase "read parses the correct stmt for use statements" $
do eq (USE "foobar" "fb" (SELECT ["*"] "iyql" Nothing Nothing Nothing [])) (read "USE \"foobar\" as fb; SELECT * FROM iyql;")
eq (USE "foobar" "fb" (UPDATE [("foo",TxtValue "bar")] "iyql" Nothing [])) (read "USE \"foobar\" as fb; UPDATE iyql set foo='bar';")
eq (USE "foobar" "fb" (INSERT [("foo",TxtValue "bar")] "foobar" [])) (read "USE \"foobar\" as fb; INSERT INTO foobar (foo) VALUES ('bar');")
eq (USE "foobar" "fb" (DELETE "foobar" Nothing [])) (read "USE \"foobar\" as fb; DELETE FROM foobar;")
eq (USE "foo" "f" (USE "bar" "b" (SELECT ["*"] "iyql" Nothing Nothing Nothing []))) (read "USE \"foo\" as f; USE \"bar\" as b; SELECT * FROM iyql;")
test41 = testCase "select returns true for select statements" $
do ok (select $ SELECT [] "foobar" Nothing Nothing Nothing [])
ok (select $ USE "" "" (SELECT [] "foobar" Nothing Nothing Nothing []))
test42 = testCase "insert returns true for insert statements" $
do ok (insert $ INSERT [] "" [])
ok (insert $ USE "" "" (INSERT [] "" []))
test43 = testCase "showTables returns true for show tables statements" $
do ok (showTables $ SHOWTABLES [])
ok (showTables $ USE "" "" (SHOWTABLES []))
test44 = testCase "desc returns true for desc statements" $
do ok (desc $ DESC "" [])
ok (desc $ USE "" "" (DESC "" []))
test45 = testCase "showStmt shows subselects properly" $
do eq ("SELECT * FROM iyql WHERE foo IN (SELECT id FROM iyql WHERE bar > 7);") (showStmt $ SELECT ["*"] "iyql" (Just $ "foo" `OpIn` [SubSelect $ SELECT ["id"] "iyql" (Just $ "bar" `OpGt` (NumValue "7")) Nothing Nothing []]) Nothing Nothing [])
test46 = testCase "read select with subqueries" $
do eq (SELECT ["*"] "iyql" (Just $ "foo" `OpIn` [SubSelect $ SELECT ["id"] "iyql" (Just $ "bar" `OpGt` (NumValue "7")) Nothing Nothing []]) Nothing Nothing []) (read "select * from iyql where foo in (select id from iyql where bar>7);")
test47 = testCase "readShowTablesXml reads all tables from xml" $
do eq (Just ["foo","bar","baz"]) (fmap (readShowTablesXml) (xmlParse "<query><results><table>foo</table><table>bar</table><table>baz</table></results></query>"))
test48 = testCase "usingMe is able to identify me identifiers in simple cases" $
do ok (usingMe (SELECT ["*"] "iyql" (Just $ "foo" `OpEq` MeValue) Nothing Nothing []))
test49 = testCase "usingMe is able to identify me identifier in complex cases" $
do ok (usingMe (SELECT ["*"] "iyql" (Just $ "foo" `OpIn` [SubSelect $ SELECT ["guid"] "iyql" (Just $ "bar" `OpEq` MeValue) Nothing Nothing []]) Nothing Nothing []))
| rasata/iyql | src/test/haskell/Test/Yql/Core/Types.hs | gpl-3.0 | 24,854 | 6 | 20 | 7,056 | 6,435 | 3,339 | 3,096 | 241 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DescribeServiceErrors
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes AWS OpsWorks service errors.
--
-- Required Permissions: To use this action, an IAM user must have a Show,
-- Deploy, or Manage permissions level for the stack, or an attached policy that
-- explicitly grants permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing User Permissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DescribeServiceErrors.html>
module Network.AWS.OpsWorks.DescribeServiceErrors
(
-- * Request
DescribeServiceErrors
-- ** Request constructor
, describeServiceErrors
-- ** Request lenses
, dseInstanceId
, dseServiceErrorIds
, dseStackId
-- * Response
, DescribeServiceErrorsResponse
-- ** Response constructor
, describeServiceErrorsResponse
-- ** Response lenses
, dserServiceErrors
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
data DescribeServiceErrors = DescribeServiceErrors
{ _dseInstanceId :: Maybe Text
, _dseServiceErrorIds :: List "ServiceErrorIds" Text
, _dseStackId :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeServiceErrors' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dseInstanceId' @::@ 'Maybe' 'Text'
--
-- * 'dseServiceErrorIds' @::@ ['Text']
--
-- * 'dseStackId' @::@ 'Maybe' 'Text'
--
describeServiceErrors :: DescribeServiceErrors
describeServiceErrors = DescribeServiceErrors
{ _dseStackId = Nothing
, _dseInstanceId = Nothing
, _dseServiceErrorIds = mempty
}
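-- As an illustration only (not part of the generated module), a request can
-- be refined with the lenses below, assuming the usual lens operators are in
-- scope:
--
-- > describeServiceErrors & dseStackId .~ Just "my-stack-id"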
-- | The instance ID. If you use this parameter, 'DescribeServiceErrors' returns
-- descriptions of the errors associated with the specified instance.
dseInstanceId :: Lens' DescribeServiceErrors (Maybe Text)
dseInstanceId = lens _dseInstanceId (\s a -> s { _dseInstanceId = a })
-- | An array of service error IDs. If you use this parameter, 'DescribeServiceErrors' returns descriptions of the specified errors. Otherwise, it returns a
-- description of every error.
dseServiceErrorIds :: Lens' DescribeServiceErrors [Text]
dseServiceErrorIds =
lens _dseServiceErrorIds (\s a -> s { _dseServiceErrorIds = a })
. _List
-- | The stack ID. If you use this parameter, 'DescribeServiceErrors' returns
-- descriptions of the errors associated with the specified stack.
dseStackId :: Lens' DescribeServiceErrors (Maybe Text)
dseStackId = lens _dseStackId (\s a -> s { _dseStackId = a })
newtype DescribeServiceErrorsResponse = DescribeServiceErrorsResponse
{ _dserServiceErrors :: List "ServiceErrors" ServiceError'
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeServiceErrorsResponse where
type Item DescribeServiceErrorsResponse = ServiceError'
fromList = DescribeServiceErrorsResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _dserServiceErrors
-- | 'DescribeServiceErrorsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dserServiceErrors' @::@ ['ServiceError'']
--
describeServiceErrorsResponse :: DescribeServiceErrorsResponse
describeServiceErrorsResponse = DescribeServiceErrorsResponse
{ _dserServiceErrors = mempty
}
-- | An array of 'ServiceError' objects that describe the specified service errors.
dserServiceErrors :: Lens' DescribeServiceErrorsResponse [ServiceError']
dserServiceErrors =
lens _dserServiceErrors (\s a -> s { _dserServiceErrors = a })
. _List
instance ToPath DescribeServiceErrors where
toPath = const "/"
instance ToQuery DescribeServiceErrors where
toQuery = const mempty
instance ToHeaders DescribeServiceErrors
instance ToJSON DescribeServiceErrors where
toJSON DescribeServiceErrors{..} = object
[ "StackId" .= _dseStackId
, "InstanceId" .= _dseInstanceId
, "ServiceErrorIds" .= _dseServiceErrorIds
]
instance AWSRequest DescribeServiceErrors where
type Sv DescribeServiceErrors = OpsWorks
type Rs DescribeServiceErrors = DescribeServiceErrorsResponse
request = post "DescribeServiceErrors"
response = jsonResponse
instance FromJSON DescribeServiceErrorsResponse where
parseJSON = withObject "DescribeServiceErrorsResponse" $ \o -> DescribeServiceErrorsResponse
<$> o .:? "ServiceErrors" .!= mempty
| romanb/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/DescribeServiceErrors.hs | mpl-2.0 | 5,551 | 0 | 10 | 1,066 | 657 | 395 | 262 | 75 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Ntp.Util
( ntpPort
, WithAddrFamily (..)
, runWithAddrFamily
, getAddrFamily
, AddrFamily (..)
, Addresses
, Sockets
, resolveNtpHost
, sendPacket
, createAndBindSock
, udpLocalAddresses
, foldThese
, pairThese
, ntpTrace
, logDebug
, logInfo
, logWarning
, logError
) where
import Control.Exception (Exception, IOException, catch, throw)
import Control.Monad (void)
import Data.Bifunctor (Bifunctor (..))
import Data.Binary (encode)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (traverse_)
import Data.List (find)
import Data.Semigroup (First (..), Last (..), Option (..),
Semigroup (..))
import Data.Text (Text)
import Data.These (These (..))
import Formatting (sformat, shown, (%))
import Network.Socket (AddrInfo,
AddrInfoFlag (AI_ADDRCONFIG, AI_PASSIVE),
Family (AF_INET, AF_INET6), PortNumber, SockAddr (..),
Socket, SocketOption (ReuseAddr), SocketType (Datagram),
addrAddress, addrFamily, addrFlags, addrSocketType)
import qualified Network.Socket as Socket
import qualified Network.Socket.ByteString as Socket.ByteString (sendTo)
import Ntp.Packet (NtpPacket)
import Pos.Util.Trace (Trace, traceWith, wlogTrace)
import qualified Pos.Util.Wlog as Wlog
ntpTrace :: Trace IO (Wlog.Severity, Text)
ntpTrace = wlogTrace "NtpClient"
logWarning :: Text -> IO ()
logWarning msg = traceWith ntpTrace (Wlog.Warning, msg)
logInfo :: Text -> IO ()
logInfo msg = traceWith ntpTrace (Wlog.Info, msg)
logDebug :: Text -> IO ()
logDebug msg = traceWith ntpTrace (Wlog.Debug, msg)
logError :: Text -> IO ()
logError msg = traceWith ntpTrace (Wlog.Error, msg)
data AddrFamily = IPv4 | IPv6
deriving Show
-- |
-- Newtype wrapper which tags the type with either IPv4 or IPv6 phantom type.
data WithAddrFamily (t :: AddrFamily) a where
WithIPv6 :: a -> WithAddrFamily 'IPv6 a
WithIPv4 :: a -> WithAddrFamily 'IPv4 a
instance Show a => Show (WithAddrFamily t a) where
show a = show (getAddrFamily a) ++ " " ++ show (runWithAddrFamily a)
instance Eq a => Eq (WithAddrFamily t a) where
a == b = runWithAddrFamily a == runWithAddrFamily b
instance Functor (WithAddrFamily t) where
fmap f (WithIPv6 a) = WithIPv6 (f a)
fmap f (WithIPv4 a) = WithIPv4 (f a)
runWithAddrFamily :: WithAddrFamily t a -> a
runWithAddrFamily (WithIPv6 a) = a
runWithAddrFamily (WithIPv4 a) = a
getAddrFamily :: WithAddrFamily t a -> AddrFamily
getAddrFamily (WithIPv6 _) = IPv6
getAddrFamily (WithIPv4 _) = IPv4
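-- Illustrative examples (added for clarity, not part of the original module):
--
-- >>> getAddrFamily (WithIPv4 ())
-- IPv4
-- >>> runWithAddrFamily (WithIPv6 "x")
-- "x"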
-- |
-- Note that the composition of `foldThese . bimap f g` is a proof that
-- @'These a b@ is the [free
-- product](https://en.wikipedia.org/wiki/Free_product) of two semigroups @a@
-- and @b@.
foldThese
:: Semigroup a
=> These a a
-> a
foldThese (This a) = a
foldThese (That a) = a
foldThese (These a1 a2) = a1 <> a2
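-- For instance (illustrative only):
--
-- >>> foldThese (These "foo" "bar")
-- "foobar"
-- >>> foldThese (This "foo")
-- "foo"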
pairThese
:: These a b
-> These x y
-> Maybe (These (a, x) (b, y))
pairThese (These a b) (These x y) = Just $ These (a, x) (b, y)
pairThese (This a) (This x) = Just $ This (a, x)
pairThese (These a _) (This x) = Just $ This (a, x)
pairThese (This a) (These x _) = Just $ This (a, x)
pairThese (That b) (That y) = Just $ That (b, y)
pairThese (These _ b) (That y) = Just $ That (b, y)
pairThese (That b) (These _ y) = Just $ That (b, y)
pairThese _ _ = Nothing
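-- For instance (illustrative only):
--
-- >>> pairThese (This 1) (These 2 3)
-- Just (This (1,2))
-- >>> pairThese (This 1) (That 2)
-- Nothing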
-- |
-- Stores the created sockets. If the system supports both IPv6 and IPv4, we
-- create a socket for each family; otherwise only one.
type Sockets = These
(Last (WithAddrFamily 'IPv6 Socket))
(Last (WithAddrFamily 'IPv4 Socket))
-- |
-- A counterpart of the @'Ntp.Client.Sockets'@ data type.
type Addresses = These
(First (WithAddrFamily 'IPv6 SockAddr))
(First (WithAddrFamily 'IPv4 SockAddr))
ntpPort :: PortNumber
ntpPort = 123
-- |
-- Returns a list of alternatives: at most two entries, with at most one IPv6
-- and one IPv4 address.
resolveHost :: String -> IO (Maybe Addresses)
resolveHost host = do
let hints = Socket.defaultHints
{ addrSocketType = Datagram
, addrFlags = [AI_ADDRCONFIG] -- since we use @AF_INET@ family
}
-- TBD why catch here? Why not let @'resolveHost'@ throw the exception?
addrInfos <- Socket.getAddrInfo (Just hints) (Just host) Nothing
`catch` (\(e :: IOException) -> logError (sformat ("getAddrInfo error: "%shown) e) >> return [])
let maddr = getOption $ foldMap fn addrInfos
case maddr of
Nothing ->
logWarning $ sformat ("Host "%shown%" is not resolved") host
Just addr ->
let g :: First (WithAddrFamily t SockAddr) -> [SockAddr]
g (First a) = [runWithAddrFamily a]
addrs :: [SockAddr]
addrs = foldThese . bimap g g $ addr
in logInfo $ sformat ("Host "%shown%" is resolved: "%shown)
host addrs
return maddr
where
    -- Return the supported addresses: at most one IPv6 and one IPv4 address.
fn :: AddrInfo -> Option Addresses
fn addr = case Socket.addrFamily addr of
Socket.AF_INET6 ->
Option $ Just $ This $ First $ (WithIPv6 $ Socket.addrAddress addr)
Socket.AF_INET ->
Option $ Just $ That $ First $ (WithIPv4 $ Socket.addrAddress addr)
_ -> mempty
resolveNtpHost :: String -> IO (Maybe Addresses)
resolveNtpHost host = do
addr <- resolveHost host
return $ fmap (bimap adjustPort adjustPort) addr
where
adjustPort :: First (WithAddrFamily t SockAddr) -> First (WithAddrFamily t SockAddr)
adjustPort = fmap $ fmap (replacePort ntpPort)
replacePort :: PortNumber -> SockAddr -> SockAddr
replacePort port (SockAddrInet _ host) = SockAddrInet port host
replacePort port (SockAddrInet6 _ flow host scope) = SockAddrInet6 port flow host scope
replacePort _ sockAddr = sockAddr
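-- For example (illustrative; the host name is just a placeholder),
--
-- > resolveNtpHost "0.pool.ntp.org"
--
-- would yield at most one IPv6 and one IPv4 address, each with its port
-- replaced by 'ntpPort' (123).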
createAndBindSock
:: AddrFamily
-- ^ indicates which socket family to create, either @AF_INET6@ or @AF_INET@
-> [AddrInfo]
-- ^ list of local addresses
-> IO (Maybe Sockets)
createAndBindSock addressFamily addrs =
traverse createDo (selectAddr addrs)
where
selectAddr :: [AddrInfo] -> Maybe AddrInfo
selectAddr = find $ \addr ->
case addressFamily of
IPv6 -> addrFamily addr == AF_INET6
IPv4 -> addrFamily addr == AF_INET
createDo addr = do
sock <- Socket.socket (addrFamily addr) Datagram Socket.defaultProtocol
Socket.setSocketOption sock ReuseAddr 1
Socket.bind sock (addrAddress addr)
logInfo $
sformat ("Created socket (family/addr): "%shown%"/"%shown)
(addrFamily addr) (addrAddress addr)
case addressFamily of
IPv6 -> return $ This $ Last $ (WithIPv6 sock)
IPv4 -> return $ That $ Last $ (WithIPv4 sock)
udpLocalAddresses :: IO [AddrInfo]
udpLocalAddresses = do
let hints = Socket.defaultHints
{ addrFlags = [AI_PASSIVE]
, addrSocketType = Datagram }
#if MIN_VERSION_network(2,8,0)
port = Socket.defaultPort
#else
port = Socket.aNY_PORT
#endif
-- Hints Host Service
Socket.getAddrInfo (Just hints) Nothing (Just $ show port)
data SendToException
= NoMatchingSocket
| SendToIOException AddrFamily IOException
deriving Show
instance Exception SendToException
-- |
-- Send a request to @addr :: Addresses@ using @sock :: Sockets@.
sendTo
:: Sockets
-- ^ sockets to use
-> ByteString
-> Addresses
-- ^ addresses to send to
-> IO ()
sendTo sock bs addr = case fmap (foldThese . bimap fn fn) $ pairThese sock addr of
Just io -> io
Nothing -> throw NoMatchingSocket
where
fn :: ( Last (WithAddrFamily t Socket)
, First (WithAddrFamily t SockAddr)
)
-> IO ()
fn (Last sock_, First addr_) =
void (Socket.ByteString.sendTo (runWithAddrFamily sock_) bs (runWithAddrFamily addr_))
`catch` handleIOException (getAddrFamily addr_)
handleIOException :: AddrFamily -> IOException -> IO ()
handleIOException addressFamily e = throw (SendToIOException addressFamily e)
-- |
-- Low level primitive which sends a request to a single NTP server.
sendPacket
:: Sockets
-> NtpPacket
-> [Addresses]
-> IO ()
sendPacket sock packet addrs = do
let bs = LBS.toStrict $ encode $ packet
traverse_
(\addr ->
(sendTo sock bs addr)
`catch` handleSendToException addr
)
addrs
where
handleSendToException :: Addresses -> SendToException -> IO ()
handleSendToException addr e@NoMatchingSocket =
logError $ sformat
("sendPacket SendToException: "%shown%" "%shown%": "%shown) addr sock e
handleSendToException addr (SendToIOException addressFamily ioerr) = do
logError $ sformat
("sendPacket IOError: "%shown%" "%shown%": "%shown) addr sock ioerr
case (addr, addressFamily) of
-- try to send the packet to the other address in case the current
-- system does not support IPv4/6.
(These _ r, IPv6) -> do
logDebug $ sformat ("sendPacket re-sending using: "%shown) (runWithAddrFamily $ getFirst r)
sendPacket sock packet [That r]
(These l _, IPv4) -> do
logDebug $ sformat ("sendPacket re-sending using: "%shown) (runWithAddrFamily $ getFirst l)
sendPacket sock packet [This l]
_ ->
logDebug "sendPacket: not retrying"
| input-output-hk/cardano-sl | networking/src/Ntp/Util.hs | apache-2.0 | 10,248 | 0 | 18 | 2,895 | 2,906 | 1,537 | 1,369 | 217 | 4 |
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD2
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Parser.Keywords where
import Data.HashSet
import Data.Monoid
-- | This is the set of keywords that can only occur at the beginning of the line for auto-completion purposes.
startingKeywords :: HashSet String
startingKeywords = fromList
[ "abstract"
, "class"
, "data"
, "database"
, "export"
, "field"
, "foreign"
, "import"
, "instance"
, "private"
, "type"
]
-- | This is the set of keywords that can occur anywhere on the line for auto-completion purposes.
otherKeywords :: HashSet String
otherKeywords = fromList
[ "case"
, "constraint"
, "constructor"
, "do"
, "exists"
, "forall"
, "hole"
, "in"
, "infix"
, "infixl"
, "infixr"
, "let"
, "of"
, "phi"
, "postfix"
, "prefix"
, "rho"
, "subtype"
, "table"
, "where"
, "_"
, "Γ"
, "ρ"
, "φ"
]
-- | The set of all keywords.
--
-- @'keywords' = 'startingKeywords' '<>' 'otherKeywords'@
keywords :: HashSet String
keywords = startingKeywords <> otherKeywords
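-- For example (illustrative only):
--
-- >>> member "class" keywords
-- True
-- >>> member "keyword" keywords
-- False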
| PipocaQuemada/ermine | src/Ermine/Parser/Keywords.hs | bsd-2-clause | 1,333 | 0 | 6 | 270 | 185 | 120 | 65 | 44 | 1 |
module SubHask.Compatibility.HyperLogLog
where
import SubHask.Algebra
import SubHask.Category
import SubHask.Internal.Prelude
import qualified Data.HyperLogLog as H
import qualified Data.Reflection as R
import qualified Data.Semigroup as S
import qualified Prelude as P
-- FIXME: move the below imports to separate compatibility layers
import qualified Data.Bytes.Serial as S
import qualified Data.Approximate as A
import qualified Control.Lens as L
type instance Scalar Int64 = Int64
newtype HyperLogLog p a = H (H.HyperLogLog p)
mkMutable [t| forall p a. HyperLogLog p a |]
type instance Scalar (HyperLogLog p a) = Integer -- FIXME: make Int64
type instance Logic (HyperLogLog p a) = Bool
type instance Elem (HyperLogLog p a) = a
instance Semigroup (HyperLogLog p a) where
(H h1)+(H h2) = H $ h1 S.<> h2
instance Abelian (HyperLogLog p a)
instance
( R.Reifies p Integer
) => Normed (HyperLogLog p a)
where
size (H h) = P.fromIntegral $ L.view A.estimate (H.size h)
instance
( R.Reifies p Integer
, S.Serial a
) => Constructible (HyperLogLog p a)
where
cons a (H h) = H $ H.insert a h
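-- A rough usage sketch (illustrative only; @h1@ and @h2@ are assumed to be
-- sketches built with 'cons' at the same reified precision @p@):
--
-- > size (h1 + h2)  -- approximate cardinality of the union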
| Drezil/subhask | src/SubHask/Compatibility/HyperLogLog.hs | bsd-3-clause | 1,150 | 0 | 10 | 236 | 366 | 208 | 158 | -1 | -1 |
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012
--
-- License : BSD-style
--
-- Maintainer : hans@hanshoglund.se
-- Stability : experimental
-- Portability : portable
--
-- A Haskell representation of MusicXML.
-- You may want to use the "Data.Music.MusicXml.Simple" module to generate the representation.
--
-- For an introduction to MusicXML, see <http://www.makemusic.com/musicxml/tutorial>.
--
-------------------------------------------------------------------------------------
module Data.Music.MusicXml (
-- * Score
Score(..),
ScoreHeader(..),
Identification(..),
Creator(..),
Defaults(..),
ScoreAttrs(..),
PartAttrs(..),
MeasureAttrs(..),
-- ** Part list
PartList(..),
PartListElem(..),
-- * Music
Music(..),
MusicElem(..),
-- ** Attributes
Attributes(..),
TimeSignature(..),
ClefSign(..),
-- ** Notes
Note(..),
FullNote(..),
IsChord,
noChord,
Tie(..),
noTies,
NoteProps(..),
HasNoteProps(..),
-- ** Notations
Notation(..),
Articulation(..),
Ornament(..),
Technical(..),
-- ** Directions
Direction(..),
-- ** Lyrics
Lyric(..),
-- * Basic types
-- ** Pitch
Pitch(..),
DisplayPitch(..),
PitchClass,
Semitones(..),
noSemitones,
Octaves(..),
Fifths(..),
Line(..),
Mode(..),
Accidental(..),
-- ** Time
Duration(..),
NoteType(..),
Divs(..),
NoteVal(..),
NoteSize(..),
Beat(..),
BeatType(..),
-- ** Dynamics
Dynamics,
-----------------------------------------------------------------------------
-- ** Misc
StemDirection(..),
NoteHead(..),
LineType(..),
Level(..),
BeamType(..),
StartStop(..),
StartStopChange(..),
StartStopContinue(..),
StartStopContinueChange(..),
-----------------------------------------------------------------------------
-- * Import and export functions
-----------------------------------------------------------------------------
toXml,
showXml
) where
import Text.XML.Light hiding (Line)
import Data.Music.MusicXml.Score
import Data.Music.MusicXml.Time
import Data.Music.MusicXml.Pitch
import Data.Music.MusicXml.Dynamics
import Data.Music.MusicXml.Read
import Data.Music.MusicXml.Write
import Data.Music.MusicXml.Write.Score
-- --------------------------------------------------------------------------------
-- Import and export functions
-- --------------------------------------------------------------------------------
-- |
-- Render a score as a MusicXML string.
showXml :: Score -> String
showXml = ppTopElement . toXml
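-- A typical use (illustrative; @score@ is assumed to be a 'Score' built
-- elsewhere, e.g. with "Data.Music.MusicXml.Simple"):
--
-- > writeFile "score.xml" (showXml score)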
-- |
-- Render a score as MusicXML.
toXml :: Score -> Element
toXml = fromSingle . write
-- --------------------------------------------------------------------------------
fromSingle :: [a] -> a
fromSingle [x] = x
fromSingle _ = error "fromSingle: non-single list"
| music-suite/musicxml2 | src/Data/Music/MusicXml.hs | bsd-3-clause | 3,371 | 0 | 6 | 922 | 556 | 395 | 161 | 74 | 1 |
module Database.DSH.VSL.VirtSegAlgebra where
| ulricha/dsh | src/Database/DSH/VSL/VirtSegAlgebra.hs | bsd-3-clause | 45 | 0 | 3 | 3 | 8 | 6 | 2 | 1 | 0 |