code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Tests.Control.Monad(tests) where
import Test.HUnitPlus.Base
import qualified Tests.Control.Monad.Symbols as Symbols
-- | Top-level test group for the @Control.Monad@ hierarchy; currently
-- it only aggregates the 'Symbols' tests.
tests :: Test
tests = "Monad" ~: [Symbols.tests]
| emc2/compiler-misc | test/Tests/Control/Monad.hs | bsd-3-clause | 1,732 | 0 | 7 | 289 | 76 | 60 | 16 | 5 | 1 |
-- |
-- Module : Foundation.VFS.FilePath
-- License : BSD-style
-- Maintainer : foundation
-- Stability : experimental
-- Portability : portable
--
-- # Opaque implementation for FilePath
--
-- The underlying type of a FilePath is a `Foundation.ByteArray`. It is indeed like
-- this because for some systems (Unix systems) a `FilePath` is a null
-- terminated array of bytes.
--
-- # FilePath and FileName for type checking validation
--
-- In order to add some constraint at compile time, it is not possible to
-- append (`</>`) a `FilePath` to another `FilePath`.
-- You can only append (`</>`) a `FileName` to a given `FilePath`.
--
{-# LANGUAGE CPP #-}
module Foundation.VFS.FilePath
( FilePath
, Relativity(..)
, FileName
-- * conversion
, filePathToString
, filePathToLString
-- ** unsafe
, unsafeFilePath
, unsafeFileName
, extension
) where
import Basement.Compat.Base
import Basement.Compat.Semigroup
import Foundation.Collection
import Foundation.Array
import Foundation.String (Encoding(..), ValidationFailure, toBytes, fromBytes, String)
import Foundation.VFS.Path(Path(..))
import qualified Data.List
-- ------------------------------------------------------------------------- --
-- System related helpers --
-- ------------------------------------------------------------------------- --
-- Platform-specific separator definitions, selected at compile time
-- via CPP.
#ifdef mingw32_HOST_OS
pathSeparatorWINC :: Char
pathSeparatorWINC = '\\'

-- | define the Path separator for Windows systems : '\\'
pathSeparatorWIN :: String
pathSeparatorWIN = fromString [pathSeparatorWINC]
#else
pathSeparatorPOSIXC :: Char
pathSeparatorPOSIXC = '/'

-- | define the Path separator for POSIX systems : '/'
pathSeparatorPOSIX :: String
pathSeparatorPOSIX = fromString [pathSeparatorPOSIXC]
#endif

-- | The path separator (as 'Char' and as 'String') for the platform
-- this library was compiled for.
pathSeparatorC :: Char
pathSeparator :: String
#ifdef mingw32_HOST_OS
pathSeparatorC = pathSeparatorWINC
pathSeparator = pathSeparatorWIN
#else
pathSeparatorC = pathSeparatorPOSIXC
pathSeparator = pathSeparatorPOSIX
#endif
-- ------------------------------------------------------------------------- --
-- FilePath --
-- ------------------------------------------------------------------------- --
-- | information about type of FilePath
--
-- A file path being only `Relative` or `Absolute`.
data Relativity = Absolute | Relative
deriving (Eq, Show)
-- | FilePath is a collection of FileName
--
-- TODO: Eq and Ord are implemented using Show
-- This is not very efficient and would need to be improved
-- Also, it is possible the ordering is not necessary what we want
-- in this case.
--
-- A FilePath is one of the following:
--
-- * An Absolute:
-- * starts with one of the follwing "/"
-- * A relative:
-- * don't start with a "/"
--
-- * authorised:
-- * "/"
-- * "/file/path"
-- * "."
-- * ".."
-- * "work/haskell/hs-foundation"
--
-- * unauthorised
-- * "path//"
-- | A 'FilePath' is a 'Relativity' tag plus the ordered list of path
-- components.  Per the TODO above, Eq and Ord piggyback on 'Show',
-- which is simple but not efficient.
data FilePath = FilePath Relativity [FileName]

instance Show FilePath where
    show = filePathToLString
instance Eq FilePath where
    (==) a b = (==) (show a) (show b)
instance Ord FilePath where
    compare a b = compare (show a) (show b)
-- | error associated to filepath manipulation
data FilePath_Invalid
    = ContiguousPathSeparator
      -- ^ this means there were 2 contiguous path separators.
      --
      -- This is not valid in Foundation's FilePath specifications
    deriving (Typeable, Show)
instance Exception FilePath_Invalid

-- | Parse a string literal into a 'FilePath'.
--
-- NOTE(review): this instance is partial — it 'throw's
-- 'ContiguousPathSeparator' from pure code on input such as @"a//b"@.
-- The empty literal maps to an absolute path with no components
-- (i.e. the root) — presumably intentional; confirm with callers.
instance IsString FilePath where
    fromString [] = FilePath Absolute mempty
    fromString s@(x:xs)
        | hasContigueSeparators s = throw ContiguousPathSeparator
        | otherwise = FilePath relativity $ case relativity of
            Absolute -> fromString <$> splitOn isSeparator xs
            Relative -> fromString <$> splitOn isSeparator s
      where
        -- A leading separator means absolute; it is stripped (only the
        -- tail @xs@ is split) before splitting into components.
        relativity :: Relativity
        relativity = if isSeparator x then Absolute else Relative
-- | A filename (or path entity) in the FilePath
--
-- * Authorised
-- * ""
-- * "."
-- * ".."
-- * "foundation"
-- * Unauthorised
-- * "/"
-- * "file/"
-- * "/file"
-- * "file/path"
--
-- | A single path component, stored as raw bytes (see the
-- authorised/unauthorised examples above).
data FileName = FileName (UArray Word8)
    deriving (Eq)
-- | errors related to FileName manipulation
data FileName_Invalid
    = ContainsNullByte
      -- ^ this means a null byte was found in the FileName
    | ContainsSeparator
      -- ^ this means a path separator was found in the FileName
    | EncodingError ValidationFailure
      -- ^ encoding error
    | UnknownTrailingBytes (UArray Word8)
      -- ^ some unknown trailing bytes found
    deriving (Typeable, Show)
instance Exception FileName_Invalid

-- NOTE(review): 'show' decodes the stored bytes as UTF-8 and may
-- therefore throw a 'FileName_Invalid' (see 'fileNameToLString').
instance Show FileName where
    show = fileNameToLString

-- | Parse a string literal into a 'FileName'.  Partial: 'throw's
-- 'ContainsNullByte' / 'ContainsSeparator' on invalid input.
instance IsString FileName where
    fromString [] = FileName mempty
    fromString xs | hasNullByte xs  = throw ContainsNullByte
                  | hasSeparator xs = throw ContainsSeparator
                  | otherwise       = FileName $ toBytes UTF8 $ fromString xs
-- | True when the candidate file name contains a NUL character, which
-- is forbidden in a 'FileName'.
hasNullByte :: [Char] -> Bool
hasNullByte cs = '\0' `Data.List.elem` cs
-- | True when the candidate file name contains the platform path
-- separator ('pathSeparatorC') and is therefore not a valid component.
hasSeparator :: [Char] -> Bool
hasSeparator = Data.List.elem pathSeparatorC

-- | Is this character the platform path separator?
isSeparator :: Char -> Bool
isSeparator = (==) pathSeparatorC
-- | Check whether the string contains two directly adjacent path
-- separators (e.g. @"a//b"@), which the FilePath specification above
-- forbids.
--
-- Fix: recurse on @(x2:xs)@ so every adjacent pair is inspected.  The
-- previous definition recursed on @xs@, examining only non-overlapping
-- pairs, and so missed contiguous separators at odd offsets such as
-- @"a//b"@.
hasContigueSeparators :: [Char] -> Bool
hasContigueSeparators [] = False
hasContigueSeparators [_] = False
hasContigueSeparators (x1:x2:xs) =
    (isSeparator x1 && x1 == x2) || hasContigueSeparators (x2:xs)
-- NOTE: (<>) on 'FileName' is raw byte-level concatenation, NOT path
-- joining; use '</>' on 'FilePath' to add components.
instance Semigroup FileName where
    (<>) (FileName a) (FileName b) = FileName $ a `mappend` b
instance Monoid FileName where
    mempty = FileName mempty
    mappend (FileName a) (FileName b) = FileName $ a `mappend` b

instance Path FilePath where
    type PathEnt FilePath = FileName
    type PathPrefix FilePath = Relativity
    type PathSuffix FilePath = ()
    (</>) = join
    splitPath (FilePath r xs) = (r, xs, ())
    buildPath (r, xs , _) = FilePath r xs

-- compare to the original </>, this type disallow to be able to append an absolute filepath to a filepath
-- | Append one 'FileName' component to a 'FilePath'; appending the
-- empty file name is a no-op.
join :: FilePath -> FileName -> FilePath
join p (FileName x) | null x = p
join (FilePath r xs) x = FilePath r $ snoc xs x
-- | Render a 'FilePath' as a 'String': @"/"@ for the empty absolute
-- path, @"."@ for the empty relative path, otherwise the components
-- joined with the platform separator (absolute paths get a leading
-- separator).
filePathToString :: FilePath -> String
filePathToString (FilePath Absolute []) = fromString [pathSeparatorC]
filePathToString (FilePath Relative []) = fromString "."
filePathToString (FilePath Absolute fns) = cons pathSeparatorC $ filenameIntercalate fns
filePathToString (FilePath Relative fns) = filenameIntercalate fns

-- | Join rendered components with the platform separator.
filenameIntercalate :: [FileName] -> String
filenameIntercalate = mconcat . Data.List.intersperse pathSeparator . fmap fileNameToString

-- | convert a FileName into a String
--
-- This function may throw an exception associated to the encoding
fileNameToString :: FileName -> String
fileNameToString (FileName fp) =
    -- FIXME probably incorrect considering windows.
    -- this is just to get going to be able to be able to reuse System.IO functions which
    -- works on [Char]
    case fromBytes UTF8 fp of
        (s, Nothing, bs)
            | null bs -> s
            | otherwise -> throw $ UnknownTrailingBytes bs
        (_, Just err, _) -> throw $ EncodingError err
-- | conversion of FileName into a list of Char
--
-- this function may throw exceptions
fileNameToLString :: FileName -> [Char]
fileNameToLString = toList . fileNameToString

-- | conversion of a FilePath into a list of Char
--
-- this function may throw exceptions
filePathToLString :: FilePath -> [Char]
filePathToLString = toList . filePathToString

-- | build a file path from a given list of filename
--
-- this is unsafe and is mainly needed for testing purpose
unsafeFilePath :: Relativity -> [FileName] -> FilePath
unsafeFilePath = FilePath

-- | build a file name from a given ByteArray
--
-- this is unsafe and is mainly needed for testing purpose
unsafeFileName :: UArray Word8 -> FileName
unsafeFileName = FileName

-- | The last @0x2E@ (\'.\')-separated chunk of the name, e.g. the
-- extension of @"foo.tar.gz"@ is @"gz"@; 'Nothing' when the name
-- contains no dot at all.
extension :: FileName -> Maybe FileName
extension (FileName fn) = case splitOn (\c -> c == 0x2E) fn of
    [] -> Nothing
    [_] -> Nothing
    xs -> Just $ FileName $ last $ nonEmpty_ xs
| vincenthz/hs-foundation | foundation/Foundation/VFS/FilePath.hs | bsd-3-clause | 8,201 | 0 | 12 | 1,812 | 1,469 | 824 | 645 | -1 | -1 |
{-# LANGUAGE RecordWildCards, ViewPatterns #-}
module Development.Bake.Pretty(ovenPretty, ovenPrettyMerge, Pretty(..)) where
import Development.Bake.Core.Type
import Data.List.Extra
-- | A value annotated with a human-readable label.
data Pretty a = Pretty String a deriving (Read,Show,Eq)

instance Stringy a => Stringy (Pretty a) where
    -- Serialised as @label=payload@.
    stringyTo (Pretty a b) = a ++ "=" ++ stringyTo b
    -- Split on the first @=@; input without one gets an empty label.
    stringyFrom s = case breakOn "=" s of
        (a,_:b) -> Pretty a $ stringyFrom b
        _ -> Pretty "" $ stringyFrom s
    stringyPretty (Pretty a b) = a ++ "=" ++ stringyPretty b
-- | Define an oven that allows @foo=...@ annotations to be added to the strings.
-- These can be used to annotate important information, e.g. instead of talking about
-- Git SHA1's, you can talk about @person=SHA1@ or @branch=SHA1@.
ovenPretty :: Oven state patch test -> Oven state (Pretty patch) test
ovenPretty oven@Oven{..} = oven
    {ovenUpdate = \s ps -> ovenUpdate s (map unpretty ps)
    ,ovenPrepare = \s ps -> ovenPrepare s (map unpretty ps)
    ,ovenPatchExtra = \s p -> ovenPatchExtra s (fmap unpretty p)
    ,ovenSupersede = \p1 p2 -> ovenSupersede (unpretty p1) (unpretty p2)
    }
    where
        -- Strip the label, recovering the underlying patch.
        unpretty :: Pretty a -> a
        unpretty (Pretty _ x) = x

-- | An oven suitable for use with 'ovenPretty' that supersedes patches which have the same
-- pretty name.
ovenPrettyMerge :: Oven state (Pretty patch) test -> Oven state (Pretty patch) test
ovenPrettyMerge oven = oven
    {ovenSupersede = \(Pretty p1 _) (Pretty p2 _) -> p1 == p2
    }
| Pitometsu/bake | src/Development/Bake/Pretty.hs | bsd-3-clause | 1,502 | 0 | 10 | 323 | 466 | 246 | 220 | 22 | 1 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
import Import as I
import Data.Time
import Data.List as I (isPrefixOf)
import Text.Blaze.Html (preEscapedToHtml)
import Text.Blaze.Html.Renderer.String (renderHtml)
import Yesod.Auth (requireAuthId)
import Yesod.Auth.HashDB (setSaltAndPasswordHash)
import Data.Digest.Pure.SHA (sha1, showDigest)
import Data.Text as T (append, pack, unpack)
import Data.ByteString.Lazy.Char8 as BS (pack)
import Data.Maybe
-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
-- | The front page: every approved, headline-promoted article paired
-- with its author record, newest first.
getHomeR :: Handler Html
getHomeR = do
    articles <- runDB $ selectList [ArticlePromoteHeadline ==. True, ArticleApproved ==. True] [Desc ArticleId]
    -- Idiom fix: @sequence $ fmap (\x -> f x)@ is just @mapM f@.
    users <- mapM articleAuthorName articles
    let zippedArticles = I.zip articles users
    defaultLayout $ do
        aDomId <- newIdent
        setTitle "乃村研究室ホームページ"
        $(widgetFile "homepage")
-- | Render the password-change form for the logged-in user.
-- NOTE(review): @user@ and @aDomId@ look unused here, but they may be
-- referenced inside the spliced template — confirm before removing.
getChangePassR :: Handler Html
getChangePassR = do
    userId <- requireAuthId
    user <- runDB $ get404 userId
    defaultLayout $ do
        aDomId <- newIdent
        setTitle "パスワード変更"
        $(widgetFile "changePass")

-- | Store the salted hash of the submitted password for the logged-in
-- user, then redirect home with a confirmation message.
postChangePassR :: Handler Html
postChangePassR = do
    userId <- requireAuthId
    user <- runDB $ get404 userId
    let salt = userSalt user
    inputPassword <- runInputPost $ ireq textField "password"
    runDB $ do
        update userId
            [ UserPassword =. (Just $ saltedHash salt inputPassword) ]
    setMessage $ toHtml $ (userIdent user) <> " is updated."
    redirect $ HomeR
-- local functions --
-- | Look up the author record of an article; 'Nothing' when the user
-- row no longer exists.
articleAuthorName :: Entity Article -> Handler (Maybe User)
articleAuthorName (Entity _ article) = do
    runDB $ get (articleUser article)
-- | The author's login name, or a placeholder when the user record is
-- missing.
displayAuthorName :: Maybe User -> Text
displayAuthorName = maybe "Unknown user" userIdent
-- | Render an article body down to a short plain-text headline,
-- keeping only @<br>@ of the original markup.
takeHeadLine :: Html -> Html
takeHeadLine content = preEscapedToHtml $ prettyHeadLine $ renderHtml content

-- | Fold the article to a few lines and strip every tag except @<br>@
-- (protected by temporarily rewriting it to @_br_@ and back).
prettyHeadLine :: String -> String
prettyHeadLine article = gsub "_br_" "<br>" $ stripTags $ gsub "<br>" "_br_" $ foldArticle article
-- | Remove anything between @<@ and @>@ (inclusive) from the string.
stripTags :: [Char] -> [Char]
stripTags str = stripTags' False str

-- | Worker: the flag records whether we are currently inside a tag.
--
-- Fix: added the @[]@ equation — the previous version had no base case
-- and crashed with a pattern-match failure on the empty string (which
-- 'prettyHeadLine' can produce for an empty article body).
--
-- NOTE(review): the last-character guard intentionally preserves two
-- quirks of the original: a final character inside an unterminated tag
-- is emitted, and a final @>@ outside any tag is dropped.
stripTags' :: Bool -> [Char] -> [Char]
stripTags' _ [] = []
stripTags' inTag (x:xs)
  | null xs   = if x == '>' then [] else [x]
  | inTag     = stripTags' (x /= '>') xs
  | x == '<'  = stripTags' True xs
  | otherwise = x : stripTags' False xs
-- | Replace every (non-overlapping) occurrence of @x@ with @y@.
--
-- Fix: an empty search pattern now returns the input unchanged; the
-- previous definition looped forever on it, because @[]@ is a prefix
-- of every list so the input was never consumed.
gsub :: Eq a => [a] -> [a] -> [a] -> [a]
gsub _ _ [] = []
gsub [] _ str = str
gsub x y str@(s:ss)
  | x `I.isPrefixOf` str = y ++ gsub x y (drop (length x) str)
  | otherwise            = s : gsub x y ss
-- | Truncate an article: cut at the explicit folding marker when
-- present, otherwise keep the first 'defaultNumOfLines' lines.
foldArticle :: String -> String
foldArticle content = case foldAtFolding content of
    Just value -> value
    Nothing -> I.unlines $ I.take defaultNumOfLines $ I.lines content

-- | The part before the @<!-- folding -->@ marker, or 'Nothing' when
-- the marker does not occur in the content.
foldAtFolding :: String -> Maybe String
foldAtFolding content = if (I.length splitContent) > 1
    then Just $ I.head splitContent
    else Nothing
  where splitContent = split "<!-- folding -->" content
-- | Number of lines kept when an article has no explicit folding marker.
defaultNumOfLines :: Int
defaultNumOfLines = 3

-- | How many of the most recent articles are treated as \"new\".
numOfNewArticles :: Int
numOfNewArticles = 3
-- We want to import Data.List.Utils (split), but...
-- | Split a list on every non-overlapping occurrence of @delim@,
-- mirroring 'Data.List.Utils.split'.  A trailing delimiter yields a
-- final empty chunk; the delimiters themselves are not returned.
split :: Eq a => [a] -> [a] -> [[a]]
split _ [] = []
split delim str =
    let (firstline, remainder) = breakList (startswith delim) str
    in
        firstline : case remainder of
            [] -> []
            x -> if x == delim
                -- Input ends in exactly one delimiter: emit one empty chunk.
                then [] : []
                else split delim
                        (drop (length delim) x)
-- | Does the second list begin with the first?
startswith :: Eq a => [a] -> [a] -> Bool
startswith prefix whole = prefix `isPrefixOf` whole
-- | Like 'break', but the predicate inspects the whole remaining
-- suffix instead of a single element.
breakList :: ([a] -> Bool) -> [a] -> ([a], [a])
breakList func = spanList (not . func)

-- | Like 'span', but the predicate inspects the whole remaining
-- suffix instead of a single element.
spanList :: ([a] -> Bool) -> [a] -> ([a], [a])
spanList _ [] = ([], [])
spanList p whole@(x:rest)
  | p whole   = let (taken, leftover) = spanList p rest
                in (x : taken, leftover)
  | otherwise = ([], whole)
-- | Hex-encoded SHA1 of the salt prepended to the password.
-- NOTE(review): SHA1 is no longer considered adequate for password
-- hashing; consider a dedicated KDF (bcrypt/scrypt) if this is revisited.
saltedHash :: Text -> Text -> Text
saltedHash salt = T.pack . showDigest . sha1 . BS.pack . T.unpack . T.append salt
| kobayashi1027/nomnichi-haskell | Handler/Home.hs | bsd-2-clause | 4,641 | 0 | 16 | 1,278 | 1,474 | 767 | 707 | 106 | 4 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Encode/Decode/Hash Add-on</title>
<maps>
<homeID>encoder</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/encoder/src/main/javahelp/org/zaproxy/addon/encoder/resources/help_ko_KR/helpset_ko_KR.hs | apache-2.0 | 974 | 77 | 69 | 156 | 419 | 212 | 207 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1994-1998
\section[UniqSet]{Specialised sets, for things with @Uniques@}
Based on @UniqFMs@ (as you would expect).
Basically, the things need to be in class @Uniquable@.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
module UniqSet (
-- * Unique set type
UniqSet, -- type synonym for UniqFM a
getUniqSet,
pprUniqSet,
-- ** Manipulating these sets
emptyUniqSet,
unitUniqSet,
mkUniqSet,
addOneToUniqSet, addListToUniqSet,
delOneFromUniqSet, delOneFromUniqSet_Directly, delListFromUniqSet,
delListFromUniqSet_Directly,
unionUniqSets, unionManyUniqSets,
minusUniqSet, uniqSetMinusUFM,
intersectUniqSets,
restrictUniqSetToUFM,
uniqSetAny, uniqSetAll,
elementOfUniqSet,
elemUniqSet_Directly,
filterUniqSet,
filterUniqSet_Directly,
sizeUniqSet,
isEmptyUniqSet,
lookupUniqSet,
lookupUniqSet_Directly,
partitionUniqSet,
mapUniqSet,
unsafeUFMToUniqSet,
nonDetEltsUniqSet,
nonDetKeysUniqSet,
nonDetFoldUniqSet,
nonDetFoldUniqSet_Directly
) where
import GhcPrelude
import UniqFM
import Unique
import Data.Coerce
import Outputable
import Data.Foldable (foldl')
import Data.Data
import qualified Data.Semigroup as Semi
-- Note [UniqSet invariant]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- UniqSet has the following invariant:
--   The keys in the map are the uniques of the values
-- It means that to implement mapUniqSet you have to update
-- both the keys and the values.

-- | A set of values keyed on their 'Unique's; a thin wrapper over
-- 'UniqFM' maintaining the invariant above.
newtype UniqSet a = UniqSet {getUniqSet' :: UniqFM a}
                  deriving (Data, Semi.Semigroup, Monoid)

-- | The empty set.
emptyUniqSet :: UniqSet a
emptyUniqSet = UniqSet emptyUFM

-- | A singleton set.
unitUniqSet :: Uniquable a => a -> UniqSet a
unitUniqSet x = UniqSet $ unitUFM x x

-- | Build a set from a list of elements.
mkUniqSet :: Uniquable a => [a] -> UniqSet a
mkUniqSet = foldl' addOneToUniqSet emptyUniqSet

addOneToUniqSet :: Uniquable a => UniqSet a -> a -> UniqSet a
addOneToUniqSet (UniqSet set) x = UniqSet (addToUFM set x x)

addListToUniqSet :: Uniquable a => UniqSet a -> [a] -> UniqSet a
addListToUniqSet = foldl' addOneToUniqSet

delOneFromUniqSet :: Uniquable a => UniqSet a -> a -> UniqSet a
delOneFromUniqSet (UniqSet s) a = UniqSet (delFromUFM s a)

-- | Delete by 'Unique' rather than by value.
delOneFromUniqSet_Directly :: UniqSet a -> Unique -> UniqSet a
delOneFromUniqSet_Directly (UniqSet s) u = UniqSet (delFromUFM_Directly s u)

delListFromUniqSet :: Uniquable a => UniqSet a -> [a] -> UniqSet a
delListFromUniqSet (UniqSet s) l = UniqSet (delListFromUFM s l)

delListFromUniqSet_Directly :: UniqSet a -> [Unique] -> UniqSet a
delListFromUniqSet_Directly (UniqSet s) l =
  UniqSet (delListFromUFM_Directly s l)

unionUniqSets :: UniqSet a -> UniqSet a -> UniqSet a
unionUniqSets (UniqSet s) (UniqSet t) = UniqSet (plusUFM s t)

unionManyUniqSets :: [UniqSet a] -> UniqSet a
unionManyUniqSets = foldl' (flip unionUniqSets) emptyUniqSet

minusUniqSet :: UniqSet a -> UniqSet a -> UniqSet a
minusUniqSet (UniqSet s) (UniqSet t) = UniqSet (minusUFM s t)

intersectUniqSets :: UniqSet a -> UniqSet a -> UniqSet a
intersectUniqSets (UniqSet s) (UniqSet t) = UniqSet (intersectUFM s t)

-- | Keep only the elements whose 'Unique' occurs as a key of the map.
restrictUniqSetToUFM :: UniqSet a -> UniqFM b -> UniqSet a
restrictUniqSetToUFM (UniqSet s) m = UniqSet (intersectUFM s m)

-- | Drop the elements whose 'Unique' occurs as a key of the map.
uniqSetMinusUFM :: UniqSet a -> UniqFM b -> UniqSet a
uniqSetMinusUFM (UniqSet s) t = UniqSet (minusUFM s t)
elementOfUniqSet :: Uniquable a => a -> UniqSet a -> Bool
elementOfUniqSet a (UniqSet s) = elemUFM a s

-- | Membership test by 'Unique' rather than by value.
elemUniqSet_Directly :: Unique -> UniqSet a -> Bool
elemUniqSet_Directly a (UniqSet s) = elemUFM_Directly a s

filterUniqSet :: (a -> Bool) -> UniqSet a -> UniqSet a
filterUniqSet p (UniqSet s) = UniqSet (filterUFM p s)

-- | Filter with access to the 'Unique' key as well as the element.
filterUniqSet_Directly :: (Unique -> elt -> Bool) -> UniqSet elt -> UniqSet elt
filterUniqSet_Directly f (UniqSet s) = UniqSet (filterUFM_Directly f s)

-- | Partition by the predicate (see 'partitionUFM'); 'coerce' avoids
-- re-wrapping both halves by hand.
partitionUniqSet :: (a -> Bool) -> UniqSet a -> (UniqSet a, UniqSet a)
partitionUniqSet p (UniqSet s) = coerce (partitionUFM p s)

uniqSetAny :: (a -> Bool) -> UniqSet a -> Bool
uniqSetAny p (UniqSet s) = anyUFM p s

uniqSetAll :: (a -> Bool) -> UniqSet a -> Bool
uniqSetAll p (UniqSet s) = allUFM p s

sizeUniqSet :: UniqSet a -> Int
sizeUniqSet (UniqSet s) = sizeUFM s

isEmptyUniqSet :: UniqSet a -> Bool
isEmptyUniqSet (UniqSet s) = isNullUFM s

-- | Look up the stored value via the given key's 'Unique'.
lookupUniqSet :: Uniquable a => UniqSet b -> a -> Maybe b
lookupUniqSet (UniqSet s) k = lookupUFM s k

lookupUniqSet_Directly :: UniqSet a -> Unique -> Maybe a
lookupUniqSet_Directly (UniqSet s) k = lookupUFM_Directly s k
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetEltsUniqSet :: UniqSet elt -> [elt]
nonDetEltsUniqSet = nonDetEltsUFM . getUniqSet'
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetKeysUniqSet :: UniqSet elt -> [Unique]
nonDetKeysUniqSet = nonDetKeysUFM . getUniqSet'
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetFoldUniqSet :: (elt -> a -> a) -> a -> UniqSet elt -> a
nonDetFoldUniqSet c n (UniqSet s) = nonDetFoldUFM c n s
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetFoldUniqSet_Directly:: (Unique -> elt -> a -> a) -> a -> UniqSet elt -> a
nonDetFoldUniqSet_Directly f n (UniqSet s) = nonDetFoldUFM_Directly f n s

-- See Note [UniqSet invariant]
-- | Map over the elements, rebuilding the set so the keys stay in
-- sync with the (possibly changed) uniques of the results.
mapUniqSet :: Uniquable b => (a -> b) -> UniqSet a -> UniqSet b
mapUniqSet f = mkUniqSet . map f . nonDetEltsUniqSet

-- Two 'UniqSet's are considered equal if they contain the same
-- uniques.
instance Eq (UniqSet a) where
  UniqSet a == UniqSet b = equalKeysUFM a b

-- | Expose the underlying 'UniqFM' (safe: the invariant still holds).
getUniqSet :: UniqSet a -> UniqFM a
getUniqSet = getUniqSet'

-- | 'unsafeUFMToUniqSet' converts a @'UniqFM' a@ into a @'UniqSet' a@
-- assuming, without checking, that it maps each 'Unique' to a value
-- that has that 'Unique'. See Note [UniqSet invariant].
unsafeUFMToUniqSet :: UniqFM a -> UniqSet a
unsafeUFMToUniqSet = UniqSet

instance Outputable a => Outputable (UniqSet a) where
    ppr = pprUniqSet ppr

-- | Pretty-print the elements with the supplied printer.
pprUniqSet :: (a -> SDoc) -> UniqSet a -> SDoc
pprUniqSet f (UniqSet s) = pprUniqFM f s
| shlevy/ghc | compiler/utils/UniqSet.hs | bsd-3-clause | 6,618 | 0 | 9 | 1,274 | 1,743 | 892 | 851 | 115 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# Language TypeFamilies #-}
{-# Language DeriveGeneric #-}
module T8479 where
import GHC.Generics
import Data.Kind (Type)
-- | Class with an associated type family of kind @Type -> Type@;
-- together with 'Foo' below this appears to be the regression fixture
-- for deriving 'Generic1' through a type-family application (#8479).
class Blah (a :: Type -> Type) where
  type F a :: Type -> Type

data Foo (f :: Type -> Type) a = MkFoo ((F f) a) deriving Generic1
| sdiehl/ghc | testsuite/tests/generics/T8479.hs | bsd-3-clause | 285 | 0 | 10 | 56 | 88 | 53 | 35 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
----------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.GroupNavigation
-- Copyright : (c) nzeh@cs.dal.ca
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : nzeh@cs.dal.ca
-- Stability : unstable
-- Portability : unportable
--
-- Provides methods for cycling through groups of windows across
-- workspaces, ignoring windows that do not belong to this group. A
-- group consists of all windows matching a user-provided boolean
-- query.
--
-- Also provides a method for jumping back to the most recently used
-- window in any given group.
--
----------------------------------------------------------------------
module XMonad.Actions.GroupNavigation ( -- * Usage
-- $usage
Direction (..)
, nextMatch
, nextMatchOrDo
, nextMatchWithThis
, historyHook
) where
import Control.Monad.Reader
import Data.Foldable as Fold
import Data.Map as Map
import Data.Sequence as Seq
import Data.Set as Set
import Graphics.X11.Types
import Prelude hiding (concatMap, drop, elem, filter, null, reverse)
import XMonad.Core
import XMonad.ManageHook
import XMonad.Operations (windows, withFocused)
import qualified XMonad.StackSet as SS
import qualified XMonad.Util.ExtensibleState as XS
{- $usage
Import the module into your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Actions.GroupNavigation
To support cycling forward and backward through all xterm windows, add
something like this to your keybindings:
> , ((modm , xK_t), nextMatch Forward (className =? "XTerm"))
> , ((modm .|. shiftMask, xK_t), nextMatch Backward (className =? "XTerm"))
These key combinations do nothing if there is no xterm window open.
If you rather want to open a new xterm window if there is no open
xterm window, use 'nextMatchOrDo' instead:
> , ((modm , xK_t), nextMatchOrDo Forward (className =? "XTerm") (spawn "xterm"))
> , ((modm .|. shiftMask, xK_t), nextMatchOrDo Backward (className =? "XTerm") (spawn "xterm"))
You can use 'nextMatchWithThis' with an arbitrary query to cycle
through all windows for which this query returns the same value as the
current window. For example, to cycle through all windows in the same
window class as the current window use:
> , ((modm , xK_f), nextMatchWithThis Forward className)
> , ((modm , xK_b), nextMatchWithThis Backward className)
Finally, you can define keybindings to jump to the most recent window
matching a certain Boolean query. To do this, you need to add
'historyHook' to your logHook:
> main = xmonad $ def { logHook = historyHook }
Then the following keybindings, for example, allow you to return to
the most recent xterm or emacs window or to simply to the most recent
window:
> , ((modm .|. controlMask, xK_e), nextMatch History (className =? "Emacs"))
> , ((modm .|. controlMask, xK_t), nextMatch History (className =? "XTerm"))
> , ((modm , xK_BackSpace), nextMatch History (return True))
Again, you can use 'nextMatchOrDo' instead of 'nextMatch' if you want
to execute an action if no window matching the query exists. -}
--- Basic cyclic navigation based on queries -------------------------
-- | The direction in which to look for the next match
-- | The direction in which to look for the next match
data Direction = Forward  -- ^ Forward from current window or workspace
               | Backward -- ^ Backward from current window or workspace
               | History  -- ^ Backward in history

-- | Focuses the next window for which the given query produces the
-- same result as the currently focused window. Does nothing if there
-- is no focused window (i.e., the current workspace is empty).
nextMatchWithThis :: Eq a => Direction -> Query a -> X ()
nextMatchWithThis dir qry = withFocused $ \win -> do
    prop <- runQuery qry win
    nextMatch dir (qry =? prop)

-- | Focuses the next window that matches the given boolean query.
-- Does nothing if there is no such window. This is the same as
-- 'nextMatchOrDo' with alternate action @return ()@.
nextMatch :: Direction -> Query Bool -> X ()
nextMatch dir qry = nextMatchOrDo dir qry (return ())

-- | Focuses the next window that matches the given boolean query. If
-- there is no such window, perform the given action instead.
nextMatchOrDo :: Direction -> Query Bool -> X () -> X ()
nextMatchOrDo dir qry act = orderedWindowList dir
                            >>= focusNextMatchOrDo qry act

-- Produces the action to perform depending on whether there's a
-- matching window: focus the first match, or run the fallback action.
focusNextMatchOrDo :: Query Bool -> X () -> Seq Window -> X ()
focusNextMatchOrDo qry act = findM (runQuery qry)
                             >=> maybe act (windows . SS.focusWindow)
-- Returns the list of windows ordered by workspace as specified in
-- ~/.xmonad/xmonad.hs
orderedWindowList :: Direction -> X (Seq Window)
-- History: previously focused windows, with the currently focused
-- window (if any) appended at the end.
orderedWindowList History = liftM (\(HistoryDB w ws) -> maybe ws (ws |>) w) XS.get
orderedWindowList dir = withWindowSet $ \ss -> do
    wsids <- asks (Seq.fromList . workspaces . config)
    let wspcs = orderedWorkspaceList ss wsids
        wins = dirfun dir
               $ Fold.foldl' (><) Seq.empty
               $ fmap (Seq.fromList . SS.integrate' . SS.stack) wspcs
        cur = SS.peek ss
    -- Rotate so the traversal starts just after the focused window.
    return $ maybe wins (rotfun wins) cur
  where
    dirfun Backward = Seq.reverse
    dirfun _ = id
    rotfun wins x = rotate $ rotateTo (== x) wins
-- Returns the ordered workspace list as specified in ~/.xmonad/xmonad.hs
orderedWorkspaceList :: WindowSet -> Seq String -> Seq WindowSpace
orderedWorkspaceList ss wsids = rotateTo isCurWS wspcs'
  where
    wspcs = SS.workspaces ss
    -- Index workspaces by tag so we can emit them in configured order.
    wspcsMap = Fold.foldl' (\m ws -> Map.insert (SS.tag ws) ws m) Map.empty wspcs
    -- NOTE(review): (!) is partial — assumes every configured workspace
    -- id exists in the current window set.
    wspcs' = fmap (\wsid -> wspcsMap ! wsid) wsids
    isCurWS ws = SS.tag ws == SS.tag (SS.workspace $ SS.current ss)
--- History navigation, requires a layout modifier -------------------
-- The state extension that holds the history information
data HistoryDB = HistoryDB (Maybe Window) -- currently focused window
                           (Seq Window)   -- previously focused windows
               deriving (Read, Show, Typeable)

instance ExtensionClass HistoryDB where
    initialValue  = HistoryDB Nothing Seq.empty
    extensionType = PersistentExtension

-- | Action that needs to be executed as a logHook to maintain the
-- focus history of all windows as the WindowSet changes.
historyHook :: X ()
historyHook = XS.get >>= updateHistory >>= XS.put

-- Updates the history in response to a WindowSet change
updateHistory :: HistoryDB -> X HistoryDB
updateHistory (HistoryDB oldcur oldhist) = withWindowSet $ \ss -> do
    let newcur = SS.peek ss
        wins = Set.fromList $ SS.allWindows ss
        -- Push the previous focus onto the history and drop windows
        -- that no longer exist.
        newhist = flt (flip Set.member wins) (ins oldcur oldhist)
    -- The new focus itself is removed from the history.
    return $ HistoryDB newcur (del newcur newhist)
  where
    ins x xs = maybe xs (<| xs) x
    del x xs = maybe xs (\x' -> flt (/= x') xs) x
--- Two replacements for Seq.filter and Seq.breakl available only in
--- containers-0.3.0.0, which only ships with ghc 6.12. Once we
--- decide to no longer support ghc < 6.12, these should be replaced
--- with Seq.filter and Seq.breakl.
-- | Keep the elements satisfying the predicate, preserving order
-- (stand-in for 'Seq.filter'; see the note above).
flt :: (a -> Bool) -> Seq a -> Seq a
flt keep = Fold.foldl step Seq.empty
  where
    step acc x
      | keep x    = acc |> x
      | otherwise = acc
-- | Split the sequence just before the first element satisfying @p@
-- (stand-in for 'Seq.breakl'): the left part is the longest prefix
-- containing no matching element.  The foldr walks right-to-left,
-- tracking the index of the leftmost match seen so far.
brkl :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
brkl p xs = flip Seq.splitAt xs
            $ snd
            $ Fold.foldr (\x (i, j) -> if p x then (i-1, i-1) else (i-1, j)) (l, l) xs
  where
    l = Seq.length xs
--- Some sequence helpers --------------------------------------------
-- Rotates the sequence by one position
-- | Move the first element of the sequence to the end; the empty
-- sequence is returned unchanged.
rotate :: Seq a -> Seq a
rotate s =
    case viewl s of
        EmptyL    -> Seq.empty
        x :< rest -> rest |> x
-- Rotates the sequence until an element matching the given condition
-- is at the beginning of the sequence.
-- (When no element matches, 'brkl' splits at the very end, so the
-- sequence comes back unchanged.)
rotateTo :: (a -> Bool) -> Seq a -> Seq a
rotateTo cond xs = let (lxs, rxs) = brkl cond xs in rxs >< lxs
--- A monadic find ---------------------------------------------------
-- Applies the given action to every sequence element in turn until
-- the first element is found for which the action returns true. The
-- remaining elements in the sequence are ignored.
-- | Run the monadic predicate over the sequence from the left and
-- return the first element for which it yields 'True'; the remaining
-- elements are not examined.
findM :: Monad m => (a -> m Bool) -> Seq a -> m (Maybe a)
findM check s =
    case viewl s of
        EmptyL    -> return Nothing
        x :< rest -> do
            matched <- check x
            if matched
                then return (Just x)
                else findM check rest
| pjones/xmonad-test | vendor/xmonad-contrib/XMonad/Actions/GroupNavigation.hs | bsd-2-clause | 8,762 | 0 | 17 | 2,082 | 1,556 | 830 | 726 | 90 | 3 |
module Distribution.Client.Dependency.Modular.Index where
import Data.List as L
import Data.Map as M
import Prelude hiding (pi)
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.Tree
-- | An index contains information about package instances. This is a nested
-- dictionary. Package names are mapped to instances, which in turn is mapped
-- to info.
type Index = Map PN (Map I PInfo)

-- | Info associated with a package instance.
-- Currently, dependencies, flags, encapsulations and failure reasons.
-- Packages that have a failure reason recorded for them are disabled
-- globally, for reasons external to the solver. We currently use this
-- for shadowing which essentially is a GHC limitation, and for
-- installed packages that are broken.
data PInfo = PInfo (FlaggedDeps PN) FlagInfo Encaps (Maybe FailReason)
    deriving (Show)

-- | Encapsulations. A list of package names.
type Encaps = [PN]
-- | Build the nested package index from a flat list of
-- (package name, instance, info) triples.
mkIndex :: [(PN, I, PInfo)] -> Index
mkIndex xs = M.map M.fromList (groupMap keyed)
  where
    keyed = L.map (\(name, inst, info) -> (name, (inst, info))) xs
-- | Group the values of an association list by key, preserving the
-- original left-to-right order of values within each key.
groupMap :: Ord a => [(a, b)] -> Map a [b]
groupMap = M.fromListWith (flip (++)) . L.map singletonVal
  where
    -- flip (++) appends the newer value AFTER the older ones.
    singletonVal (k, v) = (k, [v])
| DavidAlphaFox/ghc | libraries/Cabal/cabal-install/Distribution/Client/Dependency/Modular/Index.hs | bsd-3-clause | 1,304 | 0 | 13 | 198 | 308 | 189 | 119 | 16 | 1 |
module Test13 where
-- NOTE(review): this module appears to be a refactoring-tool test
-- fixture (its expected AST output lives alongside it), so the
-- redundant double parentheses in the pattern below are presumably
-- intentional -- confirm before "cleaning" them up.
f ((x : xs)) = x : xs
g = f (1 : [1, 2])
| kmate/HaRe | old/testing/refacFunDef/Test13_AstOut.hs | bsd-3-clause | 64 | 0 | 8 | 21 | 47 | 27 | 20 | 3 | 1 |
-- |Simple vectorised constructors and projections.
--
module Vectorise.Vect
( Vect, VVar, VExpr, VBind
, vectorised
, lifted
, mapVect
, vVarType
, vNonRec
, vRec
, vVar
, vType
, vTick
, vLet
, vLams
, vVarApps
, vCaseDEFAULT
)
where
import CoreSyn
import Type ( Type )
import Var
-- |Contains the vectorised and lifted versions of some thing.
--
type Vect a = (a,a)
type VVar = Vect Var
type VExpr = Vect CoreExpr
type VBind = Vect CoreBind
-- |Get the vectorised version of a thing.
--
vectorised :: Vect a -> a
vectorised (v, _) = v
-- |Get the lifted version of a thing.
--
lifted :: Vect a -> a
lifted (_, l) = l
-- |Apply some function to both the vectorised and lifted versions of a
-- thing.
--
mapVect :: (a -> b) -> Vect a -> Vect b
mapVect f (v, l) = (f v, f l)
-- |Combine vectorised and lifted versions of two things componentwise.
--
zipWithVect :: (a -> b -> c) -> Vect a -> Vect b -> Vect c
zipWithVect f (va, la) (vb, lb) = (f va vb, f la lb)
-- |Get the type of a vectorised variable (taken from the vectorised
-- half of the pair).
--
vVarType :: VVar -> Type
vVarType vv = varType (vectorised vv)
-- |Wrap a vectorised variable as a vectorised expression.
--
vVar :: VVar -> VExpr
vVar (vv, lv) = (Var vv, Var lv)
-- |Wrap a vectorised type as a vectorised expression (both halves are
-- the same type expression).
--
vType :: Type -> VExpr
vType ty = (t, t)
  where
    t = Type ty
-- |Attach a tick to both the vectorised and the lifted expression.
--
vTick :: Tickish Id -> VExpr -> VExpr
vTick t (ve, le) = (Tick t ve, Tick t le)
-- |Make a vectorised non-recursive binding.
--
vNonRec :: VVar -> VExpr -> VBind
vNonRec (vv, lv) (ve, le) = (NonRec vv ve, NonRec lv le)
-- |Make a vectorised recursive binding from parallel lists of binders
-- and right-hand sides.
--
vRec :: [VVar] -> [VExpr] -> VBind
vRec vars rhss = (Rec (zip vv ve), Rec (zip lv le))
  where
    (vv, lv) = unzip vars
    (ve, le) = unzip rhss
-- |Make a vectorised let expression.
--
vLet :: VBind -> VExpr -> VExpr
vLet (vb, lb) (ve, le) = (Let vb ve, Let lb le)
-- |Make a vectorised lambda abstraction.
--
-- The lifted version additionally binds the lifting context 'lc' as
-- its first parameter.
--
vLams :: Var     -- ^ Var bound to the lifting context.
      -> [VVar]  -- ^ Parameter vars for the abstraction.
      -> VExpr   -- ^ Body of the abstraction.
      -> VExpr
vLams lc params (vbody, lbody)
  = (mkLams vparams vbody, mkLams (lc : lparams) lbody)
  where
    (vparams, lparams) = unzip params
-- |Apply an expression to a set of argument variables.
--
-- The lifted version is additionally applied to the lifting-context
-- variable (first, before the other arguments).
--
vVarApps :: Var -> VExpr -> [VVar] -> VExpr
vVarApps lc (ve, le) argPairs
  = (ve `mkVarApps` vectArgs, le `mkVarApps` (lc : liftArgs))
  where
    (vectArgs, liftArgs) = unzip argPairs
-- |Make a vectorised case expression with a single DEFAULT alternative
-- (one case for the vectorised scrutinee, one for the lifted one).
vCaseDEFAULT :: VExpr   -- scrutiny
             -> VVar    -- binder
             -> Type    -- type of vectorised version
             -> Type    -- type of lifted version
             -> VExpr   -- body of alternative.
             -> VExpr
vCaseDEFAULT (vscrut, lscrut) (vbndr, lbndr) vty lty (vbody, lbody)
  = (Case vscrut vbndr vty (mkDEFAULT vbody),
     Case lscrut lbndr lty (mkDEFAULT lbody))
  where
    mkDEFAULT e = [(DEFAULT, [], e)]
| oldmanmike/ghc | compiler/vectorise/Vectorise/Vect.hs | bsd-3-clause | 2,935 | 0 | 9 | 729 | 817 | 469 | 348 | 67 | 1 |
{-# LANGUAGE CPP #-}
module X86.RegInfo (
mkVirtualReg,
regDotColor
)
where
#include "nativeGen/NCG.h"
#include "HsVersions.h"
import Size
import Reg
import Outputable
import Platform
import Unique
import UniqFM
import X86.Regs
-- | Pick the virtual register class for a value of the given size:
-- FF32/FF64 live in SSE registers, FF80 in x87 registers, everything
-- else in integer registers.
mkVirtualReg :: Unique -> Size -> VirtualReg
mkVirtualReg uniq sz =
  case sz of
    FF32 -> VirtualRegSSE uniq
    FF64 -> VirtualRegSSE uniq
    FF80 -> VirtualRegD   uniq
    _    -> VirtualRegI   uniq
-- | Colour used for this real register in register-allocator dot
-- graphs.  Every register handed to this function is expected to have
-- an entry in 'regColors'; a miss now fails with a message naming this
-- function instead of an opaque irrefutable-pattern error.
regDotColor :: Platform -> RealReg -> SDoc
regDotColor platform reg =
  case lookupUFM (regColors platform) reg of
    Just colour -> text colour
    Nothing     -> error "X86.RegInfo.regDotColor: register has no assigned colour"
-- | Full register-to-colour map: general-purpose colours for the
-- target platform plus the floating-point colours.
regColors :: Platform -> UniqFM [Char]
regColors platform = listToUFM allColors
  where
    allColors = normalRegColors platform ++ fpRegColors
-- | Dot-graph colours for the general-purpose registers.  32-bit
-- targets colour only eax/ebx/ecx/edx; 64-bit targets colour the r*
-- registers, their e* aliases, and r8-r15.
-- NOTE(review): on the 64-bit branch rdx gets "#0080ff" but edx gets
-- "#00ffff" (the ecx colour), while the 32-bit branch gives edx
-- "#0080ff" -- this asymmetry looks accidental; confirm before relying
-- on the exact colours.
normalRegColors :: Platform -> [(Reg,String)]
normalRegColors platform
    | target32Bit platform = [ (eax, "#00ff00")
                             , (ebx, "#0000ff")
                             , (ecx, "#00ffff")
                             , (edx, "#0080ff") ]
    | otherwise = [ (rax, "#00ff00"), (eax, "#00ff00")
                  , (rbx, "#0000ff"), (ebx, "#0000ff")
                  , (rcx, "#00ffff"), (ecx, "#00ffff")
                  , (rdx, "#0080ff"), (edx, "#00ffff")
                  , (r8,  "#00ff80")
                  , (r9,  "#008080")
                  , (r10, "#0040ff")
                  , (r11, "#00ff40")
                  , (r12, "#008040")
                  , (r13, "#004080")
                  , (r14, "#004040")
                  , (r15, "#002080") ]
-- | Dot-graph colours for the floating-point registers: distinct
-- colours for the six "fake" x87 registers, plain red for the XMM
-- range (register numbers 24..39).
fpRegColors :: [(Reg,String)]
fpRegColors = namedFakes ++ xmmFill
  where
    namedFakes =
      [ (fake0, "#ff00ff")
      , (fake1, "#ff00aa")
      , (fake2, "#aa00ff")
      , (fake3, "#aa00aa")
      , (fake4, "#ff0055")
      , (fake5, "#5500ff") ]
    xmmFill = [ (regSingle i, "red") | i <- [24 .. 39] ]
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE FlexibleInstances #-}
{- With "hugs -98 +o test.hs" gives me:
ERROR "test.hs":8 - Cannot justify constraints in instance member binding
*** Expression : fromStr
*** Type : FromStr [a] => String -> [a]
*** Given context : FromStr [a]
*** Constraints : FromStr [a]
Adding the constraint "FromStr a" to the declaration of fromStr fixes
the problem, but that seems like it should be redundant. Removing the
second instance (lines 10-11) also fixes the problem, interestingly enough.
/Bjorn Bringert -}
-- August 08: on reflection I think a complaint about overlapping
-- instances for line 8 is absolutely right, so I've changed this to
-- expected-failure
-- Sept 08: on further reflection (!) I'm changing it back
-- See Note [Subtle interaction of recursion and overlap]
-- in TcInstDcls
module ShouldCompile where
-- The overlapping pair of instances below is the point of this
-- typechecker regression test (see the header comments above); keep
-- the structure and the "line N" markers intact.
class FromStr a where
  fromStr :: String -> a

-- Always diverges; exists only to carry the 'FromStr' constraint.
typeError :: FromStr a => a -> a
typeError t = error "type error"

instance {-# OVERLAPPABLE #-} FromStr [a] where
  fromStr _ = typeError undefined         -- line 8

instance {-# OVERLAPPING #-} FromStr [(String,a)] where -- line 10
  fromStr _ = typeError undefined         -- line 11
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/tc176.hs | bsd-3-clause | 1,272 | 0 | 7 | 276 | 113 | 65 | 48 | 11 | 1 |
{-# LANGUAGE ExistentialQuantification, TemplateHaskell #-}
module Light.Primitive
( Primitive, primitive, primitiveShape, primitiveMaterial
, Material(..)
)
where
import Light.Shape
import Light.Geometry.Transform
-- | Surface material.  Currently an empty placeholder with a single
-- nullary constructor.
data Material = Material deriving (Eq, Show, Read)

-- | A scene object: an existentially-boxed shape ('ShapeBox') paired
-- with the material covering it.
data Primitive = Primitive { primitiveShape :: ShapeBox
                           , primitiveMaterial :: Material
                           }
                 deriving (Show)
-- | Build a 'Primitive' by boxing the given shape and attaching the
-- given material.
primitive :: (Shape s, Transformable s, Show s) => s -> Material -> Primitive
primitive s mat = Primitive (shapeBox s) mat
-- | A 'Primitive' behaves as a shape by delegating every operation to
-- its boxed 'primitiveShape'.
instance Shape Primitive where
  shapeTransform p = shapeTransform (primitiveShape p)
  bound p          = bound (primitiveShape p)
  worldBound p     = worldBound (primitiveShape p)
  surfaceArea p    = surfaceArea (primitiveShape p)
  intersects r p   = intersects r (primitiveShape p)
  intersect r p    = intersect r (primitiveShape p)
| jtdubs/Light | src/Light/Primitive.hs | mit | 908 | 0 | 8 | 233 | 226 | 125 | 101 | 19 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.DelayNode
(js_getDelayTime, getDelayTime, DelayNode, castToDelayNode,
gTypeDelayNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"delayTime\"]"
js_getDelayTime :: JSRef DelayNode -> IO (JSRef AudioParam)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DelayNode.delayTime Mozilla DelayNode.delayTime documentation>
getDelayTime :: (MonadIO m) => DelayNode -> m (Maybe AudioParam)
getDelayTime node = liftIO (fromJSRef =<< js_getDelayTime (unDelayNode node))
--------------------------------------------------------------------------------
-- |
-- | Module : Data
-- | Copyright : (c) Vladimir Lopatin 2014
-- | License : BSD3
-- |
-- | Maintainer : Vladimir Lopatin <madjestic13@gmail.com>
-- | Stability : experimental
-- | Portability : untested
-- |
-- | The NGL library works, by dumping a vertex array into OpenGL buffer
-- |
-- | basic shapes types should be of 2 kinds:
-- | Shapes positioned by center
-- | Shapes' positioned by bottom-left corner--
--------------------------------------------------------------------------------
module NGL.Shape where
import Graphics.Rendering.OpenGL (Vertex2(..))
import NGL.Utils
data Shape = Circle Point Radius Divisions
| Square Point Side
| Rect Point Point
| Line Point Point Float -- | Ordered pair to store directionality
| Triangle Point Point Point
| Quad [Point] -- | BL vertex TR vertex
| Polygon [Point] -- | [Triangle] ?
| Polyline [Point] Float
| Curve [Point]
deriving Show
data Transform = Rotate2D Float Point
| Translate2D Point Point
deriving Show
type Picture =[Vertex2 Float]
type Point =(Float, Float)
type Radius = Float
type Side = Float
type Divisions = Int
-- | Flatten a list of point lists into one OpenGL vertex list.
toVertex :: [[Point]] -> Picture
toVertex = concatMap (map vertex)
-- | Convert a point pair into an OpenGL 'Vertex2'.
vertex :: Point -> Vertex2 Float
vertex (px, py) = Vertex2 px py
-- | Rotate a list of 2D points by the given angle, converted to
-- radians via 'toRadians' before delegating to 'rotate2D''.
rotate :: Float -> [(Float, Float)] -> [(Float, Float)]
rotate angle pts = rotate2D' (toRadians angle) pts
-- | Expand a 'Shape' description into its triangle/quad vertex list.
-- NOTE(review): this match is non-exhaustive -- the 'Quad', 'Polygon'
-- and 'Curve' constructors fall through to a runtime pattern-match
-- failure.  Presumably unimplemented rather than unreachable; confirm
-- with callers before relying on them.
shape :: Shape -> [Point]
shape (Square pos side) = square pos side
shape (Circle pos rad divs) = circle pos rad divs
shape (Rect bl tr) = rect bl tr -- | bl := bottom left, tr := top right
shape (Line p1 p2 w) = line p1 p2 w
shape (Polyline ps w) = polyline ps w
shape (Triangle p1 p2 p3) = triangle p1 p2 p3
-- | Render a polyline of width w by drawing a thick 'line' quad for
-- each consecutive pair of points (endpoints shared via 'abbcca').
polyline :: [Point] -> Float -> [Point]
polyline ps w = concat [ line a b w | (a, b) <- pairs (abbcca ps) ]
-- | A triangle is simply its three corner points.
triangle :: Point -> Point -> Point -> [Point]
triangle a b c = [a, b, c]
-- | Axis-aligned square centred at the given point, emitted as two
-- triangles (six vertices).
square :: Point -> Float -> [Point]
square (cx, cy) side = [tr, tl, bl,
                        tr, bl, br]
  where
    h  = side / 2
    tr = (cx + h, cy + h)
    tl = (cx - h, cy + h)
    bl = (cx - h, cy - h)
    br = (cx + h, cy - h)
-- | Duplicate every interior element so consecutive pairs share their
-- endpoints: abbcca [a,b,c,d] == [a,b,b,c,c,d]; the first and last
-- elements appear once.  Now total: empty and singleton lists pass
-- through unchanged (the original crashed on both via init/last).
abbcca :: [a] -> [a]
abbcca []       = []
abbcca [x]      = [x]
abbcca (x : xs) = x : concatMap (\v -> [v, v]) (init xs) ++ [last xs]
-- | Triangle-fan approximation of a circle centred at @pos@ with
-- radius @r@, using @divs@ slices: output is a flat list of triangle
-- corners (centre, edge, edge, centre, edge, edge, ...).
circle :: Point -> Float -> Int -> [Point]
circle (cx, cy) r divs = concat (fan (abbcca ring))
  where
    divs'  = fromIntegral divs
    angles = [0.0, 2 * pi / divs' .. 2 * pi]
    -- Fixed: x takes the cosine and y the sine, matching the (x,y)
    -- convention used by 'square' and 'rect'.  The original zipped
    -- (y + r*sin t, x + r*cos t), which centred the circle at (cy, cx)
    -- whenever cx /= cy.
    ring = [ (cx + r * cos t, cy + r * sin t) | t <- angles ]
    -- Total replacement for the original partial 'insertpos': consume
    -- the edge points two at a time, prefixing the centre; a stray odd
    -- leftover is dropped instead of crashing.
    fan (p1 : p2 : rest) = [(cx, cy), p1, p2] : fan rest
    fan _                = []
-- | Axis-aligned rectangle from its bottom-left and top-right corners,
-- emitted as two triangles (six vertices).
rect :: Point -> Point -> [Point]
rect (lx, ly) (hx, hy) =
  [ (hx, hy), (lx, hy), (lx, ly)
  , (hx, hy), (lx, ly), (hx, ly) ]
-- | Thick line segment from p1 to p2 with width w: build an
-- axis-aligned rectangle of the right length, rotate it by the
-- segment's angle, then translate it to p1.
-- NOTE(review): the author's own trailing comment flags the rotation
-- as wrong -- treat output orientation as suspect until verified.
line :: Point -> Point -> Float -> [Point]
line (x1,y1) (x2,y2) w = map (addVectors (x1,y1)) $ rotate2D' theta $ rect (0.0,-w/2) (len,w/2) -- rotation is wrong
    where
      (x,y) = normalize $ ((x2-x1),(y2-y1))
      theta = signum y * acos x -- | angle in radians
      len = sqrt((x2-x1)^2+ (y2-y1)^2)
| ublubu/zombieapaperclypse | NGL/Shape.hs | mit | 3,609 | 2 | 13 | 1,170 | 1,348 | 760 | 588 | 69 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides functionality to manipulate raw transaction. It
-- automatically interprets transactions using the `bitcoin-tx` package, so
-- you can work with actual 'Btc.Transaction' objects rather than their
-- serialized format.
module Network.Bitcoin.Api.Transaction where
import Data.Aeson
import Data.Aeson.Lens
import Data.Maybe (fromMaybe, catMaybes)
import Control.Lens ((^.), (^?))
import qualified Data.Base58String as B58S
import qualified Data.Bitcoin.Block as Btc hiding (encode, decode)
import qualified Data.Bitcoin.Transaction as Btc
import qualified Data.Bitcoin.Types as BT
import qualified Network.Bitcoin.Api.Blockchain as Blockchain
import qualified Network.Bitcoin.Api.Internal as I
import qualified Network.Bitcoin.Api.Types as T
import Network.Bitcoin.Api.Types.UnspentTransaction hiding (confirmations)
-- | Creates a new transaction, but does not sign or submit it yet. You provide
-- a set of unspent transactions that you have the authority to spend, and you
-- provide a destination for all your bitcoins.
--
-- __WARNING: Check your math!__ If the sum of the Btc in unspent transactions
-- of your request is more than the sum of the Btc in the destinations, this
-- will be the miner's fee. It is reasonable to leave a small amount for the
-- miners, but if there is a large discrepancy between input and output, there
-- are no guarantees you will be warned.
--
-- All this function does is create a default script on how to spend coins from
-- one or more inputs to one or more outputs. Checking and verifying the
-- transaction will only happen when you actually submit the transaction to
-- the network.
create :: T.Client -- ^ The client session we are using
-> [UnspentTransaction] -- ^ The inputs we are using for this transaction
-> [(BT.Address, BT.Btc)] -- ^ A key/value pair which associates a
-- destination address with a specific amount
-- of bitcoins to send.
-> IO Btc.Transaction
create client utxs outputs =
let configuration = [toJSON (map txToOutpoint utxs), object (map outToAddress outputs)]
txToOutpoint tx = object [
("txid", toJSON (tx ^. transactionId)),
("vout", toJSON (tx ^. vout))]
outToAddress (addr, btc) = (B58S.toText addr, toJSON btc)
in (return . Btc.decode) =<< I.call client "createrawtransaction" configuration
-- | Signs a raw transaction with configurable parameters.
sign :: T.Client -- ^ Our client context
-> Btc.Transaction -- ^ The transaction to sign
-> Maybe [UnspentTransaction] -- ^ Previous outputs being spent by this transaction
-> Maybe [BT.PrivateKey] -- ^ Private keys to use for signing.
-> IO (Btc.Transaction, Bool) -- ^ The signed transaction, and a boolean that is true
-- when the signing is complete or and is false when
-- more signatures are required.
sign client tx utxs pks =
let configuration = [configurationTx tx, configurationUtxs utxs, configurationPks pks]
configurationTx tx' =
toJSON (Btc.encode tx')
configurationUtxs Nothing = Null
configurationUtxs (Just utxs') =
toJSON (map utxToDependency utxs')
where
utxToDependency utx = object [
("txid", toJSON (utx ^. transactionId)),
("vout", toJSON (utx ^. vout)),
("scriptPubKey", toJSON (utx ^. scriptPubKey)),
("redeemScript", toJSON (utx ^. redeemScript))]
configurationPks Nothing = Null
configurationPks (Just privateKeys) =
toJSON privateKeys
extractTransaction res =
maybe
(error "Incorrect JSON response")
Btc.decode
(res ^? key "hex" . _JSON)
extractCompleted res =
fromMaybe
(error "Incorrect JSON response")
(res ^? key "complete" . _JSON)
in do
res <- I.call client "signrawtransaction" configuration :: IO Value
return (extractTransaction res, extractCompleted res)
-- | Broadcast a raw (signed) transaction through the Bitcoin network,
-- returning the id the node reports for it.
send :: T.Client
     -> Btc.Transaction
     -> IO BT.TransactionId
send client tx = I.call client "sendrawtransaction" [toJSON (Btc.encode tx)]
-- | Returns a list of transactions that occured since a certain block height.
-- If no block height was provided, the genisis block with height 0 is assumed.
-- The transactions returned are listed chronologically.
list :: T.Client -- ^ Our client session context
-> Maybe Integer -- ^ The offset / height we should start listing transactions
-> Maybe Integer -- ^ Minimum amount of confirmations for a transaction to have. Should be 1 or higher.
-- A default value of 6 is used.
-> IO [Btc.Transaction]
list client Nothing confirmations = list client (Just 0) confirmations
list client offset Nothing = list client offset (Just 6)
list client (Just offset) (Just confirmations) = do
limit <- Blockchain.getBlockCount client
blocks <- mapM (Blockchain.getBlock client) =<< mapM (Blockchain.getBlockHash client) [offset..limit - confirmations]
return $ foldl (\lhs rhs -> lhs ++ rhs ^. Btc.blockTxns) [] (catMaybes blocks)
| solatis/haskell-bitcoin-api | src/Network/Bitcoin/Api/Transaction.hs | mit | 5,718 | 0 | 16 | 1,606 | 1,007 | 557 | 450 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
import Control.Lens
import Control.Lens.TH
data Record1 a = Record1
{ _a :: Int
, _b :: Maybe a
} deriving Show
data Record2 = Record2
{ _c :: String
, _d :: [Int]
} deriving Show
$(makeLenses ''Record1)
$(makeLenses ''Record2)
records = [
Record1 {
_a = 1,
_b = Nothing
},
Record1 {
_a = 2,
_b = Just $ Record2 {
_c = "Picard",
_d = [1,2,3]
}
},
Record1 {
_a = 3,
_b = Just $ Record2 {
_c = "Riker",
_d = [4,5,6]
}
},
Record1 {
_a = 4,
_b = Just $ Record2 {
_c = "Data",
_d = [7,8,9]
}
}
]
-- Some abstract traversals.
-- All '_a' fields of a list of 'Record1's.
ids = traverse.a
-- The '_c' field of each present nested 'Record2'.
names = traverse.b._Just.c
-- The whole '_d' list of each present nested 'Record2'.
nums = traverse.b._Just.d
-- The element at index 2 of each '_d' list (absent lists/indices are
-- simply skipped by 'ix').
list2 = traverse.b._Just.d.ix 2
-- Modify/read/extract in terms of generic traversals.
-- Modify to set all 'id' fields to 0
ex1 = set ids 0 records
-- Return a view of the concatenated 'd' fields for all nested records.
ex2 = view nums records
-- [1,2,3,4,5,6,7,8,9]
-- Increment all 'id' fields by 1
ex3 = over ids (+1) records
-- Return a list of all 'c' fields.
ex4 = toListOf names records
-- ["Picard","Riker","Data"]
-- Return the the second element of all 'd' fields.
ex5 = toListOf list2 records
-- [3,6,9]
| riwsky/wiwinwlh | src/lens.hs | mit | 1,357 | 0 | 11 | 397 | 360 | 215 | 145 | 42 | 1 |
module Euler.Problems.Euler012
(
euler012
) where
import Data.List (group)
import Euler.Primes (primeFactors)
-- | Project Euler problem 12: the first triangle number with more than
-- 500 divisors (divisor count derived from the prime factorisation).
euler012 :: () -> Int
euler012 _ = fromIntegral firstBigTriangle
  where
    firstBigTriangle = head (dropWhile ((< 501) . divisorCount) (drop 1 triangleNums))
    triangleNums     = 1 : zipWith (+) triangleNums [2 ..]
    divisorCount     = product . map ((+ 1) . length) . group . primeFactors
| b52/projecteuler | src/Euler/Problems/Euler012.hs | mit | 357 | 0 | 13 | 80 | 136 | 76 | 60 | 9 | 1 |
-- Echo each input line with its words reversed; stop on an empty line.
main = do
  input <- getLine
  case input of
    "" -> return ()
    _  -> do
      putStrLn (reverseWords input)
      main
-- | Reverse each word of the input while keeping the word order.
reverseWords :: String -> String
reverseWords input = unwords [reverse w | w <- words input]
| fabriceleal/learn-you-a-haskell | 09/infinite_input.hs | mit | 228 | 0 | 11 | 86 | 70 | 33 | 37 | 9 | 2 |
module Util.Util where
import Graphics.Rendering.OpenGL
-- |'fib' returns the n-th Fibonacci number (0-indexed into 'fibs')
fib :: Int -> Int
fib n = fibs !! n
-- |'fibs' is the infinite Fibonacci sequence 0, 1, 1, 2, 3, 5, ...
fibs :: [Int]
fibs = 0 : 1 : zipWith (+) fibs (drop 1 fibs)
-- Render an axis-aligned cube of half-extent w centred at the origin,
-- as six quad faces (24 vertices, same order as before).
cube w = renderPrimitive Quads (mapM_ corner corners)
  where
    corner (x, y, z) = vertex (Vertex3 x y z)
    corners =
      [ ( w,  w,  w), ( w,  w, -w), ( w, -w, -w), ( w, -w,  w)  -- x = +w face
      , ( w,  w,  w), ( w,  w, -w), (-w,  w, -w), (-w,  w,  w)  -- y = +w face
      , ( w,  w,  w), ( w, -w,  w), (-w, -w,  w), (-w,  w,  w)  -- z = +w face
      , (-w,  w,  w), (-w,  w, -w), (-w, -w, -w), (-w, -w,  w)  -- x = -w face
      , ( w, -w,  w), ( w, -w, -w), (-w, -w, -w), (-w, -w,  w)  -- y = -w face
      , ( w,  w, -w), ( w, -w, -w), (-w, -w, -w), (-w,  w, -w)  -- z = -w face
      ]
-- Render a textured square of the given side length in the z = 0
-- plane, centred at the origin, one texture coordinate per corner.
plane width = renderPrimitive Quads $ mapM_ texturedCorner
    [ ((0, 1), (-w, -w))
    , ((1, 1), ( w, -w))
    , ((1, 0), ( w,  w))
    , ((0, 0), (-w,  w))
    ]
  where
    w = width / 2
    texturedCorner ((s, t), (x, y)) = do
      texCoord (TexCoord2 s t :: TexCoord2 GLfloat)
      vertex (Vertex3 x y 0 :: Vertex3 GLfloat)
-- | @points n@: n points evenly spaced around the unit circle in the
-- z = 0 plane.
points :: Int -> [(GLfloat,GLfloat,GLfloat)]
points n' = [ (sin t, cos t, 0.0) | k <- [1 .. n], let t = 2 * pi * k / n ]
  where
    n = fromIntegral n'
| andrey013/mynd | src/Util/Util.hs | mit | 1,595 | 0 | 16 | 444 | 891 | 441 | 450 | 46 | 1 |
module Rebase.Data.Profunctor.Adjunction
(
module Data.Profunctor.Adjunction
)
where
import Data.Profunctor.Adjunction
| nikita-volkov/rebase | library/Rebase/Data/Profunctor/Adjunction.hs | mit | 122 | 0 | 5 | 12 | 23 | 16 | 7 | 4 | 0 |
{-# LANGUAGE RecordWildCards, DeriveGeneric #-}
module Exp where
import GHC.Generics
import Language.Haskell.Exts.Annotated
-- import Control.Monad
import Control.Applicative
import Control.Arrow
import Text.PrettyPrint.GenericPretty
import Data.List
import Data.Char
import Data.Maybe
import Control.Conditional
import Safe
import Debug.Trace
-- | Ad-hoc smoke test: parse ./test/Data/Blah.hs, build the highlight
-- list (module highlights plus comment highlights, sorted by start
-- position), and print the source annotated by 'pr'.  On a parse
-- failure the pretty-printed error is shown and the highlight list is
-- left empty.
try :: IO ()
try = do
  putStrLn "begin"
  s <-readFile "./test/Data/Blah.hs"
  let (hl, e) = case parseFileContentsWithComments defaultParseMode{parseFilename = "Blah.hs"} s of
                     ParseOk (m, cms) -> (sort $ map hlComment cms ++ hlModule m, [])
                     err -> ([], prty . show $ err)
  putStrLn e
  putStrLn . pr hl $ s
  -- putStrLn (prty . show $ hl)
  putStrLn "done"
-- | Render the source string with highlight markup interleaved.
-- Walks the input character by character while tracking (line, column)
-- and the list of pending highlights (assumed sorted by start position
-- -- confirm at call sites); emits "<xx|" before the first character of
-- a highlight and "|xx>" after its last one, where xx is the two-letter
-- tag from '_hlId'.  The output string is built in reverse on top of a
-- seed character (so the previous-char pattern never fails), hence the
-- final @drop 1 . reverse@.
pr :: [Highlight] -> String -> String
pr hl = drop 1 . reverse . fst . foldl _scan ("1", (hl, 1,0))
  where
    -- Dispatch on the current character: newlines bump the line counter
    -- (a CR/LF or LF/CR pair counts once), tabs advance the column by
    -- 8, other whitespace just advances, anything else may emit markup.
    _scan (s@(chp:_), st) ch = case ch of
      '\n' -> _ignore (s, if chp == '\r' then st else _incL st) ch
      '\r' -> _ignore (s, if chp == '\n' then st else _incL st) ch
      '\t' -> _ignore (s, head . drop 8 . iterate _incC $ st) ch
      _ -> if isSpace ch then _ignore (s, _incC st) ch else _proc (s, _discard . _incC $ st) $ ch
    _scan x _ = error $ "_scan mis match " ++ show x
    _incL (hs, l, _) = (hs, l + 1, 0)
    _incC (hs, l, c) = (hs, l, c + 1)
    -- Drop pending highlights that end at or before the current position.
    _discard x@([],_,_) = x
    _discard (h:hs, l, c) | hlEnd h <= (l,c) = _discard (hs, l, c)
                          | otherwise = (h:hs, l, c)
    _discard' (s, st) = (s, _discard st)  -- NOTE(review): appears unused
    _ignore (s, st) ch = (ch:s, st)
    -- Emit the open/close tags of the first pending highlight around ch
    -- when the current position matches its start/end.
    _proc (s, st@([],_,_)) ch = (ch : s, st)
    _proc (s, st@(h:_, l, c)) ch = (_end (ch : _start s h l c) h l c, st)
    _start s h l c | hlStart h == (l,c) = foldl (flip (:)) s (_hlO . hlType $ h)
                   | otherwise = s
    _end s h l c | hlEnd h == (l,c+1) = foldl (flip (:)) s (_hlC . hlType $ h)
                 | otherwise = s
    _hlO = (++"|") . ("<"++) . _hlId
    _hlC = (++">") . ("|"++) . _hlId
    -- Two-letter tag for each highlight type.
    _hlId hlt = case hlt of
      HlComment -> "co"
      HlModuleName -> "mn"
      HlKeyword -> "kw"
      HlImport -> "im"
      HlPragma -> "pr"
      HlBrace -> "br"
      HlComma -> "cm"
      HlElipse -> "el"
      HlIdentType -> "it"
      HlIdentFunc -> "if"
      HlSymbolType -> "st"
      HlSymbolFunc -> "sf"
      HlSpecialCon -> "sc"
      HlOpType -> "ot"
      HlOpFunc -> "of"
      HlOther -> "__"
-- | A (line, column) position as tracked by 'pr' and the hl* builders.
type LnCol = (Int, Int)
-- | A highlighted source region with its kind.  The derived 'Ord'
-- compares by 'hlStart' first (field order), so sorting puts regions
-- in source order.
data Highlight = Highlight { hlStart :: LnCol
                           , hlEnd :: LnCol
                           , hlType :: HighlightType
                           }
               deriving (Show, Eq, Ord, Generic)
instance Out Highlight
defaultHighlight :: Highlight
defaultHighlight = Highlight (0,0) (0,0) HlOther
data HighlightType = HlComment
| HlModuleName
| HlKeyword
| HlImport
| HlPragma
| HlBrace
| HlComma
| HlElipse
| HlIdentType
| HlIdentFunc
| HlSymbolType
| HlSymbolFunc
| HlSpecialCon
| HlOpType
| HlOpFunc
| HlOther
deriving (Show, Eq, Ord, Generic)
instance Out HighlightType
-- | Crude pretty-printer for 'show' output: every bracket and comma is
-- pushed onto its own line, with two spaces of indentation added per
-- open bracket and removed per close bracket.
prty :: String -> String
prty = fst . foldl step ("", "")
  where
    step (out, indent) c
      | c `elem` "{([" = (out ++ "\n" ++ indent ++ [c], indent ++ "  ")
      | c `elem` "})]" = let indent' = drop 2 indent
                         in (out ++ "\n" ++ indent' ++ [c], indent')
      | c `elem` ","   = (out ++ "\n" ++ indent ++ [c], indent)
      | otherwise      = (out ++ [c], indent)
-- | Trace a labelled, pretty-printed value before returning the third
-- argument unchanged (debug helper).
tracePrtyMsg :: Show a => String -> a -> b -> b
tracePrtyMsg label val = trace (label ++ prty (show val))
-- | Build a 'Highlight' of the given kind covering exactly the span.
hlSrcSpan :: HighlightType -> SrcSpan -> Highlight
hlSrcSpan t sp =
  Highlight { hlStart = (srcSpanStartLine sp, srcSpanStartColumn sp)
            , hlEnd   = (srcSpanEndLine sp, srcSpanEndColumn sp)
            , hlType  = t
            }
-- | Like 'hlSrcSpan' but starting from an annotated span.
hlSrcSpanInfo :: HighlightType -> SrcSpanInfo -> Highlight
hlSrcSpanInfo t info = hlSrcSpan t (srcInfoSpan info)
-- | Highlight a source comment over its recorded span.
hlComment :: Comment -> Highlight
hlComment (Comment _ spn _) = hlSrcSpan HlComment spn
type SPI = SrcSpanInfo
-- | Collect highlights for a whole (non-XML) module: head, pragmas,
-- imports and declarations.
hlModule :: Module SPI -> [Highlight]
hlModule (XmlPage {})   = error "not supporting XmlPage"
hlModule (XmlHybrid {}) = error "not supporting XmlHybrid"
hlModule (Module _ mHead pragmas imports decls) =
  concat [ hlModuleHead mHead
         , map hlModulePragma pragmas
         , concatMap hlImportDecl imports
         , concatMap hlDecl decls
         ]
-- | Highlights for the module header: the two keyword spans, the
-- module name, and any warning/export-list annotations.
-- NOTE(review): the irrefutable list pattern in the where clause
-- assumes 'srcInfoPoints' of a ModuleHead yields exactly two spans
-- (presumably the "module" and "where" keywords) -- confirm against
-- haskell-src-exts before changing this.
hlModuleHead :: Maybe (ModuleHead SPI) -> [Highlight]
hlModuleHead Nothing = []
hlModuleHead (Just (ModuleHead l mName mWarning mExpList)) = [mImport, hlModuleName mName, mWhere]
                                                             ++ hlWarningText mWarning
                                                             ++ hlExportSpecList mExpList
  where
    [mImport, mWhere] = map (hlSrcSpan HlKeyword) . srcInfoPoints $ l
-- | Highlight a module name over its annotation span.
hlModuleName :: ModuleName SPI -> Highlight
hlModuleName (ModuleName info _) = hlSrcSpanInfo HlModuleName info
-- | Highlight a DEPRECATED/WARNING annotation as a pragma, if present.
hlWarningText :: Maybe (WarningText SPI) -> [Highlight]
hlWarningText Nothing  = []
hlWarningText (Just w) = case w of
  DeprText i _ -> [hlSrcSpanInfo HlPragma i]
  WarnText i _ -> [hlSrcSpanInfo HlPragma i]
-- | Highlight an export list's punctuation plus each export item.
hlExportSpecList :: Maybe (ExportSpecList SPI) -> [Highlight]
hlExportSpecList Nothing = []
hlExportSpecList (Just (ExportSpecList i specs)) =
  hlBracedListPunc i ++ concatMap hlExportSpec specs
hlBracedExpr_ :: ([SrcSpan] -> ([Highlight], [SrcSpan])) -> [SrcSpan] -> [Highlight]
hlBracedExpr_ inner (ph:ps) = ob : cb : cs
where
ob = hlSrcSpan HlBrace ph
(cs, pl:_) = inner ps
cb = hlSrcSpan HlBrace pl
hlBracedListPunc :: SPI -> [Highlight]
hlBracedListPunc = hlBracedListPunc' . srcInfoPoints
hlBracedListPunc' :: [SrcSpan] -> [Highlight]
hlBracedListPunc' = hlBracedExpr_ cms
where cms ps = foldl f ([],ps) ps
where f (cs', ps') p = case drop 1 ps' of
[] -> (cs', ps')
ps'' -> (hlSrcSpan HlComma p : cs', ps'')
hlBracedElipse :: SPI -> [Highlight]
hlBracedElipse = hlBracedExpr_ cms . srcInfoPoints
where cms (p:ps) = ([hlSrcSpan HlElipse p], ps)
hlExportSpec :: ExportSpec SPI -> [Highlight]
hlExportSpec x = case x of
EVar _ n -> hlQName False n
EAbs _ n -> hlQName True n
EThingAll i n -> hlBracedElipse i ++ hlQName True n
EThingWith i n cs -> hlBracedListPunc i ++ hlQName True n ++ map hlCName cs
EModuleContents i n -> tracePrtyMsg "EModuleContents" i $ [hlModuleName n]
hlQName :: Bool -> QName SPI -> [Highlight]
hlQName typeLevel x = case x of
Qual _ mn n -> _correct mn (hlModuleName mn) (hlName typeLevel n)
UnQual _ n -> [hlName typeLevel n]
Special _ n -> [hlSpecialCon n]
where
_correct (ModuleName _ s) m n = m {hlEnd = (fst . hlEnd $ m, (snd . hlStart $ m) + length s + 1)}
: n {hlStart = (fst . hlStart $ n, (snd . hlStart $ n) + length s + 1)}
: []
-- | Highlight a name; the Bool selects the type-level variant of the
-- identifier/symbol highlight kind.
hlName :: Bool -> Name SPI -> Highlight
hlName typeLevel nm = case nm of
  Ident  i _ -> hlSrcSpanInfo (if typeLevel then HlIdentType  else HlIdentFunc)  i
  Symbol i _ -> hlSrcSpanInfo (if typeLevel then HlSymbolType else HlSymbolFunc) i
hlSpecialCon :: SpecialCon SPI -> Highlight
hlSpecialCon x = case x of
UnitCon i -> tracePrtyMsg "UnitCon" i hlSrcSpanInfo HlSpecialCon i
ListCon i -> tracePrtyMsg "ListCon" i hlSrcSpanInfo HlSpecialCon i
FunCon i -> tracePrtyMsg "FunCon" i hlSrcSpanInfo HlSpecialCon i
TupleCon i _ _ -> tracePrtyMsg "TupleCon" i hlSrcSpanInfo HlSpecialCon i
Cons i -> tracePrtyMsg "Cons" i hlSrcSpanInfo HlSpecialCon i
UnboxedSingleCon i -> tracePrtyMsg "UnboxedSingleCon" i hlSrcSpanInfo HlSpecialCon i
-- | Highlight a component name: variables at value level, constructors
-- at type level.
hlCName :: CName SPI -> Highlight
hlCName (VarName _ n) = hlName False n
hlCName (ConName _ n) = hlName True n
-- | Every module pragma variant highlights its whole annotation span
-- as a pragma.
hlModulePragma :: ModulePragma SPI -> Highlight
hlModulePragma (LanguagePragma i _)  = hlSrcSpanInfo HlPragma i
hlModulePragma (OptionsPragma i _ _) = hlSrcSpanInfo HlPragma i
hlModulePragma (AnnModulePragma i _) = hlSrcSpanInfo HlPragma i
hlImportDecl :: ImportDecl SPI -> [Highlight]
hlImportDecl ImportDecl {..} = [hlModuleName importModule] ++ _hlImprt ++ _hlSrc ++ _hlQual ++ _hlPkg ++ _hlAs ++ _hlSpec
where
mk t = (:[]) . hlSrcSpan t . head &&& drop 1
(_hlImprt, ps) = mk HlImport . srcInfoPoints $ importAnn
(_hlSrc, ps') = case importSrc of
True -> let ([b], _ps) = mk HlPragma ps
([e], _ps') = mk HlOther _ps
in ([b{hlEnd = hlEnd e}], _ps')
False -> ([], ps)
(_hlQual, ps'') = case importQualified of
True -> mk HlImport ps'
False -> ([], ps')
(_hlPkg, ps''') = case importPkg of
Just s -> mk HlImport ps''
Nothing -> ([], ps'')
_hlAs = case importAs of
Just mn -> let (cs, _ps) = mk HlImport ps'''
in hlModuleName mn : cs
Nothing -> []
_hlSpec = case importSpecs of
Nothing -> []
Just (ImportSpecList i hid imps) -> _hlSpecPunc i hid ++ concatMap _hlImpSpec imps
_hlImpSpec x = case x of
IVar _ n -> [hlName False n]
IAbs _ n -> [hlName True n]
IThingAll i n -> hlName True n : hlBracedElipse i
IThingWith i n cns -> hlName True n : hlBracedListPunc i ++ map hlCName cns
_hlSpecPunc i hid = case hid of
False -> hlBracedListPunc i
True -> uncurry (:) .
( hlSrcSpan HlImport . head
&&& hlBracedListPunc . (\p->i{srcInfoPoints = p}) . drop 1
) . srcInfoPoints $ i
hlDecl :: Decl SPI -> [Highlight]
hlDecl x = case x of
TypeDecl i hd tp -> let hl = hlSrcSpan HlKeyword
sps = srcInfoPoints i
in (hl . head) sps : (hl . last) sps : hlDeclHead hd ++ hlTypE tp
TypeFamDecl i hd knd -> (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlDeclHead hd
++ maybe [] hlKind knd
DataDecl i dn ctx hd qs dr -> hlDataOrNew dn
: (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlContext ctx
++ hlDeclHead hd
++ concatMap hlQualConDecl qs
++ hlDeriving dr
GDataDecl i dn ctx hd knd gds dr -> hlDataOrNew dn
: (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlContext ctx
++ hlDeclHead hd
++ maybe [] hlKind knd
++ concatMap hlGadtDecl gds
++ hlDeriving dr
DataFamDecl i ctx hd knd -> tracePrtyMsg "DataFamDecl" i hlContext ctx ++ hlDeclHead hd ++ maybe [] hlKind knd
TypeInsDecl i tp1 tp2 -> tracePrtyMsg "TypeInstDecl" i hlTypE tp1 ++ hlTypE tp2
DataInsDecl i dn tp qs dr -> []
GDataInsDecl i dn tp knd gds dr -> []
ClassDecl i ctx hd fds cds -> []
InstDecl i ctx ihd ids -> []
DerivDecl i ctx ihd -> []
InfixDecl i ass l ops -> []
DefaultDecl i tp -> []
SpliceDecl i exp -> []
TypeSig i ns tp -> []
FunBind i ms -> []
PatBind i p mtp rhs bnds -> []
ForImp i cv sfty s nm tp -> []
ForExp i cv s nm tp -> []
RulePragmaDecl i r -> []
DeprPragmaDecl i ds -> []
WarnPragmaDecl i ds -> []
InlineSig i b act qnm -> []
InlineConlikeSig i act qnm -> []
SpecSig i act qnm tp -> []
SpecInlineSig i b act qnm tp -> []
InstSig i ctx ihd -> []
AnnPragma i ann -> []
hlDeclHead :: DeclHead SPI -> [Highlight]
hlDeclHead x = case x of
DHead i n tvs -> hlName True n : concatMap hlTyVarBind tvs
DHInfix i tvl n tvr -> hlTyVarBind tvl ++ [hlName True n] ++ hlTyVarBind tvr
DHParen i dh -> hlDeclHead dh
hlTyVarBind :: TyVarBind SPI -> [Highlight]
hlTyVarBind x = case x of
KindedVar i nm kd -> zipWith ($) (zipWith ($) (repeat hlSrcSpan) [HlBrace, HlKeyword, HlBrace]) (srcInfoPoints i) ++ [hlName True nm] ++ hlKind kd
UnkindedVar _ nm -> [hlName True nm]
hlKind :: Kind SPI -> [Highlight]
hlKind x = case x of
KindStar i -> [hlSrcSpanInfo HlOpType i]
KindBang i -> [hlSrcSpanInfo HlOpType i]
KindFn i k1 k2 -> (hlSrcSpan HlKeyword . head . srcInfoPoints $ i) : (hlKind k1 ++ hlKind k2)
KindParen i k -> hlBracedListPunc i ++ hlKind k
KindVar i n -> [hlName True n]
hlTypE :: Type SPI -> [Highlight]
hlTypE x = case x of
TyForall i tvb ctx tp -> (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ maybe [] (concatMap hlTyVarBind) tvb
++ hlContext ctx
++ hlTypE tp
TyFun i tp1 tp2 -> (hlSrcSpan HlKeyword . head . srcInfoPoints $ i)
: hlTypE tp1
++ hlTypE tp2
TyTuple i _ tps -> hlBracedListPunc i ++ concatMap hlTypE tps
TyList i tp -> hlBracedListPunc i ++ (hlTypE tp)
TyApp _ tp1 tp2 -> hlTypE tp1 ++ hlTypE tp2
TyVar _ nm -> [hlName True nm]
TyCon _ qn -> hlQName True qn
TyParen i tp -> hlBracedListPunc i ++ (hlTypE tp)
TyInfix i tp1 qn tp2 -> trace (("TyInfix - "++) . prty . show $ i) (hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2)
TyKind i tp kd -> trace (("TyKind - "++) . prty . show $ i) (hlTypE tp ++ hlKind kd)
hlContext :: Maybe (Context SPI) -> [Highlight]
hlContext x = case x of
Just (CxSingle i ass) -> _punc i ++ hlAsst ass
Just (CxTuple i ass) -> _punc i ++ concatMap hlAsst ass
Just (CxParen i ctx) -> _punc i ++ hlContext (Just ctx)
Just (CxEmpty i) -> trace (("CxEmpty - " ++ ) . prty . show $ i) []
_ -> []
where _punc = uncurry (:) . (hlSrcSpan HlKeyword . last &&& select null (const []) hlBracedListPunc' . init) . srcInfoPoints
hlAsst :: Asst SPI -> [Highlight]
hlAsst x = case x of
ClassA i qn tps -> hlQName True qn ++ concatMap hlTypE tps
InfixA i tp1 qn tp2 -> hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2
IParam i ipn tp -> hlIPName ipn : hlTypE tp
EqualP i tp1 tp2 -> hlTypE tp1 ++ hlTypE tp2
hlIPName :: IPName SPI -> Highlight
hlIPName x = case x of
IPDup i s -> trace (("IPDup - " ++ ) . prty . show $ i) $ hlSrcSpanInfo HlIdentType i
IPLin i s -> trace (("IPLin - " ++ ) . prty . show $ i) $ hlSrcSpanInfo HlIdentType i
-- | Highlight the "data"/"newtype" keyword span.
hlDataOrNew :: DataOrNew SPI -> Highlight
hlDataOrNew (DataType i) = hlSrcSpanInfo HlKeyword i
hlDataOrNew (NewType i)  = hlSrcSpanInfo HlKeyword i
hlQualConDecl :: QualConDecl SPI -> [Highlight]
hlQualConDecl (QualConDecl i tvb ctx cdecl) = -- tracePrtyMsg "hlQualConDecl" i
maybe [] (concatMap hlTyVarBind) tvb
++ hlContext ctx
++ hlConDecl cdecl
++ if isJust tvb then map (hlSrcSpan HlKeyword) . srcInfoPoints $ i else []
-- ++ (select null (const []) hlBracedListPunc' . srcInfoPoints $ i)
hlDeriving :: Maybe (Deriving SPI) -> [Highlight]
hlDeriving x = case x of
Just (Deriving i ihs) -> (uncurry (:) . (hlSrcSpan HlKeyword . head &&& select null (const []) hlBracedListPunc' . drop 1) . srcInfoPoints $ i)
++ concatMap hlInstanceHead ihs
_ -> []
hlInstanceHead :: InstHead SPI -> [Highlight]
hlInstanceHead x = case x of
IHead i qn tps -> {-tracePrtyMsg "IHead" i -}hlQName True qn ++ concatMap hlTypE tps
IHInfix i tp1 qn tp2 -> {-tracePrtyMsg "IHInfix" i -}hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2
IHParen i ih -> {-tracePrtyMsg "IHParen" i -}hlBracedListPunc i ++ hlInstanceHead ih
hlConDecl :: ConDecl SPI -> [Highlight]
hlConDecl x = case x of
ConDecl i nm bgts -> -- tracePrtyMsg "ConDecl" i
hlName True nm
: concatMap hlBangType bgts
InfixConDecl i bgt1 nm bgt2 -> -- tracePrtyMsg "InfixConDecl" i
hlName True nm
: hlBangType bgt1
++ hlBangType bgt2
RecDecl i nm flds -> -- tracePrtyMsg "RecDecl" i
hlName True nm
: hlBracedListPunc i
++ concatMap hlFieldDecl flds
hlFieldDecl :: FieldDecl SPI -> [Highlight]
hlFieldDecl (FieldDecl i nms bgt) = -- tracePrtyMsg "FieldDecl" i
(hlSrcSpan HlKeyword . last . srcInfoPoints $ i)
: (map (hlSrcSpan HlComma) . init . srcInfoPoints $ i)
++ map (hlName True) nms ++ hlBangType bgt
-- | Highlights for a possibly-banged field type: the strictness
-- annotation (when present) as a keyword, plus the type itself.
hlBangType :: BangType SPI -> [Highlight]
hlBangType (BangedTy i tp)   = (hlSrcSpan HlKeyword . head . srcInfoPoints $ i) : hlTypE tp
hlBangType (UnBangedTy _ tp) = hlTypE tp
hlBangType (UnpackedTy i tp) = tracePrtyMsg "UnpackedTy" i hlTypE tp
-- | Highlights for a GADT constructor: the "::" keyword span, the
-- constructor name (type level), and its type.
hlGadtDecl :: GadtDecl SPI -> [Highlight]
hlGadtDecl (GadtDecl i nm tp) = keywordHl : hlName True nm : hlTypE tp
  where
    keywordHl = hlSrcSpan HlKeyword (head (srcInfoPoints i))
| HanStolpo/ghc-edit | test/Exp.hs | mit | 19,543 | 0 | 19 | 7,823 | 6,468 | 3,242 | 3,226 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Export the data source to various data formats.
module DataAnalysis.Application.Handler.Export where
import Blaze.ByteString.Builder
--
import Data.Conduit
import qualified Data.Conduit.List as CL
-- -- import Data.Conduit.Zlib
-- -- import Data.Default
import Data.Double.Conversion.Text
import Data.IORef (newIORef)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.XML.Types
-- -- import Text.XML.Stream.Render
import Yesod
import DataAnalysis.Application.Foundation
import DataAnalysis.Application.Analyze
import DataAnalysis.Application.Types
-- | Export the data source to various data formats.
-- | Handler for @GET@ export requests: look up the analysis source for the
-- given identifier and stream it in the requested format.
--
-- NOTE(review): the actual export is currently stubbed out with
-- 'error' "TODO: Export"; the commented-out case below is the intended
-- implementation (CSV / gzipped CSV / XML / gzipped XML) kept for
-- reference.  Every request therefore fails at runtime until it is
-- restored.
getExportR :: Text -> ExportType -> Handler TypedContent
getExportR ident typ = do
  countRef <- liftIO $ newIORef 0
  logRef <- liftIO $ newIORef id
  source <- analysisSource countRef logRef
  error "TODO: Export"
  {-case typ of
      CsvData ->
        attachmentFromSource
          (fname "csv")
          "text/csv"
          (source
           $= CL.mapMaybe dataPointCSV
           $= (writeHeaders settings >> fromCSV settings)
           $= CL.map fromByteString)
        where settings = def
      CsvDataGzip ->
        attachmentFromSource
          (fname "csv.gz")
          "application/x-gzip"
          (source
           $= CL.mapMaybe dataPointCSV
           $= (writeHeaders settings >> fromCSV settings)
           $= gzip
           $= CL.map fromByteString)
        where settings = def
      XmlData ->
        attachmentFromSource
          (fname "xml")
          "application/xml"
          (source
           $= toXmlRows dataPointXML
           $= renderBuilder settings)
        where settings = def
      XmlDataGzip ->
        attachmentFromSource
          (fname "xml.gz")
          "application/x-gzip"
          (source
           $= toXmlRows dataPointXML
           $= renderBytes settings
           $= gzip
           $= CL.map fromByteString)
        where settings = def-}
  -- Attachment file name: "<ident>-export.<ext>".
  where fname ext =
          ident <> "-export." <> ext
--------------------------------------------------------------------------------
-- CSV export
-- | Convert a data point to maybe a row. Not all data points are
-- data… points.
-- | Convert a data point to one CSV row (a column-name -> value map).
-- 2-D points yield label/value/group columns, 3-D points x/y/z columns;
-- measurement points ('DPM') are dropped ('Nothing').
dataPointCSV :: DataPoint -> Maybe (Map Text Text)
dataPointCSV (DP2 (D2D label value g)) =
  Just
    (Map.fromList
       [("label",label)
       ,("value",toShortest value)
       ,("group",fromMaybe "" g)])   -- absent group becomes an empty cell
dataPointCSV (DP3 (D3D x y z)) =
  Just
    (Map.fromList
       [("x",toShortest (fromIntegral x))
       ,("y",toShortest (fromIntegral y))
       ,("z",toShortest z)])
dataPointCSV DPM{} =
  Nothing
--------------------------------------------------------------------------------
-- XML export
-- | Render a data point to XML events.
--
-- A 2-D point emits @label@, @value@ and (when present) @group@ elements;
-- a 3-D point emits @x@, @y@, @z@.  Measurement points ('DPM') produce no
-- events, mirroring 'dataPointCSV'.
dataPointXML :: Monad m => DataPoint -> Producer m Event
dataPointXML (DP2 dp) =
  do with "label" (text (_d2dLabel dp))
     with "value" (text (tshow (_d2dValue dp)))
     -- Bug fix: the optional group used to be emitted under a second
     -- <label> element; the CSV exporter calls this field "group", so the
     -- XML export now uses the same name.
     maybe (return ()) (with "group" . text) (_d2dGroup dp)
  where text = yield . EventContent . ContentText
dataPointXML (DP3 (D3D x y z)) =
  do with "x" (text (tshow (fromIntegral x)))
     with "y" (text (tshow (fromIntegral y)))
     with "z" (text (tshow z))
  where text = yield . EventContent . ContentText
dataPointXML DPM{} =
  return ()
-- | Render a 'Double' as 'Text' using its 'Show' instance.
tshow :: Double -> Text
tshow d = T.pack (show d)
--------------------------------------------------------------------------------
-- Utilities
-- | Output an attachment from a source.
-- | Respond with the given source streamed as a downloadable attachment.
-- The file name is quoted via 'show' so special characters are escaped in
-- the @content-disposition@ header.
attachmentFromSource :: Text
                     -> ContentType
                     -> Source (HandlerT site IO) Builder
                     -> HandlerT site IO TypedContent
attachmentFromSource filename contentType source = do
  addHeader "content-disposition"
            ("attachment; filename=" <> T.pack (show (T.unpack filename)))
  respondSource contentType
                (source $= CL.map Chunk)
-- | Render to an XML document of rows.
-- | Wrap a per-row renderer into a whole-document conduit: one @rows@
-- element containing a @row@ element per upstream value.
toXmlRows :: Monad m => (row -> Conduit row m Event) -> Conduit row m Event
toXmlRows renderRow =
  do yield EventBeginDocument
     with "rows"
          (awaitForever (with "row" . renderRow))
     yield EventEndDocument
-- | With opening/closing tags for the given name, render the inner
-- conduit inside it.
-- | Emit opening/closing tags (with no attributes) for @name@ around the
-- events produced by the inner conduit.
with :: Monad m => Name -> Conduit void m Event -> Conduit void m Event
with name inner =
  do yield (EventBeginElement name [])
     inner
     yield (EventEndElement name)
| teuffy/min-var-ci | src/DataAnalysis/Application/Handler/Export.hs | mit | 4,762 | 0 | 15 | 1,306 | 954 | 496 | 458 | 78 | 1 |
{-# LANGUAGE RecordWildCards, ParallelListComp #-}
module Exec where
import Data.Char
import Data.List
import Debug.Trace
import TypesEtc
import Sprockell
-- ============================================================================================
-- execution functions for simulation purposes: exec, sim
--
-- addrs: register+memory addresses that you want to follow/inspect when running a program (instrs).
-- instrs: a list of assembly instructions (the program) that the Sprockell executes.
-- count: counts the number of instructions that are executeds. The total is shown at the end.
-- state: contains 4 things: program counter, stack pointer, registers, data memory.
-- i:is: list of inputs. In this case just a repeating clock tick.
--
-- The output of exec is generated every clock cycle by the function demoOutput,
-- after which exec continues in the next state (state') calculated by one cycle of the Sprockell processor.
-- NOTE(review): the pattern match on (i:is) is partial — exec is only ever
-- driven by the infinite 'clock' stream, and instrs!!pc assumes pc stays in
-- range (pc is bound via RecordWildCards from State{..}); confirm before
-- reusing outside 'sim'.
exec addrs instrs (count, state@State{..}) (i:is)
        | instrs!!pc==EndProg   = traceShow ("Instructions: " ++ show count)
                                  []
        | otherwise             = demoOutput addrs instrs state' : exec addrs instrs (count+1, state') is
        where
          state' = sprockell instrs state i
-- ============================================================================================
-- generating demoOutput
--
-- demoOutput calculates a value of type DemoOutput. The function show for this type is in TypesEtc.hs
-- Build one DemoOutput record per cycle: current pc, current instruction,
-- the watched registers and memory cells (addrs), the stack pointer, and
-- the live stack contents from sp0+1 up to sp.
demoOutput addrs instrs State{..} = DemoOutput pc
                                               (instrs!!pc)
                                               (map (regbank!!) regaddrs)
                                               (map (dmem!!) memaddrs)
                                               sp
                                               (map (dmem!!) [sp0+1..sp])
        where
          (regaddrs,memaddrs) = addrs
-- sim: the simulation function which runs exec and outputs the result in a readable way.
-- --------------------------------------------------------------------------------------
-- Run 'exec' from the initial state, prefixing the initial snapshot, and
-- print one line per clock cycle.
sim addrs instrs = putStr . unlines . map show $ results
        where
          results = demoOutput addrs instrs initstate : exec addrs instrs (0,initstate) clock
-- showInstructions: shows a list of instructions in a readable way.
-- -----------------------------------------------------------------
-- | Print a numbered listing of the instructions, with the line numbers
-- right-aligned to the width of the largest one.
showInstrs instrs = putStr (unlines rendered)
        where
          -- width of the widest line number
          width = length $ show $ length instrs + 1
          rendered =
            [ ' ' : padding ++ numStr ++ ": " ++ show instr
            | (n, instr) <- zip [0..] instrs
            , let numStr  = show n
                  padding = replicate (width - length numStr) ' '
            ]
-- ============================================================================================
-- Examples
-- ============================================================================================
{---------------------------------------------
| Example 1: computes the value of 3^5 (= 243)
----------------------------------------------
Program in imperative pseudo-code:
a = 3;
n = 5;
power = 1;
while (n != 0) {
power = a * power;
n = n-1;
};
----------------------------------------------}
-- A list of assembly instruction that calculates example 1
-- --------------------------------------------------------
-- Assembly for example 1 (3^5): registers 3/4/5 hold a, n and power.
instrs1 = [ Load (Imm 3) 3          -- 0 value of a (=3) is put in register 3;
                                    --   Register 3 will be used for a.
          , Load (Imm 5) 4          -- 1 value of n (=5) is put in register 4;
                                    --   Register 4 will contain the value of n throughout the execution.
          , Load (Imm 1) 5          -- 2 initial value of power (=1) is put in register 5;
                                    --   Register 5 be used for the value of the power.
          , Compute Equal 4 0 1     -- 3 Compute n==0 (reg 4 contains the value of n, reg 0 contains 0), and put the result in register 1;
                                    --   Register 1 is checked for conditional jumps.
          , Jump CA 8               -- 4 If True (ie register 1 contains 1), then go to EndProg
          , Compute Mul 3 5 5       -- 5 multiply a (reg 3) with power (reg 5), give the result to power
          , Compute Decr 4 0 4     -- 6 Decrement n (reg 4) with 1
          , Jump UA 3               -- 7 Go back to instruction 3
          , EndProg                 -- 8
          ]
-- relevant addresses to show during simulation
-- --------------------------------------------
-- Watch registers 3,4,5 (a, n, power); no heap addresses.
addrs1 = ( [3,4,5]      -- registers
         , []           -- heap
         ) :: ([Int],[Int])
-- show the list of instructions
-- -----------------------------
is1 = showInstrs instrs1
-- run the program instrs1, and show the content of the addresses addrs1
-- ---------------------------------------------------------------------
run1 = sim addrs1 instrs1
{---------------------------------------
| Example 2: compute the "3n+1" function
----------------------------------------
Program in imperative pseudo-code:
program threeNplus1;
var a;
function even (n);
{return (n%2) == 0};
function three (n);
{ while n>1
{ if even(n)
{ n=n/2; }
{ n=(3*n)+1; };
};
return n
};
{ a = three(7);
}
-}
-- Haskell definition (runnable):
-- -----------------------------------------------------
-- | Collatz ("3n+1") trajectory starting at n, ending at 1.
-- NOTE: diverges for n < 1 (the original behaves the same way).
three :: Int -> [Int]
three n
  | n == 1    = [1]
  | even n    = n : three (n `div` 2)
  | otherwise = n : three (3 * n + 1)
-- A list of assembly instruction that calculates example 2
-- --------------------------------------------------------
-- Assembly for example 2 (Collatz, n=7): reg2=1, reg3=2, reg4=3, reg5=n.
instrs2 = [ Load (Imm 1) 2          -- 0 Load the constant 1 in register 2
          , Load (Imm 2) 3          -- 1 Load the constant 2 in register 3
          , Load (Imm 3) 4          -- 2 Load the constant 3 in register 4
          , Load (Imm 7) 5          -- 3 Load initial value of n (7) in register 5
          , Compute Equal 5 2 1     -- 4 Compute n==1, and load result in register 1;
          , Jump CA 13              -- 5 If reg1=1, then we're done, and thus go to EndProg
          , Compute Mod 5 3 1       -- 6 Otherwise: calculate n`mod`2, and load the result in reg1.
          , Jump CA 10              -- 7 If reg1=1 (i.e: if n is odd), then go to instruction 10
          , Compute Div 5 3 5       -- 8 else divide n by 2 (the content of reg3) and put the result in register 5.
          , Jump UA 4               -- 9 Jump back to instruction 4.
          , Compute Mul 5 4 5       -- 10 At this point n is odd, thus multiply by 3 (the content of reg4)...
          , Compute Add 5 2 5       -- 11 ... and add 1 (the content of reg2).
          , Jump UA 4               -- 12 Jump back to 4.
          , EndProg                 -- 13 End of Program.
          ]
-- relevant addresses to show during simulation
-- --------------------------------------------
-- Watch register 1 (comparison result) and register 5 (n); no heap.
addrs2 = ( [1,5]        -- registers
         , []           -- heap
         ) :: ([Int],[Int])
-- show the list of instructions
-- -----------------------------
is2 = showInstrs instrs2
-- run the program instrs2, and show the content of the addresses addrs2
-- ---------------------------------------------------------------------
run2 = sim addrs2 instrs2
| Oboema/FP-GO1 | Exec.hs | mit | 6,509 | 42 | 14 | 1,392 | 996 | 558 | 438 | 62 | 1 |
-- | Facade module: re-exports everything from "Network.Server.Common".
module Network.Server
(
  module Network.Server.Common
) where
import Network.Server.Common
| harrisi/on-being-better | list-expansion/Haskell/course/projects/NetworkServer/haskell/src/Network/Server.hs | cc0-1.0 | 94 | 0 | 5 | 13 | 21 | 14 | 7 | 4 | 0 |
{- lat - tool to track alerts from LWN.
- Copyright (C) 2010 Magnus Therning
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, version 3 of the License.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Commands.Distro
( distroAdd
, distroList
) where
import qualified AlertDB as ADB
import qualified Types as T
import qualified Commands.ArgTypes as CAT
-- {{{1 distroadd
-- | Add a distribution (name + URL) to the alert database at @dbPath@.
-- Any other 'CAT.ArgType' constructor is reported as a bad argument.
distroAdd :: CAT.ArgType -> IO ()
distroAdd (CAT.DistroAdd dbPath name url) =
    ADB.runAlertDB dbPath (ADB.addDistro $ T.distro name (Just url))
distroAdd _ = print "Bad argument for distroAdd"
-- {{{1 distrolist
-- | Print every distribution stored in the alert database at @dbPath@.
-- Any other 'CAT.ArgType' constructor is reported as a bad argument.
distroList :: CAT.ArgType -> IO ()
distroList (CAT.DistroList dbPath) =
    ADB.runAlertDB dbPath ADB.getAllDistro >>= mapM_ (print . T.prettyNormal)
distroList _ = print "Bad argument for distroList"
| magthe/lat | src/Commands/Distro.hs | gpl-3.0 | 1,291 | 0 | 10 | 243 | 181 | 98 | 83 | 14 | 1 |
module Main where
import Data.Char (isSpace)
import Data.List (intersperse, break)
import Text.Printf (printf)
-- | Read Audacity labels from stdin, write a WebVTT document to stdout.
main :: IO ()
main = getContents >>= putStr . labels2vtt
-- | Convert a whole Audacity label file into a WebVTT document:
-- the "WEBVTT" header, a blank line, then one cue per label line.
labels2vtt :: String -> String
labels2vtt input =
  unlines ("WEBVTT" : "" : cues)
  where
    cues = [showVttCue (readLabel labelLine) | labelLine <- lines input]
-- | Parse one label line: "<start> <end> <text>" with start/end as Doubles.
readLabel :: String -> (Double, Double, String)
readLabel label =
  let (startStr, afterStart) = firstWord label
      (endStr, cueText)      = firstWord afterStart
  in (read startStr, read endStr, cueText)
-- | Split off everything up to the first whitespace character; the
-- separator itself is dropped from the remainder.
firstWord :: String -> (String, String)
firstWord str =
  case break isSpace str of
    (word, rest) -> (word, drop 1 rest)
-- | Render one cue: a "begin --> end" timing line followed by the text.
showVttCue :: (Double, Double, String) -> String
showVttCue (begin, end, cueText) =
  unlines [timecode begin ++ " --> " ++ timecode end, cueText]
-- | Format seconds as a WebVTT timecode: HH:MM:SS.mmm (hours unpadded
-- beyond two digits; milliseconds truncated, not rounded).
timecode :: Double -> String
timecode t =
    joinWith ":" [hh, mm, joinWith "." [ss, ms]]
  where
    joinWith sep = concat . intersperse sep
    (wholeSecs, fracSecs) = properFraction t
    hh = printf "%02d" (wholeSecs `div` 3600 :: Int)
    mm = printf "%02d" (wholeSecs `div` 60 `rem` 60 :: Int)
    ss = printf "%02d" (wholeSecs `rem` 60 :: Int)
    ms = printf "%03d" (truncate (1000 * fracSecs) :: Int)
| mkovacs/labels2vtt | src/main/Main.hs | gpl-3.0 | 1,164 | 0 | 11 | 261 | 489 | 271 | 218 | 30 | 1 |
{-|
Module : Voogie.Parse.FOOL
Description : Parser of FOOL formulas embedded in Boogie properties.
Copyright : (c) Evgenii Kotelnikov, 2019
License : GPL-3
Maintainer : evgeny.kotelnikov@gmail.com
Stability : provisional
-}
module Voogie.Parse.FOOL (
module Voogie.Parse,
term,
formula
) where
import Control.Applicative ((<|>), many)
import Text.Parsec.Expr (Assoc, buildExpressionParser)
import Voogie.AST.FOOL
import Voogie.Parse
import Voogie.Language
-- | A FOOL term: an operand ('arg') combined via the operator table below.
term :: Parser Term
term = buildExpressionParser operators arg
-- | Prefix operator entry for a unary FOOL operator, keyed by its name.
unary :: UnaryOp -> Operator Term
unary = prefix <$> Unary <*> nameOf
-- | Infix operator entry for a binary FOOL operator with the given fixity.
binary :: BinaryOp -> Assoc -> Operator Term
binary = infix' <$> Binary <*> nameOf
-- | Infix entry for (possibly negated) equality.
equals :: Sign -> Assoc -> Operator Term
equals = infix' <$> Equals <*> nameOf
-- | The full precedence table: all unary operators bind tighter than the
-- binary ones; each inner list is one precedence level, highest first.
operators :: [[Operator Term]]
operators = unaryOperators ++ binaryOperators
  where
    unaryOperators = fmap (\op -> unary <$> [op]) [minBound..]
    binaryOperators = [
        assocLeft $ binary <$> [Multiply, Divide],
        assocLeft $ binary <$> [Add, Subtract],
        assocNone $ binary <$> [Greater, Less, Geq, Leq],
        assocNone $ equals <$> [Pos, Neg],
        assocLeft $ binary <$> [And, Or],
        assocNone $ binary <$> [Imply],
        assocLeft $ binary <$> [Iff]
      ]
-- | An operand of the expression grammar: a parenthesised term, a
-- quantified formula, a boolean/integer literal, a (possibly indexed)
-- reference, or a conditional term.
arg :: Parser Term
arg = parens term
  <|> quantify
  <|> ast (BooleanConstant <$> boolean)
  <|> ast (IntegerConstant <$> integer)
  <|> ref
  <|> ifElse
-- | A variable or array reference: an identifier followed by zero or more
-- bracketed index lists.
ref :: Parser Term
ref = ast $ Ref <$> identifier <*> many (brackets $ commaSep1 term)
-- | @if c then t else e@ conditional term.
ifElse :: Parser Term
ifElse = ast $ IfElse
  <$> (reserved kwdIf *> term)
  <*> (reserved kwdThen *> term)
  <*> (reserved kwdElse *> term)
-- | A quantified formula: quantifier, typed variable lists, the quantifier
-- separator, then the body term.
quantify :: Parser Formula
quantify = ast $ Quantify
  <$> quantifier
  <*> commaSep1 (typed $ commaSep1 identifier) <* reserved opQsep
  <*> term
-- | Formulas are just boolean-valued terms in FOOL.
formula :: Parser Formula
formula = term
| aztek/voogie | src/Voogie/Parse/FOOL.hs | gpl-3.0 | 1,902 | 0 | 11 | 438 | 560 | 307 | 253 | 49 | 1 |
{-
TrigramCore_Test: Tests for the trigram generator.
Copyright (C) 2009 Mark Buer
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module TrigramCore_Test where
import TrigramCore
import Test.HUnit
import Data.Map
import Data.Maybe
import System.Random
-- useful test fixtures
-- | No words at all: the degenerate input.
emptyWords :: [String]
emptyWords = []
-- | A large uniform input, used to exercise stack behaviour.
manyWords :: [String]
manyWords = ["large" | _ <- [1 .. 100000 :: Int]]
-- Tests for building the trigram map, including a large input to guard
-- against stack overflows in the fold.
trigramMapFromWordsTests = TestLabel "trigramMapFromWords tests" $ TestList [
    TestCase $ assertEqual
      "Should get empty map from empty list"
      Data.Map.empty $ trigramMapFromWords emptyWords
    ,
    TestCase $ assertEqual
      "Should not run out of stack space..."
      [(("large", "large"), drop 2 manyWords)] $ toList (trigramMapFromWords manyWords)
  ]
-- Tests for random selection; expected picks depend on the fixed StdGen
-- seeds used below.
takeRandomTests = TestLabel "takeRandom tests" $ TestList [
    TestCase $ assertEqual
      "Should take nothing from an empty list"
      True $ isNothing $ takeRandom emptyWords (mkStdGen 1234)
    ,
    TestCase $ assertEqual
      "Should take the only string in the list"
      "only" $ fst $ fromJust $ takeRandom ["only"] (mkStdGen 1234)
    ,
    TestCase $ assertEqual
      "Should take the first string in the list"
      "one" $ fst $ fromJust $ takeRandom ["one", "two"] (mkStdGen 4467821)
    ,
    TestCase $ assertEqual
      "Should take the second string in the list"
      "two" $ fst $ fromJust $ takeRandom ["one", "two"] (mkStdGen 1234)
  ]
-- Tests for the prefix-accumulating join of word lists.
spaceGraduallyTests = TestLabel "spaceGradually tests" $ TestList [
    TestCase $ assertEqual
      "No strings should result in no strings"
      emptyWords $ spaceGradually emptyWords
    ,
    TestCase $ assertEqual
      "One strings should result in one strings"
      ["one"] $ spaceGradually ["one"]
    ,
    TestCase $ assertEqual
      "Two strings should result in two strings"
      ["one", "one two"] $ spaceGradually ["one", "two"]
    ,
    TestCase $ assertEqual
      "Three strings should result in three strings"
      ["one", "one two", "one two three"] $ spaceGradually ["one", "two", "three"]
  ]
-- Tests pairing each accumulated prefix with the remaining words.
potentialLinesTests = TestLabel "potentialLines tests" $ TestList [
    TestCase $ assertEqual
      "Empty list should result in empty list"
      [] $ potentialLines emptyWords
    ,
    TestCase $ assertEqual
      "List with one element should result in list of one tuple"
      [("one", [])] $ potentialLines ["one"]
    ,
    TestCase $ assertEqual
      "List with two elements should result in list with two tuples"
      [("one", ["two"]), ("one two", [])] $ potentialLines ["one", "two"]
    ,
    TestCase $ assertEqual
      "List with three elements should result in list with two three"
      [("one", ["two", "three"]), ("one two", ["three"]), ("one two three", [])] $ potentialLines ["one", "two", "three"]
  ]
-- Tests for taking one line of at most the given width.
takeLineTests = TestLabel "takeLine tests" $ TestList [
    TestCase $ assertEqual
      "Strings should be exactly the length of a line"
      ("", emptyWords) $ takeLine 10 []
    ,
    TestCase $ assertEqual
      "Strings should be exactly the length of a line"
      ("This is a test", emptyWords) $ takeLine 14 ["This", "is", "a", "test"]
  ]
-- Tests for the full word-wrapping output (always newline-terminated).
prettifyTests = TestLabel "prettify tests" $ TestList [
    TestCase $ assertEqual
      "Should be empty string"
      "\n" $ prettify emptyWords 80
    ,
    TestCase $ assertEqual
      "String forced despite line length"
      "Hello\n" $ prettify ["Hello"] 3
    ,
    TestCase $ assertEqual
      "String (without newline) should be exactly the length of a line"
      "This is a test\n" $ prettify ["This", "is", "a", "test"] 14
    ,
    TestCase $ assertEqual
      "Line should be broken according to length"
      "This\nis a\ntest\n" $ prettify ["This", "is", "a", "test"] 6
  ]
-- Run every test group defined above.
main = runTestTT $ TestList [
    trigramMapFromWordsTests,
    takeRandomTests,
    spaceGraduallyTests,
    potentialLinesTests,
    takeLineTests,
    prettifyTests
  ]
| zymbolic-creations/learningHaskell | trigram/TrigramCore_Test.hs | gpl-3.0 | 4,683 | 0 | 13 | 1,264 | 859 | 460 | 399 | 83 | 1 |
import Test.DocTest
-- | Run doctest over the application's main module.
main :: IO ()
main = doctest ["src/Main.hs"]
| romanofski/checkCalendar | tests/doctests.hs | gpl-3.0 | 66 | 0 | 6 | 11 | 27 | 14 | 13 | 3 | 1 |
module HsRegex (subRange, char, dot, endl, stl, spc, notSpc, wc, notWc, digit,
notDigit, alnum, plus, star, pipe, range, notRange, qMark, wb,
reGroup, var, mN, mLN, mN1N2, combine, (=~), RegexS)
where
import Control.Monad as C
import Control.Monad.Loops
import Control.Monad.State as S
import Control.Monad.Writer
import Data.Char
import Data.List
import Data.String.Utils
import qualified Data.Text as T
-- Regex interpreter state:
--   failed    : whether the regex is currently in a failed state
--   groups    : captured group texts, in capture order (1-indexed via $n)
--   text      : the full target text being matched
--   positions : match history; the inner [Int] contains the matches of a
--               single Regex, the outer [[Int]] records the history of
--               matches of the full combined regex.
data RegexS = RegexS {failed :: Bool, groups :: [T.Text], text :: T.Text, positions :: [[Int]]} deriving (Show)
type Regex = State RegexS
-- returns current position (furthermost match of the newest collection).
-- NOTE(review): partial — head fails if positions (or its first entry) is
-- empty; presumably every RegexS is built with at least [[i]]. Confirm.
position :: RegexS -> Int
position = head . head . positions
-- for extracting substrings
-- | The substring covering the half-open index range [from, to).
subRange :: (Int, Int) -> T.Text -> T.Text
subRange (from, to) txt = T.take (to - from) (T.drop from txt)
-- | Extract the substring for every (start, end) index pair.
extractMatches :: T.Text -> [(Int, Int)] -> [T.Text]
extractMatches str ranges = [subRange r str | r <- ranges]
-- Append a newly captured group text (groups stay in capture order).
addGroup :: T.Text -> Regex ()
addGroup g = modify $ \(RegexS f gs t p) -> RegexS f (gs ++ [g]) t p
-- | Look up capture group number @varN@ (1-indexed, as in @$1@).
-- Returns 'Nothing' for out-of-range group numbers.
getGroup :: Int -> Regex (Maybe T.Text)
getGroup varN = do gs <- gets groups
                   -- A valid 1-based index satisfies 1 <= varN <= length gs.
                   -- The previous bound (varN < length gs) made the last
                   -- captured group unreachable and allowed varN = 0 to
                   -- index at -1.
                   return $ if varN >= 1 && varN <= length gs
                              then Just $ gs !! (varN - 1)
                              else Nothing
-- sets current position by pushing a fresh single-element match collection
setPos :: Int -> Regex ()
setPos p = modify $ \(RegexS f g t psCol) -> RegexS f g t ([p] : psCol)
-- executes Regex without altering state (runs m, then restores the state)
saveState :: Regex Bool -> Regex Bool
saveState m = do initialState <- get
                 matched <- m
                 put initialState
                 return matched
-- puts Regex state in Fail or Success state depending on outcome
regexGuard :: Regex Bool -> Regex Bool
regexGuard m = do matched <- m
                  if matched then successRegex
                  else failRegex
-- adds new collection of matches, offset i from the current position.
movePos :: Int -> Regex ()
movePos i = getPos >>= (setPos . (i +))
-- appends to current collection of matches.
addPos :: Int -> Regex ()
addPos p = modify $ \(RegexS f g t (ps:psCol)) -> RegexS f g t ((p:ps):psCol)
-- removes the current collection of matches and returns it.
-- NOTE(review): partial — assumes at least one collection exists.
popPosCollection :: Regex [Int]
popPosCollection = state $ \(RegexS f g t (ps:psCol)) -> (ps, RegexS f g t psCol)
currentChar :: Regex Char
currentChar = gets $ \s -> T.index (text s) (position s)
-- returns furthermost position in current collection of matches.
getPos :: Regex Int
getPos = gets position
-- returns length of target text
getTextLength :: Regex Int
getTextLength = gets $ T.length . text
-- returns the positions of matches
getMatches :: RegexS -> [Int]
getMatches = head . positions
-- puts Regex in a failed state
failRegex :: Regex Bool
failRegex = modify (\(RegexS _ g t p) -> RegexS True g t p) >> return False
-- puts Regex in a success state
successRegex :: Regex Bool
successRegex = modify (\(RegexS _ g t p) -> RegexS False g t p) >> return True
-- Run one sub-regex and fold its freshly pushed match collection back into
-- the enclosing one: on success the new head position is appended to the
-- parent collection, on failure (or end of input) it is discarded.
match :: Regex Bool -> Regex Bool
match m = do
  matched <- m
  st <- get
  if position st < T.length (text st) && matched
    then popPosCollection >>= addPos . head >> return True
    else popPosCollection >> return False
-- match using regexp m as many times as possible; yields the repeat count
greedyMatch :: Regex Bool -> Regex Int
greedyMatch m = length <$> whileM (match m) (return ())
-- match using regexp m N times
-- if N is not satisfied, fst is false
matchN :: Regex Bool -> Int -> Regex Bool
matchN m i = andM $ replicate i (match m)
-- match using regexp m at least N times
-- if N is not satisfied, fst is false
matchAtLeastN :: Regex Bool -> Int -> Regex Bool
matchAtLeastN m i = (i <=) <$> greedyMatch m
-- match using regexp m between N1 and N2
-- if not between N1 N2, fst is false
matchBetweenN1N2 :: Regex Bool -> Int -> Int -> Regex Bool
matchBetweenN1N2 m n1 n2 = (>= n1) . length . takeWhile id <$>
                           replicateM n2 (match m)
-- characters: literal character match
char :: Char -> Regex Bool
char c = liftM (== c) currentChar <* movePos 1
-- . : any character except newline
dot :: Regex Bool
dot = liftM ('\n' /=) currentChar <* movePos 1
-- $ : end of input or a newline at the current position
endl :: Regex Bool
endl = orM [liftM2 (>=) getPos getTextLength, liftM (== '\n') currentChar] <*
       movePos 1
-- ^ : start of input or the previous character is a newline
stl :: Regex Bool
stl = gets $ \st -> position st == 0 ||
                    T.index (text st) (position st - 1) == '\n'
-- \s
spc :: Regex Bool
spc = liftM isSpace currentChar <* movePos 1
-- \S
notSpc :: Regex Bool
notSpc = liftM not spc
-- \w (letters only)
wc :: Regex Bool
wc = liftM isLetter currentChar <* movePos 1
-- \W
notWc :: Regex Bool
notWc = liftM not wc
-- \d
digit :: Regex Bool
digit = liftM (`elem` ['0' .. '9']) currentChar <* movePos 1
-- \D
notDigit :: Regex Bool
notDigit = liftM not digit
-- alphanumeric character
alnum :: Regex Bool
alnum = liftM isAlphaNum currentChar <* movePos 1
-- + : one or more repetitions
plus :: Regex Bool -> Regex Bool
plus m = (> 0) <$> greedyMatch m
-- * : zero or more repetitions (always succeeds)
star :: Regex Bool -> Regex Bool
star m = greedyMatch m >> return True
-- | : alternation; m2 is tried from the saved state only if m1 fails
pipe :: Regex Bool -> Regex Bool -> Regex Bool
pipe m1 m2 = do initialState <- get
                matched <- m1
                if matched then return True else put initialState >> m2
-- [] : character class
range :: String -> Regex Bool
range cs = liftM (`elem` cs) currentChar <* movePos 1
-- [^] : negated character class
notRange :: String -> Regex Bool
notRange cs = liftM not (range cs)
-- ? : optional; restores the position when m fails
qMark :: Regex Bool -> Regex Bool
qMark m = do oldPos <- getPos
             mR <- m
             if not mR then setPos oldPos >> return True else return True
-- \b : word boundary
-- NOTE(review): implemented as "one or more spaces followed by a
-- lookahead for an alphanumeric" — narrower than the usual \b semantics.
wb :: Regex Bool
wb = plus spc >> saveState alnum
-- (regexp) : capture group — records the text spanned by the sub-regex
reGroup :: [Regex Bool] -> Regex Bool
reGroup rs = do initialPos <- getPos
                matched <- combine rs
                finalPos <- getPos
                st <- get
                addGroup $ subRange (initialPos, finalPos) (text st)
                return matched
-- $n : back-reference to capture group n.
var :: Int -> Regex Bool
var varN = do
  pos <- getPos
  st <- get
  g <- getGroup varN
  case g of
    (Just str) -> do
      let strEndIndex = pos + T.length str
      -- The back-reference matches iff the captured text is non-empty AND
      -- equals the text at the current position.  The previous condition,
      -- `not $ T.null str && str == ...`, negated the whole conjunction
      -- (`$` binds loosest), so almost any input "matched" the group.
      if not (T.null str) && str == subRange (pos, strEndIndex) (text st)
        then setPos strEndIndex >> return True
        else return False
    Nothing -> return False
-- {n} : exactly n repetitions
mN :: Regex Bool -> Int -> Regex Bool
mN = matchN
-- {n,} : at least n repetitions (delegates to matchAtLeastN)
mLN :: Regex Bool -> Int -> Regex Bool
mLN = matchAtLeastN
-- {n, n} : between minM and maxM repetitions
mN1N2 :: Regex Bool -> (Int, Int) -> Regex Bool
mN1N2 m (minM, maxM) = matchBetweenN1N2 m minM maxM
-- One state per suffix of the current match collection: used to backtrack
-- over every candidate position recorded so far.
genStates :: RegexS -> [RegexS]
genStates (RegexS f g t (p:ps)) = fmap (RegexS f g t . (:ps) . (`drop` p))
                                  [0.. length p - 1]
-- Run the candidate continuations, drop the failed ones, and order the
-- survivors by position.
sortMatches :: [Regex RegexS] -> Regex [RegexS]
sortMatches = fmap (sortOn position . filter (not . failed)) . C.sequence
-- threads an updated state through all the Regex; when a sub-regex records
-- several candidate positions, every candidate is tried (via genStates)
-- and the furthest surviving state (last of the sorted list) wins.
withRegexState :: [Regex Bool] -> RegexS -> Regex RegexS
withRegexState [] st = return st
withRegexState (r:rs) st =
  do put st
     pos <- getPos
     len <- getTextLength
     if pos < len then
       do m <- regexGuard r
          s <- get
          if m then
            if length (getMatches s) > 1 then
              do newSts <- sortMatches $ map (withRegexState rs) (genStates s)
                 if not $ null newSts then
                   let newSt = last newSts in put newSt >> return newSt
                 else failRegex >> get
            else withRegexState rs s
          else failRegex >> return s
     else failRegex >> return st
-- combines multiple Regex in one Regex; succeeds iff the final threaded
-- state is not failed.
combine :: [Regex Bool] -> Regex Bool
combine rs = liftM (not . failed) $ get >>= withRegexState rs
-- replace all matches of (combine rs) with subStr
replaceRegex :: [Regex Bool] -> String -> String -> String
replaceRegex rs ss subStr = foldr (`replace` subStr) ss (ss =~ rs)
-- return all matches of (combine rs), as (start, end) index pairs: the
-- combined regex is attempted from every position of the input.
matchRegex :: [Regex Bool] -> T.Text -> [(Int, Int)]
matchRegex rs ss = S.join $ map (snd . runWriter . subMatch ss (combine rs))
                   [0 .. T.length ss]
  where subMatch :: T.Text -> Regex Bool -> Int -> Writer [(Int, Int)] ()
        subMatch str re i =
          let st = execState re (RegexS False [] str [[i]])
              pos = position st in
           when (pos <= T.length (text st) && not (failed st)) $ tell [(i, pos)]
-- apply the regex at every position of str and return the matched
-- substrings (a trailing newline is appended so $ can match at the end).
(=~) :: String -> [Regex Bool] -> [String]
(=~) str rs = fmap T.unpack $ extractMatches textStr $ matchRegex rs
              (T.append textStr $ T.pack "\n") where textStr = T.pack str
| cgroza/HsRegex | HsRegex.hs | gpl-3.0 | 8,686 | 0 | 19 | 2,403 | 3,079 | 1,597 | 1,482 | 175 | 5 |
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
-- NOTE(review): layout fixture for the brittany formatter — the exact
-- formatting of the definition below is the expected output, so it must
-- not be reformatted.
func (x :+: xr) = x
| lspitzner/brittany | data/Test416.hs | agpl-3.0 | 134 | 0 | 7 | 17 | 17 | 9 | 8 | 1 | 1 |
{-----------------------------------------------------------------------------
A LIBRARY OF MONADIC PARSER COMBINATORS
29th July 1996
Graham Hutton Erik Meijer
University of Nottingham University of Utrecht
This Haskell 1.3 script defines a library of parser combinators, and is taken
from sections 1-6 of our article "Monadic Parser Combinators". Some changes
to the library have been made in the move from Gofer to Haskell:
* Do notation is used in place of monad comprehension notation;
* The parser datatype is defined using "newtype", to avoid the overhead
of tagging and untagging parsers with the P constructor.
------------------------------------------------------------------------------
** Extended to allow a symbol table/state to be threaded through the monad.
** Extended to allow a parameterised token type, rather than just strings.
** Extended to allow error-reporting.
(Extensions: 1998-2000 Malcolm.Wallace@cs.york.ac.uk)
(More extensions: 2004 gk-haskell@ninebynine.org)
------------------------------------------------------------------------------}
-- | This library of monadic parser combinators is based on the ones
-- defined by Graham Hutton and Erik Meijer. It has been extended by
-- Malcolm Wallace to use an abstract token type (no longer just a
-- string) as input, and to incorporate state in the monad, useful
-- for symbol tables, macros, and so on. Basic facilities for error
-- reporting have also been added, and later extended by Graham Klyne
-- to return the errors through an @Either@ type, rather than just
-- calling @error@.
module Text.ParserCombinators.HuttonMeijerWallace
(
-- * The parser monad
Parser(..)
-- * Primitive parser combinators
, item, eof, papply, papply'
-- * Derived combinators
, (+++), {-sat,-} tok, nottok, many, many1
, sepby, sepby1, chainl, chainl1, chainr, chainr1, ops, bracket
, toEOF
-- * Error handling
, elserror
-- * State handling
, stupd, stquery, stget
-- * Re-parsing
, reparse
) where
import Data.Char
import Control.Monad
import Control.Applicative hiding (many)
infixr 5 +++
--- The parser monad ---------------------------------------------------------
-- | The overall result of a parse: either an error of type @e@, or a list
-- of alternative successes, each carrying a result value, the final state,
-- and the remaining (lexed) input.
type ParseResult s t e a = Either e [(a,s,[Either e t])]
newtype Parser s t e a = P ( s -> [Either e t] -> ParseResult s t e a )
-- ^ The parser type is parametrised on the types of the state @s@,
-- the input tokens @t@, error-type @e@, and the result value @a@.
-- The state and remaining input are threaded through the monad.
-- Input tokens are wrapped in 'Either' so that lexical errors can be
-- embedded in the token stream and only reported if the parser actually
-- demands the offending token.
instance Functor (Parser s t e) where
    -- fmap :: (a -> b) -> (Parser s t e a -> Parser s t e b)
    -- Rewrite the result value of every successful alternative with @f@;
    -- an error result ('Left') passes through untouched, courtesy of
    -- 'Either''s own 'fmap'.
    fmap f (P parse) = P $ \st inp ->
        fmap (map (\ (v, s, rest) -> (f v, s, rest))) (parse st inp)
-- | 'pure' is defined directly rather than as @pure = return@: since the
-- Applicative-Monad Proposal, 'return' is expected to default to 'pure',
-- so defining 'pure' in terms of 'return' is backwards and risks a cycle
-- if the explicit 'return' definition is ever removed from the 'Monad'
-- instance below.
instance Applicative (Parser s t e) where
    pure v = P (\st inp -> Right [(v,st,inp)])
    (<*>) = ap
-- | Alternatives are tried against the same state and input; their
-- results are merged by 'mplus' (see 'joinresults' for the precedence
-- of errors versus successes).
instance Alternative (Parser s t e) where
    (<|>) = mplus
    empty = mzero
instance Monad (Parser s t e) where
    -- return :: a -> Parser s t e a
    return v = P (\st inp -> Right [(v,st,inp)])
    -- >>= :: Parser s t e a -> (a -> Parser s t e b) -> Parser s t e b
    -- Run @p@, then feed each successful alternative (value, new state,
    -- remaining input) through @f@, merging all resulting parses with
    -- 'joinresults' so that explicitly raised errors stay dominant.
    (P p) >>= f = P (\st inp -> case p st inp of
                      Right res -> foldr joinresults (Right [])
                            [ papply' (f v) s out | (v,s,out) <- res ]
                      Left err -> Left err
                    )
    -- fail :: String -> Parser s t e a
    -- Note: 'fail' deliberately discards its message and yields an empty
    -- success list (soft failure), not a raised error.
    fail err = P (\st inp -> Right [])
    -- I know it's counterintuitive, but we want no-parse, not an error.
instance MonadPlus (Parser s t e) where
    -- mzero :: Parser s t e a
    mzero = P (\st inp -> Right [])
    -- mplus :: Parser s t e a -> Parser s t e a -> Parser s t e a
    -- Both branches see the same state and input; their alternatives are
    -- merged by 'joinresults'.
    (P p) `mplus` (P q) = P (\st inp -> joinresults (p st inp) (q st inp))
-- joinresults ensures that explicitly raised errors are dominant,
-- provided no parse has yet been found. The commented out code is
-- a slightly stricter specification of the real code.
joinresults :: ParseResult s t e a -> ParseResult s t e a -> ParseResult s t e a
{-
joinresults (Left p) (Left q) = Left p
joinresults (Left p) (Right _) = Left p
joinresults (Right []) (Left q) = Left q
joinresults (Right p) (Left q) = Right p
joinresults (Right p) (Right q) = Right (p++q)
-}
-- An error on the left always wins; an empty success list defers entirely
-- to the right; a non-empty success list appends any further successes
-- from the right and silently drops a right-hand error.
-- (Unused right-hand argument is now a wildcard, silencing
-- -Wunused-matches without changing behaviour.)
joinresults (Left p) _ = Left p
joinresults (Right []) q = q
joinresults (Right p) q = Right (p++ case q of Left _ -> []
                                               Right r -> r)
--- Primitive parser combinators ---------------------------------------------
-- | Deliver the first remaining token.
-- Fails softly (empty success list) at end of input, but an embedded
-- lexical error is promoted to a hard parse error immediately.
item :: Parser s t e t
item = P (\st inp -> case inp of
            [] -> Right []
            (Left e: _) -> Left e
            (Right x: xs) -> Right [(x,st,xs)]
         )
-- | Fail if end of input is not reached
-- Here tokens are (position, token) pairs; the position of the first
-- unconsumed token is used in the diagnostic.
eof :: Show p => Parser s (p,t) String ()
eof = P (\st inp -> case inp of
            [] -> Right [((),st,[])]
            (Left e:_) -> Left e
            (Right (p,_):_) -> Left ("End of input expected at "
                                     ++show p++"\n but found text")
        )
{-
-- | Ensure the value delivered by the parser is evaluated to WHNF.
force :: Parser s t e a -> Parser s t e a
force (P p) = P (\st inp -> let Right xs = p st inp
h = head xs in
h `seq` Right (h: tail xs)
)
-- [[[GK]]] ^^^^^^
-- WHNF = Weak Head Normal Form, meaning that it has no top-level redex.
-- In this case, I think that means that the first element of the list
-- is fully evaluated.
--
-- NOTE: the original form of this function fails if there is no parse
-- result for p st inp (head xs fails if xs is null), so the modified
-- form can assume a Right value only.
--
-- Why is this needed?
-- It's not exported, and the only use of this I see is commented out.
---------------------------------------
-}
-- | Deliver the first parse result only, eliminating any backtracking.
-- A 'Left' error, or an empty success list, is passed through unchanged.
first :: Parser s t e a -> Parser s t e a
first (P p) = P (\st inp -> case p st inp of
                    -- Renamed from the original @otherwise -> otherwise@:
                    -- using @otherwise@ as a pattern variable shadows
                    -- 'Prelude.otherwise' and reads as a guard when it is
                    -- really a catch-all binding.
                    Right (x:_) -> Right [x]
                    other       -> other
                )
-- | Apply the parser to some real input, given an initial state value.
-- If the parser fails, raise 'error' to halt the program.
-- (This is the original exported behaviour - to allow the caller to
-- deal with the error differently, see @papply'@.)
-- Implemented by collapsing the 'Either' of 'papply'': a 'Left'
-- diagnostic is fed straight to 'error'.
papply :: Parser s t String a -> s -> [Either String t]
          -> [(a,s,[Either String t])]
papply (P p) st inp = either error id (p st inp)
-- | Apply the parser to some real input, given an initial state value.
-- A failed parse is returned as a 'Left' diagnostic for the caller to
-- handle, rather than raising 'error' (contrast 'papply').
papply' :: Parser s t e a -> s -> [Either e t]
           -> Either e [(a,s,[Either e t])]
papply' (P p) = p
--- Derived combinators ------------------------------------------------------
-- | A choice between parsers. Keep only the first success.
(+++) :: Parser s t e a -> Parser s t e a -> Parser s t e a
p +++ q = first (p `mplus` q)
-- | Deliver the first token if it satisfies a predicate.
-- (Tokens are (position, token) pairs; the position is discarded here.)
sat :: (t -> Bool) -> Parser s (p,t) e t
sat p = do {(_,x) <- item; if p x then return x else mzero}
-- | Deliver the first token if it equals the argument.
tok :: Eq t => t -> Parser s (p,t) e t
tok t = do {(_,x) <- item; if x==t then return t else mzero}
-- | Deliver the first token if it does not equal the argument.
-- (Despite the name, the argument is a list: the token must not be a
-- member of it.)
nottok :: Eq t => [t] -> Parser s (p,t) e t
nottok ts = do {(_,x) <- item; if x `notElem` ts then return x
                               else mzero}
-- | Deliver zero or more values of @a@.
many :: Parser s t e a -> Parser s t e [a]
many p = many1 p +++ return []
--many p = force (many1 p +++ return [])
-- | Deliver one or more values of @a@.
many1 :: Parser s t e a -> Parser s t e [a]
many1 p = do {x <- p; xs <- many p; return (x:xs)}
-- | Deliver zero or more values of @a@ separated by @b@'s.
sepby :: Parser s t e a -> Parser s t e b -> Parser s t e [a]
p `sepby` sep = (p `sepby1` sep) +++ return []
-- | Deliver one or more values of @a@ separated by @b@'s.
sepby1 :: Parser s t e a -> Parser s t e b -> Parser s t e [a]
p `sepby1` sep = do {x <- p; xs <- many (do {sep; p}); return (x:xs)}
-- | Parse a left-associated chain of @p@'s combined by @op@, or deliver
-- the default value @v@ if no @p@ parses.
chainl :: Parser s t e a -> Parser s t e (a->a->a) -> a
          -> Parser s t e a
chainl p op v = (p `chainl1` op) +++ return v
-- | Parse one or more @p@'s combined left-associatively by @op@.
chainl1 :: Parser s t e a -> Parser s t e (a->a->a) -> Parser s t e a
p `chainl1` op = do {x <- p; rest x}
   where
      rest x = do {f <- op; y <- p; rest (f x y)}
               +++ return x
-- | Parse a right-associated chain of @p@'s combined by @op@, or deliver
-- the default value @v@ if no @p@ parses.
chainr :: Parser s t e a -> Parser s t e (a->a->a) -> a
          -> Parser s t e a
chainr p op v = (p `chainr1` op) +++ return v
-- | Parse one or more @p@'s combined right-associatively by @op@
-- (note the recursive call to @chainr1@ for the right operand).
chainr1 :: Parser s t e a -> Parser s t e (a->a->a) -> Parser s t e a
p `chainr1` op = do {x <- p; rest x}
   where
      rest x = do { f <- op
                  ; y <- p `chainr1` op
                  ; return (f x y)
                  }
               +++ return x
-- | Given parsers paired with result values, deliver the value of the
-- first parser that succeeds.
ops :: [(Parser s t e a, b)] -> Parser s t e b
ops xs = foldr1 (+++) [do {p; return op} | (p,op) <- xs]
-- | Parse @p@ between @open@ and @close@, delivering the value of @p@.
bracket :: (Show p,Show t) =>
           Parser s (p,t) e a -> Parser s (p,t) e b ->
           Parser s (p,t) e c -> Parser s (p,t) e b
bracket open p close = do { open
                          ; x <- p
                          ; close -- `elserror` "improperly matched construct";
                          ; return x
                          }
-- | Accept a complete parse of the input only, no partial parses.
toEOF :: Show p =>
         Parser s (p,t) String a -> Parser s (p,t) String a
toEOF p = do { x <- p; eof; return x }
--- Error handling -----------------------------------------------------------
-- | Return an error using the supplied diagnostic string, and a token type
-- which includes position information.
-- Never consumes input: it always raises a 'Left', describing EOF, an
-- embedded lexical error, or the next unconsumed token.
parseerror :: (Show p,Show t) => String -> Parser s (p,t) String a
parseerror err = P (\st inp ->
                     case inp of
                       [] -> Left "Parse error: unexpected EOF\n"
                       (Left e:_) -> Left ("Lexical error: "++e)
                       (Right (p,t):_) ->
                           Left ("Parse error: in "++show p++"\n    "
                                 ++err++"\n    "++"Found "++show t)
                   )
-- | If the parser fails, generate an error message.
-- (A soft failure of @p@ is turned into a hard error via 'parseerror'.)
elserror :: (Show p,Show t) => Parser s (p,t) String a -> String
            -> Parser s (p,t) String a
p `elserror` s = p +++ parseerror s
--- State handling -----------------------------------------------------------
-- | Update the internal state.
-- The update is applied lazily (no forcing of the new state).
stupd :: (s->s) -> Parser s t e ()
stupd f = P $ \s toks -> Right [((), f s, toks)]
-- | Query the internal state.
stquery :: (s->a) -> Parser s t e a
stquery f = P $ \s toks -> Right [(f s, s, toks)]
-- | Deliver the entire internal state.
stget :: Parser s t e s
stget = P $ \s toks -> Right [(s, s, toks)]
--- Push some tokens back onto the input stream and reparse ------------------
-- | This is useful for recursively expanding macros. When the
-- user-parser recognises a macro use, it can lookup the macro
-- expansion from the parse state, lex it, and then stuff the
-- lexed expansion back down into the parser.
reparse :: [Either e t] -> Parser s t e ()
reparse ts = P $ \s toks -> Right [((), s, ts ++ toks)]
------------------------------------------------------------------------------
| Kludgy/polyparse-fork | src/Text/ParserCombinators/HuttonMeijerWallace.hs | lgpl-2.1 | 12,811 | 0 | 19 | 4,566 | 3,123 | 1,664 | 1,459 | 127 | 3 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module Model.ActionKey where
import qualified Data.ByteString.Builder as BS
import qualified Data.ByteString.Char8 as BS
import qualified Data.Time.Clock as DTC
import qualified Database.PostgreSQL.Simple.FromField as PG
import qualified Database.PostgreSQL.Simple.FromRow as PG
import qualified Database.PostgreSQL.Simple.ToField as PG
import Debug.Trace (traceShow)
import Text.Read (readMaybe)
#ifdef __HASTE__
type Text = String
#else
import Data.Text (Text)
#endif
{-# ANN module ("HLint: ignore Use camelCase" :: String) #-}
-- | The secret key material, stored as text.
type ActionKeyKey = Text
-- | The operations an action key can authorise. The 'Show'/'Read'
-- representations double as the database encoding (see the
-- to\/from-field instances below).
data Action = ConfirmRegistration | ResetPassword deriving (Show, Read)
-- | Decode an 'Action' from its textual database representation.
-- Two fixes over the original: the leftover 'traceShow' debug output is
-- removed, and the partial 'read' is replaced by 'readMaybe' so that an
-- unexpected database value yields a proper 'PG.ConversionFailed' result
-- error instead of crashing the whole process.
instance PG.FromField Action where
  fromField f bs =
    case bs of
      Nothing -> PG.returnError PG.UnexpectedNull f ""
      Just val ->
        case readMaybe (BS.unpack val) of
          Just action -> pure action
          Nothing -> PG.returnError PG.ConversionFailed f (BS.unpack val)
-- | Encode an 'Action' via its 'Show' representation, wrapped in quotes
-- so it is spliced into SQL as a string literal.
instance PG.ToField Action where
  toField val = PG.Plain $ PG.inQuotes $ BS.stringUtf8 $ show val
-- | A stored key authorising a user to perform a given 'Action'
-- (e.g. confirming registration or resetting a password).
data ActionKey = ActionKey
  { ac_id :: Int -- ^ row id (presumably the primary key)
  , ac_user_id :: Int -- ^ id of the user the key belongs to
  , ac_action :: Action -- ^ what this key authorises
  , ac_key :: ActionKeyKey -- ^ the secret key material itself
  , ac_created :: DTC.UTCTime -- ^ when the key was created
  } deriving (Show)
-- | NOTE(review): field order here is assumed to match the table's
-- column order (id, user_id, action, key, created) — confirm against
-- the schema.
instance PG.FromRow ActionKey where
  fromRow = ActionKey <$> PG.field <*> PG.field <*> PG.field <*> PG.field <*> PG.field
| DataStewardshipPortal/ds-wizard | Model/ActionKey.hs | apache-2.0 | 1,229 | 0 | 15 | 210 | 316 | 187 | 129 | 29 | 0 |
module Main where
import Control.Lens
import Control.Monad
import Data.Bits
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Bytes.Get
import Data.List (intercalate)
import System.Ext2
import System.Environment
-- | Read an ext2 filesystem image named on the command line and print a
-- summary of its superblock.
main :: IO ()
main = do
  args <- getArgs
  when (length args /= 1) $
    error "Usage: ext2checker <path to ext2 filesystem>"
  -- 'head' is safe: we have just checked there is exactly one argument.
  fs <- BL.readFile (head args)
  -- The superblock always lives at byte offset 1024 from the start of
  -- the device, regardless of the filesystem's block size.
  let s = flip runGetL fs $ skip 1024 >> readSuperblock
  putStrLn $ "FS Size: " ++ show (fsSize s) ++ " Bytes"
  putStrLn $ "Unallocated: " ++ show (unallocated s) ++ " Bytes"
  putStrLn $ "FS State: " ++ s ^. state . to show
  -- Fix: in ext2, s_feature_incompat lists the features an implementation
  -- MUST support to mount the filesystem, while s_feature_compat lists the
  -- optional ones. The two labels were previously attached the wrong way
  -- round.
  putStrLn $ "Required feature flags: " ++
    (intercalate ", " . map show $ s ^. featureIncompat)
  putStrLn $ "Optional feature flags: " ++
    (intercalate ", " . map show $ s ^. featureCompat)
  putStrLn $ "Read-only feature flags: " ++
    (intercalate ", " . map show $ s ^. featureRoCompat)
  where
    -- Total size = block count * block size,
    -- where block size = 1024 << s_log_block_size.
    fsSize :: Superblock -> Double
    fsSize s =
      fromIntegral
        ((s ^. blocksCount) *
         (1024 `shiftL` fromIntegral (s ^. logBlockSize)))
    -- Unallocated space = free block count * block size.
    unallocated :: Superblock -> Double
    unallocated s =
      fromIntegral
        ((s ^. freeBlocksCount) *
         (1024 `shiftL` fromIntegral (s ^. logBlockSize)))
| relrod/ext2 | src/ext2checker.hs | bsd-2-clause | 1,244 | 0 | 14 | 284 | 417 | 212 | 205 | -1 | -1 |
module Graphics.GL.Low.Classes where
import Graphics.GL
-- | OpenGL internal image formats.
class InternalFormat a where
  -- | The raw GL enum value for this internal format.
  internalFormat :: (Eq b, Num b) => proxy a -> b
-- | The allowed attachment point for images with an internal format.
class InternalFormat a => Attachable a where
  -- | The raw GL enum value of the attachment point.
  attachPoint :: (Eq b, Num b) => proxy a -> b
-- | Textures are GL objects. (Marker class: no methods of its own.)
class GLObject a => Texture a where
-- | Framebuffers can be bound to the framebuffer binding target. There is
-- a default framebuffer and the client may create an arbitrary number of
-- new framebuffer objects.
class Framebuffer a where
  framebufferName :: Num b => a -> b
-- | Buffer objects are GL objects. (Marker class: no methods of its own.)
class GLObject a => BufferObject a where
-- | Mappable to GL enums.
class ToGL a where
  toGL :: (Num b, Eq b) => a -> b
instance ToGL Bool where
  toGL True = GL_TRUE
  toGL False = GL_FALSE
-- | All GL objects have some numeric name.
class GLObject a where
  glObjectName :: Num b => a -> b
| sgraf812/lowgl | Graphics/GL/Low/Classes.hs | bsd-2-clause | 944 | 0 | 9 | 197 | 239 | 123 | 116 | -1 | -1 |
-- | Collection of utilities to make @wybor@ customization palatable
--
-- Those are mostly thin wrappers over things in "System.Console.ANSI" from @ansi-terminal@
module Ansi
( reset
, bold
, regular
, underlining
, swap
, unswap
, fgcolor
, bgcolor
, Ansi.Underlining(..)
, Ansi.ColorIntensity(..)
, Ansi.Color(..)
) where
import Data.Text (Text)
import qualified Data.Text as Text
import qualified System.Console.ANSI as Ansi
-- | Sets all attributes off
reset :: Text
reset = sgr Ansi.Reset
-- | Set bold font style
bold :: Text
bold = sgr (Ansi.SetConsoleIntensity Ansi.BoldIntensity)
-- | Set regular font style
regular :: Text
regular = sgr (Ansi.SetConsoleIntensity Ansi.NormalIntensity)
-- | Set underlining style
underlining :: Ansi.Underlining -> Text
underlining = sgr . Ansi.SetUnderlining
-- | Swap foreground and background colors
swap :: Text
swap = sgr (Ansi.SetSwapForegroundBackground True)
-- | Unswap foreground and background colors
unswap :: Text
unswap = sgr (Ansi.SetSwapForegroundBackground False)
-- | Set foreground color
-- (intensity selects between the dull and vivid palettes)
fgcolor :: Ansi.ColorIntensity -> Ansi.Color -> Text
fgcolor i c = sgr (Ansi.SetColor Ansi.Foreground i c)
-- | Set background color
-- (intensity selects between the dull and vivid palettes)
bgcolor :: Ansi.ColorIntensity -> Ansi.Color -> Text
bgcolor i c = sgr (Ansi.SetColor Ansi.Background i c)
-- | Render a single SGR command as its ANSI escape sequence.
-- The explicit singleton list replaces the previous @return@ trick,
-- which relied on the list monad's 'return' and obscured the intent.
sgr :: Ansi.SGR -> Text
sgr c = Text.pack (Ansi.setSGRCode [c])
| supki/wybor | src/Ansi.hs | bsd-2-clause | 1,402 | 0 | 8 | 247 | 336 | 193 | 143 | 33 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Horbits.UI.Camera.Control (setupMouseControl) where
import Control.Lens
import Control.Monad.Trans.State
import Data.IORef
import Graphics.UI.Gtk
import Linear
import Horbits.Data.Binding
import Horbits.UI.Camera.Internal
-- Ongoing mouse state: the buttons currently held (most recently pressed
-- first) together with the last observed pointer position.
data MState = MState [MouseButton] (Double, Double)
-- Camera updates
-- Pan by a pointer delta, rescaled so a drag across the full viewport
-- translates the camera by two units in each axis.
mousePan :: (Monad m, RealFloat a, Epsilon a) => (Double, Double) -> StateT (OrthoCamera a) m ()
mousePan (dx, dy) = do
    w <- use orthoCameraViewportWidth
    h <- use orthoCameraViewportHeight
    let v = V2 (2 * realToFrac dx / fromIntegral w) (2 * realToFrac dy / fromIntegral h)
    modify (addTranslation v)
-- Rotate by a pointer delta: a full-viewport sweep maps to pi radians.
-- NOTE(review): dy is scaled against the viewport *width* and dx against
-- the *height* — possibly intentional, but verify.
mouseRotate :: (Monad m, RealFloat a, Epsilon a) => (Double, Double) -> StateT (OrthoCamera a) m ()
mouseRotate (dx, dy) = do
    w <- use orthoCameraViewportWidth
    h <- use orthoCameraViewportHeight
    modify . addColatitude $ pi * realToFrac dy / fromIntegral w
    modify . addLongitude $ pi * realToFrac dx / fromIntegral h
-- Zoom in on scroll-up, out on any other scroll direction.
mouseScroll :: (Monad m, Num a, Ord a) => ScrollDirection -> StateT (OrthoCamera a) m ()
mouseScroll dir = do
    let z = if dir == ScrollUp then zoomIn else zoomOut
    modify z
-- Mouse event processing
-- TODO map MState with lens?
-- Record a button press/release: apply @f@ to update the held-button
-- list and remember the pointer position at which the event occurred.
onButtonEvent :: (HasUpdate v MState MState)
              => (MouseButton -> [MouseButton] -> [MouseButton]) -> v -> EventM EButton ()
onButtonEvent f st = do
    button <- eventButton
    coords <- eventCoordinates
    st $~ newState coords button -- TODO ??? zoom or sth
  where
    newState c b (MState bs _) = MState (f b bs) c
-- Handle pointer motion: store the new position and, depending on which
-- button is held, pan (left button) or rotate (right button) the camera
-- by the delta since the last event. Note the y component is inverted
-- (screen y grows downward).
onMouseMove :: (HasGetter vs MState, HasSetter vs MState,
                HasGetter vc (OrthoCamera a), HasSetter vc (OrthoCamera a),
                RealFloat a, Epsilon a) =>
               vc -> vs -> EventM t (Double, Double) -> EventM t ()
onMouseMove cam st evCoords = do
    (coords @ (cx, cy)) <- evCoords
    MState buttons (sx, sy) <- readVar st
    st $= MState buttons coords -- TODO MState manipulation is weak, see above, also <<%= (!)
    evalStateVar cam $ case buttons of
        LeftButton : _ -> mousePan (cx - sx, sy - cy)
        RightButton : _ -> mouseRotate (cx - sx, sy - cy)
        _ -> return ()
-- | Wire mouse control of an orthographic camera onto a widget:
-- left-drag pans, right-drag rotates, scroll zooms. Returns the signal
-- connections so the caller can disconnect them later.
setupMouseControl :: (HasGetter v (OrthoCamera a), HasSetter v (OrthoCamera a),
                      WidgetClass w, RealFloat a, Epsilon a)
                  => w -> v -> IO [ConnectId w]
setupMouseControl w cam = do
    st <- newVar (MState [] (0.0, 0.0)) :: IO (IORef MState)
    widgetAddEvents w [PointerMotionHintMask, Button1MotionMask, Button3MotionMask]
    sequence [
        on w motionNotifyEvent $ tryEvent $ do
            onMouseMove cam st eventCoordinates
            -- With PointerMotionHintMask set, we must explicitly ask for
            -- further motion events after handling each one.
            eventRequestMotions,
        on w buttonPressEvent $ tryEvent $
            -- Move the pressed button to the head of the held list,
            -- dropping any stale occurrence of it first.
            onButtonEvent (\b bs -> b : filter (/= b) bs) st,
        on w buttonReleaseEvent $ tryEvent $
            onButtonEvent (\b bs -> filter (/= b) bs) st,
        on w scrollEvent $ tryEvent $ do
            d <- eventScrollDirection
            evalStateVar cam $ mouseScroll d
        ]
| chwthewke/horbits | src/horbits/Horbits/UI/Camera/Control.hs | bsd-3-clause | 3,179 | 1 | 15 | 900 | 1,091 | 552 | 539 | 62 | 3 |
module ETA.CodeGen.Utils where
import ETA.Main.DynFlags
import ETA.BasicTypes.Name
import ETA.Types.TyCon
import ETA.BasicTypes.Literal
import Codec.JVM
import Data.Char (ord)
import Control.Arrow(first)
import ETA.CodeGen.Name
import ETA.CodeGen.Rts
import ETA.Debug
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8)
import Data.Monoid
import Data.Foldable
-- | Translate a Core literal into its JVM field type plus the code that
-- pushes its value. Char\/Int\/Word map onto JVM int; the 64-bit
-- variants map onto JVM long; strings become java.lang.String constants.
cgLit :: Literal -> (FieldType, Code)
cgLit (MachChar c) = (jint, iconst jint . fromIntegral $ ord c)
cgLit (MachInt i) = (jint, iconst jint $ fromIntegral i)
cgLit (MachWord i) = (jint, iconst jint $ fromIntegral i)
cgLit (MachInt64 i) = (jlong, lconst $ fromIntegral i)
-- TODO: Verify that fromIntegral converts well
cgLit (MachWord64 i) = (jlong, lconst $ fromIntegral i)
cgLit (MachFloat r) = (jfloat, fconst $ fromRational r)
cgLit (MachDouble r) = (jdouble, dconst $ fromRational r)
-- TODO: Remove this literal variant?
cgLit MachNullAddr = (jobject, lconst 0)
cgLit MachNull = (jobject, aconst_null jobject)
cgLit (MachStr s) = (jstring, sconst $ decodeUtf8 s)
-- TODO: Implement MachLabel
cgLit MachLabel {} = error "cgLit: MachLabel"
cgLit other = pprPanic "mkSimpleLit" (ppr other)
-- | Extract an Int from an int-like literal, for use as a switch key.
-- Partial by design: any other literal is a bug in the caller.
litToInt :: Literal -> Int
litToInt (MachInt i) = fromInteger i
litToInt (MachWord i) = fromInteger i
litToInt (MachChar c) = ord c
litToInt _ = error "litToInt: not integer"
-- | Dispatch on an integer scrutinee; delegates directly to 'gswitch'.
intSwitch :: Code -> [(Int, Code)] -> Maybe Code -> Code
intSwitch = gswitch
-- | Dispatch on a literal scrutinee. Only int-sized primitive types are
-- supported; an empty branch list degenerates to the default.
litSwitch :: FieldType -> Code -> [(Literal, Code)] -> Code -> Code
litSwitch ft expr branches deflt
  -- | isObjectFt ft = deflt -- ASSERT (length branches == 0)
  -- TODO: When switching on an object, perform a checkcast
  -- TODO: When switching on long/float/double, use an if-else tree
  | null branches = deflt
  | ft `notElem` [jint, jbool, jbyte, jshort, jchar] = error $ "litSwitch[" ++ show ft ++ "]: " ++
                  "primitive cases not supported for non-integer values"
  | otherwise = intSwitch expr intBranches (Just deflt)
  where intBranches = map (first litToInt) branches
-- | Map a data constructor tag (already on the operand stack via
-- @loadArg@) to its closure, by indexing into the type's static
-- closure table (a field in the defining module's class).
tagToClosure :: DynFlags -> TyCon -> Code -> (FieldType, Code)
tagToClosure dflags tyCon loadArg = (closureType, enumCode)
  where enumCode = invokestatic (mkMethodRef modClass fieldName [] (Just arrayFt))
                <> loadArg
                <> gaload closureType
        tyName = tyConName tyCon
        modClass = moduleJavaClass $ nameModule tyName
        fieldName = nameTypeTable dflags $ tyConName tyCon
        arrayFt = jarray closureType
-- | Build a lazily-initialising static accessor method: if the backing
-- static field is non-null return it, otherwise run @code@ (which is
-- expected to populate the field) and then return it.
initCodeTemplate' :: FieldType -> Bool -> Text -> Text -> FieldRef -> Code -> MethodDef
initCodeTemplate' retFt synchronized modClass qClName field code =
  mkMethodDef modClass accessFlags qClName [] (Just retFt) $ fold
    [ getstatic field
    , ifnonnull mempty code
    , getstatic field
    , greturn retFt ]
  where accessFlags = [Public, Static] ++ (if synchronized then [Synchronized] else [])
-- | Initialisation template specialised to closure-typed results;
-- simply fixes the return type of the general 'initCodeTemplate''.
initCodeTemplate :: Bool -> Text -> Text -> FieldRef -> Code -> MethodDef
initCodeTemplate = initCodeTemplate' closureType
| pparkkin/eta | compiler/ETA/CodeGen/Utils.hs | bsd-3-clause | 3,217 | 0 | 13 | 695 | 980 | 518 | 462 | 62 | 2 |
{-# LANGUAGE DataKinds, GADTs, TypeFamilies, TypeOperators #-}
module Text.Printf.Safe.Core (type (~>), Formatter, Printf(..),
HList(..), printf, printf') where
import Data.String (IsString (..))
-- | Variadic function types.
-- @'[a, b] ~> r@ unfolds to @a -> b -> r@.
type family (~>) as b  where
  (~>) '[] a = a
  (~>) (x ': xs) a = x -> xs ~> a
-- | Formatter type.
type Formatter a = a -> String
-- | Printf Format.
-- The type index @xs@ records, in order, the argument types the format
-- expects: @EOS@ ends a format, @:<>@ prepends a literal string, and
-- @:%@ prepends a typed hole rendered by the given 'Formatter'.
data Printf xs where
  EOS :: Printf '[]
  (:<>) :: String -> Printf xs -> Printf xs
  (:%) :: Formatter x -> Printf xs -> Printf (x ': xs)
-- String literals denote argument-free formats (hence the @xs ~ '[]@).
instance (xs ~ '[]) => IsString (Printf xs) where
  fromString str = str :<> EOS
-- | Hetero list.
data HList ts where
  HNil :: HList '[]
  (:-) :: a -> HList xs -> HList (a ': xs)
infixr 9 :-, :<>, :%
-- | HList version.
-- Arguments are supplied as an 'HList' whose index matches the format's,
-- so the pairing is total; output is built via 'ShowS' composition.
printf' :: Printf ts -> HList ts -> String
printf' ps0 ts0 = go ps0 ts0 ""
  where
    go :: Printf us -> HList us -> ShowS
    go EOS HNil = id
    go (str :<> fs) xs = showString str . go fs xs
    go (fm :% fs) (x :- ds) = showString (fm x) . go fs ds
    -- GHC's coverage checker cannot see that the shared index rules out
    -- the remaining combinations (e.g. EOS against (:-)); this branch is
    -- statically unreachable.
    go _ _ = error "bug in GHC!"
-- | Variadic version.
-- Accumulates the rendered prefix in @a@ and returns a function of the
-- remaining arguments, as dictated by the @xs ~> String@ type family.
printf :: Printf xs -> xs ~> String
printf p = go p ""
  where
    go :: Printf xs -> String -> xs ~> String
    go EOS a = a
    go (str :<> xs) a = go xs (a ++ str)
    go (fmt :% xs) a = \x -> go xs (a ++ fmt x)
| konn/safe-printf | src/Text/Printf/Safe/Core.hs | bsd-3-clause | 1,341 | 1 | 11 | 396 | 576 | 313 | 263 | -1 | -1 |
module Cakefile where
import Development.Cake3
import Development.Cake3.Ext.UrWeb
import Cakefile_P
-- Generate a Makefile that builds one Ur/Web FFI library and two test
-- applications that link against it.
main = writeMake (file "Makefile") $ do
  -- Sanity check: fail early if the Ur/Web compiler is not available.
  prebuild [cmd|urweb -version|]
  -- Shared FFI library (C source + .urs interface), using jansson.
  u <- uwlib (file "Script.urp") $ do
    ffi (file "Script.urs")
    include (file "Script.h")
    src (file "Script.c")
    pkgconfig "jansson"
  -- First test app: SQLite backend, debug build, whitelisted jQuery UI.
  t1 <- uwapp "-dbms sqlite" (file "Test1.urp") $ do
    allow url "http://code.jquery.com/ui/1.10.3/jquery-ui.js"
    allow mime "text/javascript"
    library u
    debug
    ur (file "Test1.ur")
  -- Second test app: minimal, just the shared library.
  t2 <- uwapp "-dbms sqlite" (file "Test2.urp") $ do
    library u
    ur (file "Test2.ur")
  -- Phony "all" target depending on everything above.
  rule $ do
    phony "all"
    depend u
    depend t1
    depend t2
  return ()
| grwlf/cake3 | Example/UrWeb/Cakefile.hs | bsd-3-clause | 698 | 0 | 14 | 166 | 247 | 109 | 138 | -1 | -1 |
-- | The issues API as described on <http://developer.github.com/v3/issues/>.
module Github.Issues (
issue
,issue'
,issuesForRepo
,issuesForRepo'
,IssueLimitation(..)
,module Github.Data
) where
import Github.Data
import Github.Private
import Data.List (intercalate)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
import Data.Time.Clock (UTCTime(..))
-- | A data structure for describing how to filter issues. This is used by
-- @issuesForRepo@. Each constructor maps to one query-string parameter;
-- later limitations in the list override earlier ones for the same key.
data IssueLimitation =
      AnyMilestone -- ^ Issues appearing in any milestone. [default]
    | NoMilestone -- ^ Issues without a milestone.
    | MilestoneId Int -- ^ Only issues that are in the milestone with the given id.
    | Open -- ^ Only open issues. [default]
    | OnlyClosed -- ^ Only closed issues.
    | Unassigned -- ^ Issues to which no one has been assigned ownership.
    | AnyAssignment -- ^ All issues regardless of assignment. [default]
    | AssignedTo String -- ^ Only issues assigned to the user with the given login.
    | Mentions String -- ^ Issues which mention the given string, taken to be a user's login.
    | Labels [String] -- ^ A list of labels to filter by.
    | Ascending -- ^ Sort ascending.
    | Descending -- ^ Sort descending. [default]
    | Since UTCTime -- ^ Only issues created since the specified date and time.
-- | Details on a specific issue, given the repo owner and name, and the issue
-- number.'
--
-- > issue' (Just ("github-username", "github-password")) "thoughtbot" "paperclip" "462"
issue' :: Maybe GithubAuth -> String -> String -> Int -> IO (Either Error Issue)
issue' auth user repoName issueNumber =
  githubGet' auth ["repos", user, repoName, "issues", show issueNumber]
-- | Details on a specific issue, given the repo owner and name, and the issue
-- number. Unauthenticated variant of 'issue''.
--
-- > issue "thoughtbot" "paperclip" "462"
issue :: String -> String -> Int -> IO (Either Error Issue)
issue = issue' Nothing
-- | All issues for a repo (given the repo owner and name), with optional
-- restrictions as described in the @IssueLimitation@ data type.
--
-- NOTE(review): the values spliced into the query string below (logins,
-- labels) are not URL-encoded — confirm callers never pass characters
-- that need escaping.
--
-- > issuesForRepo' (Just ("github-username", "github-password")) "thoughtbot" "paperclip" [NoMilestone, OnlyClosed, Mentions "jyurek", Ascending]
issuesForRepo' :: Maybe GithubAuth -> String -> String -> [IssueLimitation] -> IO (Either Error [Issue])
issuesForRepo' auth user repoName issueLimitations =
  githubGetWithQueryString'
    auth
    ["repos", user, repoName, "issues"]
    (queryStringFromLimitations issueLimitations)
  where
    queryStringFromLimitations = intercalate "&" . map convert
    -- One @key=value@ fragment per limitation.
    convert AnyMilestone     = "milestone=*"
    convert NoMilestone      = "milestone=none"
    convert (MilestoneId n)  = "milestone=" ++ show n
    convert Open             = "state=open"
    convert OnlyClosed       = "state=closed"
    convert Unassigned       = "assignee=none"
    convert AnyAssignment    = "assignee=*"
    convert (AssignedTo u)   = "assignee=" ++ u
    convert (Mentions u)     = "mentioned=" ++ u
    convert (Labels l)       = "labels=" ++ intercalate "," l
    convert Ascending        = "direction=asc"
    convert Descending       = "direction=desc"
    convert (Since t)        =
      "since=" ++ formatTime defaultTimeLocale "%FT%TZ" t
-- | All issues for a repo (given the repo owner and name), with optional
-- restrictions as described in the @IssueLimitation@ data type.
-- Unauthenticated variant of 'issuesForRepo''.
--
-- > issuesForRepo "thoughtbot" "paperclip" [NoMilestone, OnlyClosed, Mentions "jyurek", Ascending]
issuesForRepo :: String -> String -> [IssueLimitation] -> IO (Either Error [Issue])
issuesForRepo = issuesForRepo' Nothing
| erochest/github | Github/Issues.hs | bsd-3-clause | 3,623 | 0 | 12 | 709 | 593 | 334 | 259 | 55 | 13 |
module Wigner.Complex (
Complex((:+)),
ComplexValued(conjugate),
ComplexNum(fromComplexRational)) where
import Data.Ratio
import Wigner.Texable
-- | A complex number as real and imaginary parts: @re :+ im@.
data Complex a = a :+ a deriving (Show, Eq)
-- | Values with a complex-conjugation operation.
class ComplexValued a where
    conjugate :: a -> a
-- | Conjugation negates the imaginary part.
instance (Num a) => ComplexValued (Complex a) where
    conjugate (x :+ y) = x :+ (-y)
-- | Ring operations on complex numbers. 'abs' and 'signum' are
-- deliberately unsupported (they would need a 'Floating' constraint for
-- the modulus); they now raise a descriptive error instead of an
-- anonymous 'undefined', so failures are traceable.
instance (Num a) => Num (Complex a) where
    negate (x :+ y) = negate x :+ negate y
    (x1 :+ y1) + (x2 :+ y2) = (x1 + x2) :+ (y1 + y2)
    -- (a+bi)(c+di) = (ac - bd) + (ad + bc)i
    (x1 :+ y1) * (x2 :+ y2) = (x1 * x2 - y1 * y2) :+ (x1 * y2 + y1 * x2)
    abs _ = error "Wigner.Complex: abs is not supported for Complex"
    signum _ = error "Wigner.Complex: signum is not supported for Complex"
    -- Integers embed onto the real axis.
    fromInteger x = fromInteger x :+ 0
-- | Division by the standard formula: multiply by the conjugate of the
-- denominator and divide by its squared modulus @m@.
instance (Fractional a) => Fractional (Complex a) where
    (x1 :+ y1) / (x2 :+ y2) = ((x1 * x2 + y1 * y2) / m) :+ ((x1 * y2 - y1 * x2) / m) where
        m = x2 * x2 + y2 * y2
    fromRational x = fromRational x :+ fromRational 0
-- | Types that can embed a complex rational value.
class ComplexNum a where
    fromComplexRational :: Complex Rational -> a
-- | TeX rendering: purely real values print bare, purely imaginary ones
-- as (signed) multiples of @i@, and mixed values parenthesised with an
-- explicit plus sign only when the imaginary part is non-negative
-- (a negative part already renders its own minus).
instance (Texable a, Ord a, Num a) => Texable (Complex a) where
    showTex (x :+ y)
        | y == 0 = sx
        | x == 0 && y == 1 = "i"
        | x == 0 && y == -1 = "-i"
        | x == 0 = sy ++ "i"
        | otherwise = "(" ++ showTex (x :+ 0) ++ sign ++ showTex (0 :+ y) ++ ")"
        where
            sx = showTex x
            sy = showTex y
            sign = if y < 0 then "" else "+"
| fjarri/wigner | src/Wigner/Complex.hs | bsd-3-clause | 1,348 | 0 | 13 | 428 | 665 | 347 | 318 | 40 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring arrow commands
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
module DsArrows ( dsProcExpr ) where
#include "HsVersions.h"
import GhcPrelude
import Match
import DsUtils
import DsMonad
import GHC.Hs hiding (collectPatBinders, collectPatsBinders,
collectLStmtsBinders, collectLStmtBinders,
collectStmtBinders )
import TcHsSyn
import qualified GHC.Hs.Utils as HsUtils
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types (newtypes etc), and sometimes not
-- So WATCH OUT; check each use of split*Ty functions.
-- Sigh. This is a pain.
import {-# SOURCE #-} DsExpr ( dsExpr, dsLExpr, dsLExprNoLP, dsLocalBinds,
dsSyntaxExpr )
import TcType
import Type ( splitPiTy )
import TcEvidence
import CoreSyn
import CoreFVs
import CoreUtils
import MkCore
import DsBinds (dsHsWrapper)
import Name
import Id
import ConLike
import TysWiredIn
import BasicTypes
import PrelNames
import Outputable
import VarSet
import SrcLoc
import ListSetOps( assocMaybe )
import Data.List
import Util
import UniqDSet
-- | The six arrow-combinator expressions (as desugared Core) used when
-- desugaring commands; built from the CmdSyntaxTable by 'mkCmdEnv'.
data DsCmdEnv = DsCmdEnv {
        arr_id, compose_id, first_id, app_id, choice_id, loop_id :: CoreExpr
    }
-- | Desugar each method expression of the CmdSyntaxTable to a Core
-- binding, and return those bindings together with a 'DsCmdEnv' of
-- variables referring to them. Also rejects levity-polymorphic result
-- types for each combinator at the arity at which it is later applied.
mkCmdEnv :: CmdSyntaxTable GhcTc -> DsM ([CoreBind], DsCmdEnv)
-- See Note [CmdSyntaxTable] in GHC.Hs.Expr
mkCmdEnv tc_meths
  = do { (meth_binds, prs) <- mapAndUnzipM mk_bind tc_meths
       -- NB: Some of these lookups might fail, but that's OK if the
       -- symbol is never used. That's why we use Maybe first and then
       -- panic. An eager panic caused trouble in typecheck/should_compile/tc192
       ; let the_arr_id = assocMaybe prs arrAName
             the_compose_id = assocMaybe prs composeAName
             the_first_id = assocMaybe prs firstAName
             the_app_id = assocMaybe prs appAName
             the_choice_id = assocMaybe prs choiceAName
             the_loop_id = assocMaybe prs loopAName
         -- used as an argument in, e.g., do_premap
       ; check_lev_poly 3 the_arr_id
         -- used as an argument in, e.g., dsCmdStmt/BodyStmt
       ; check_lev_poly 5 the_compose_id
         -- used as an argument in, e.g., dsCmdStmt/BodyStmt
       ; check_lev_poly 4 the_first_id
         -- the result of the_app_id is used as an argument in, e.g.,
         -- dsCmd/HsCmdArrApp/HsHigherOrderApp
       ; check_lev_poly 2 the_app_id
         -- used as an argument in, e.g., HsCmdIf
       ; check_lev_poly 5 the_choice_id
         -- used as an argument in, e.g., RecStmt
       ; check_lev_poly 4 the_loop_id
       ; return (meth_binds, DsCmdEnv {
               arr_id = Var (unmaybe the_arr_id arrAName),
               compose_id = Var (unmaybe the_compose_id composeAName),
               first_id = Var (unmaybe the_first_id firstAName),
               app_id = Var (unmaybe the_app_id appAName),
               choice_id = Var (unmaybe the_choice_id choiceAName),
               loop_id = Var (unmaybe the_loop_id loopAName)
             }) }
  where
    -- Bind one desugared method expression to a fresh local.
    mk_bind (std_name, expr)
      = do { rhs <- dsExpr expr
           ; id <- newSysLocalDs (exprType rhs)
           -- no check needed; these are functions
           ; return (NonRec id rhs, (std_name, id)) }
    -- Panic lazily: only if a missing combinator is actually demanded.
    unmaybe Nothing name = pprPanic "mkCmdEnv" (text "Not found:" <+> ppr name)
    unmaybe (Just id) _ = id
    -- returns the result type of a pi-type (that is, a forall or a function)
    -- Note that this result type may be ill-scoped.
    res_type :: Type -> Type
    res_type ty = res_ty
      where
        (_, res_ty) = splitPiTy ty
    check_lev_poly :: Int -- arity
                   -> Maybe Id -> DsM ()
    check_lev_poly _ Nothing = return ()
    check_lev_poly arity (Just id)
      = dsNoLevPoly (nTimes arity res_type (idType id))
          (text "In the result of the function" <+> quotes (ppr id))
-- The do_* helpers below build Core applications of the combinators
-- captured in the 'DsCmdEnv'; the comment above each gives the
-- arrow-class type being instantiated.
-- arr :: forall b c. (b -> c) -> a b c
do_arr :: DsCmdEnv -> Type -> Type -> CoreExpr -> CoreExpr
do_arr ids b_ty c_ty f = mkApps (arr_id ids) [Type b_ty, Type c_ty, f]
-- (>>>) :: forall b c d. a b c -> a c d -> a b d
do_compose :: DsCmdEnv -> Type -> Type -> Type ->
              CoreExpr -> CoreExpr -> CoreExpr
do_compose ids b_ty c_ty d_ty f g
  = mkApps (compose_id ids) [Type b_ty, Type c_ty, Type d_ty, f, g]
-- first :: forall b c d. a b c -> a (b,d) (c,d)
do_first :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_first ids b_ty c_ty d_ty f
  = mkApps (first_id ids) [Type b_ty, Type c_ty, Type d_ty, f]
-- app :: forall b c. a (a b c, b) c
do_app :: DsCmdEnv -> Type -> Type -> CoreExpr
do_app ids b_ty c_ty = mkApps (app_id ids) [Type b_ty, Type c_ty]
-- (|||) :: forall b d c. a b d -> a c d -> a (Either b c) d
-- note the swapping of d and c
do_choice :: DsCmdEnv -> Type -> Type -> Type ->
             CoreExpr -> CoreExpr -> CoreExpr
do_choice ids b_ty c_ty d_ty f g
  = mkApps (choice_id ids) [Type b_ty, Type d_ty, Type c_ty, f, g]
-- loop :: forall b d c. a (b,d) (c,d) -> a b c
-- note the swapping of d and c
do_loop :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_loop ids b_ty c_ty d_ty f
  = mkApps (loop_id ids) [Type b_ty, Type d_ty, Type c_ty, f]
-- premap :: forall b c d. (b -> c) -> a c d -> a b d
-- premap f g = arr f >>> g
do_premap :: DsCmdEnv -> Type -> Type -> Type ->
             CoreExpr -> CoreExpr -> CoreExpr
do_premap ids b_ty c_ty d_ty f g
  = do_compose ids b_ty c_ty d_ty (do_arr ids b_ty c_ty f) g
-- | Build a pattern-match-failure expression of the given type for the
-- given match context.
mkFailExpr :: HsMatchContext Id -> Type -> DsM CoreExpr
mkFailExpr ctxt ty
  = mkErrorAppDs pAT_ERROR_ID ty (matchContextErrString ctxt)
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> a
mkFstExpr :: Type -> Type -> DsM CoreExpr
mkFstExpr = mkPairSelExpr (\a _ -> a)
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> b
mkSndExpr :: Type -> Type -> DsM CoreExpr
mkSndExpr = mkPairSelExpr (\_ b -> b)
-- | Shared worker for 'mkFstExpr' and 'mkSndExpr' (previously two
-- near-identical copies): build a lambda that scrutinises a boxed pair
-- and returns the component chosen by @sel@.
mkPairSelExpr :: (Id -> Id -> Id) -> Type -> Type -> DsM CoreExpr
mkPairSelExpr sel a_ty b_ty = do
    a_var <- newSysLocalDs a_ty
    b_var <- newSysLocalDs b_ty
    pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
    return (Lam pair_var
               (coreCasePair pair_var a_var b_var (Var (sel a_var b_var))))
{-
Build case analysis of a tuple. This cannot be done in the DsM monad,
because the list of variables is typically not yet defined.
-}
-- coreCaseTuple [u1..] v [x1..xn] body
--      = case v of v { (x1, .., xn) -> body }
-- But the matching may be nested if the tuple is very big
coreCaseTuple :: UniqSupply -> Id -> [Id] -> CoreExpr -> CoreExpr
coreCaseTuple uniqs scrut_var vars body
  = mkTupleCase uniqs vars body scrut_var (Var scrut_var)
-- | Single-level case on a boxed 2-tuple:
--   case scrut_var of (var1, var2) -> body
coreCasePair :: Id -> Id -> Id -> CoreExpr -> CoreExpr
coreCasePair scrut_var var1 var2 body
  = Case (Var scrut_var) scrut_var (exprType body)
         [(DataAlt (tupleDataCon Boxed 2), [var1, var2], body)]
-- | The boxed pair type (t1, t2).
mkCorePairTy :: Type -> Type -> Type
mkCorePairTy t1 t2 = mkBoxedTupleTy [t1, t2]
-- | The boxed pair expression (e1, e2).
mkCorePairExpr :: CoreExpr -> CoreExpr -> CoreExpr
mkCorePairExpr e1 e2 = mkCoreTup [e1, e2]
-- | The unit expression ().
mkCoreUnitExpr :: CoreExpr
mkCoreUnitExpr = mkCoreTup []
{-
The input is divided into a local environment, which is a flat tuple
(unless it's too big), and a stack, which is a right-nested pair.
In general, the input has the form
((x1,...,xn), (s1,...(sk,())...))
where xi are the environment values, and si the ones on the stack,
with s1 being the "top", the first one to be matched with a lambda.
-}
-- | The type of the environment-and-stack input: the (big) tuple of the
-- environment variables, paired with the stack type.
envStackType :: [Id] -> Type -> Type
envStackType ids stack_ty = mkCorePairTy (mkBigCoreVarTupTy ids) stack_ty
-- splitTypeAt n (t1,... (tn,t)...) = ([t1, ..., tn], t)
-- Panics unless the type is right-nested pairs at least n deep.
splitTypeAt :: Int -> Type -> ([Type], Type)
splitTypeAt n ty
  | n == 0 = ([], ty)
  | otherwise = case tcTyConAppArgs ty of
      [t, ty'] -> let (ts, ty_r) = splitTypeAt (n-1) ty' in (t:ts, ty_r)
      _ -> pprPanic "splitTypeAt" (ppr ty)
----------------------------------------------
--              buildEnvStack
--
--      ((x1,...,xn),stk)
buildEnvStack :: [Id] -> Id -> CoreExpr
buildEnvStack env_ids stack_id
  = mkCorePairExpr (mkBigCoreVarTup env_ids) (Var stack_id)
----------------------------------------------
-- matchEnvStack
--
-- \ ((x1,...,xn),stk) -> body
-- =>
-- \ pair ->
-- case pair of (tup,stk) ->
-- case tup of (x1,...,xn) ->
-- body
-- | Build a lambda that deconstructs the @((x1,...,xn), stk)@ input and
-- runs @body@ with the environment variables and stack in scope.
matchEnvStack :: [Id]      -- ^ x1..xn, the environment variables
              -> Id        -- ^ stk, the stack variable
              -> CoreExpr  -- ^ e, the body to run under the matches
              -> DsM CoreExpr
matchEnvStack env_ids stack_id body = do
    uniqs <- newUniqueSupply
    tup_var <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
    -- Inner match: take the environment tuple apart into x1..xn.
    let match_env = coreCaseTuple uniqs tup_var env_ids body
    pair_id <- newSysLocalDs (mkCorePairTy (idType tup_var) (idType stack_id))
    -- Outer match: split the incoming pair into (tup, stk).
    return (Lam pair_id (coreCasePair pair_id tup_var stack_id match_env))
----------------------------------------------
-- matchEnv
--
-- \ (x1,...,xn) -> body
-- =>
-- \ tup ->
-- case tup of (x1,...,xn) ->
-- body
-- | Build a lambda that deconstructs a bare environment tuple
-- @(x1,...,xn)@ (no stack) and runs @body@ with the variables in scope.
matchEnv :: [Id]      -- ^ x1..xn, the environment variables
         -> CoreExpr  -- ^ e, the body to run under the match
         -> DsM CoreExpr
matchEnv env_ids body = do
    uniqs <- newUniqueSupply
    tup_id <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
    return (Lam tup_id (coreCaseTuple uniqs tup_id env_ids body))
----------------------------------------------
-- matchVarStack
--
-- case (x1, ...(xn, s)...) -> e
-- =>
-- case z0 of (x1,z1) ->
-- case zn-1 of (xn,s) ->
-- e
-- | Match a right-nested stack @(x1, ...(xn, s)...)@ one pair at a time,
-- returning the variable bound to the whole stack together with the
-- nested-case code.  The base case returns the residual stack variable.
matchVarStack :: [Id] -> Id -> CoreExpr -> DsM (Id, CoreExpr)
matchVarStack [] stack_id body = return (stack_id, body)
matchVarStack (param_id:param_ids) stack_id body = do
    -- Match the tail of the stack first, then wrap one more pair match
    -- around the resulting code.
    (tail_id, tail_code) <- matchVarStack param_ids stack_id body
    pair_id <- newSysLocalDs (mkCorePairTy (idType param_id) (idType tail_id))
    return (pair_id, coreCasePair pair_id param_id tail_id tail_code)
-- | Source-level counterpart of 'buildEnvStack': the HsExpr
-- @((x1,...,xn), stk)@, used when rewriting case leaves.
mkHsEnvStackExpr :: [Id] -> Id -> LHsExpr GhcTc
mkHsEnvStackExpr env_ids stack_id
  = mkLHsTupleExpr [mkLHsVarTuple env_ids, nlHsVar stack_id]
-- Translation of arrow abstraction
-- D; xs |-a c : () --> t' ---> c'
-- --------------------------
-- D |- proc p -> c :: a t t' ---> premap (\ p -> ((xs),())) c'
--
-- where (xs) is the tuple of variables bound by p
-- | Desugar a @proc p -> c@ expression (see the translation rule in the
-- comment above): match the proc pattern, build the initial
-- environment/empty-stack pair, and premap it onto the desugared command.
dsProcExpr
    :: LPat GhcTc
    -> LHsCmdTop GhcTc
    -> DsM CoreExpr
dsProcExpr pat (L _ (HsCmdTop (CmdTopTc _unitTy cmd_ty ids) cmd)) = do
    (meth_binds, meth_ids) <- mkCmdEnv ids
    let locals = mkVarSet (collectPatBinders pat)
    -- dsfixCmd feeds env_ids back; the initial stack is the unit type.
    (core_cmd, _free_vars, env_ids)
       <- dsfixCmd meth_ids locals unitTy cmd_ty cmd
    let env_ty = mkBigCoreVarTupTy env_ids
    let env_stk_ty = mkCorePairTy env_ty unitTy
    let env_stk_expr = mkCorePairExpr (mkBigCoreVarTup env_ids) mkCoreUnitExpr
    fail_expr <- mkFailExpr ProcExpr env_stk_ty
    var <- selectSimpleMatchVarL pat
    match_code <- matchSimply (Var var) ProcExpr pat env_stk_expr fail_expr
    let pat_ty = hsLPatType pat
    let proc_code = do_premap meth_ids pat_ty env_stk_ty cmd_ty
                      (Lam var match_code)
                      core_cmd
    return (mkLets meth_binds proc_code)
dsProcExpr _ _ = panic "dsProcExpr"
{-
Translation of a command judgement of the form
D; xs |-a c : stk --> t
to an expression e such that
D |- e :: a (xs, stk) t
-}
-- | Desugar a located command by dropping the location and delegating
-- to 'dsCmd'.
dsLCmd :: DsCmdEnv -> IdSet -> Type -> Type -> LHsCmd GhcTc -> [Id]
       -> DsM (CoreExpr, DIdSet)
dsLCmd ids local_vars stk_ty res_ty cmd env_ids
  = dsCmd ids local_vars stk_ty res_ty (unLoc cmd) env_ids
-- | Desugar one command, given the typing-environment-style inputs
-- described below.  Each clause implements one of the translation rules
-- shown in the comments preceding it.
dsCmd :: DsCmdEnv        -- ^ arrow combinators
      -> IdSet           -- ^ set of local vars available to this command
      -> Type            -- ^ type of the stack (right-nested tuple)
      -> Type            -- ^ return type of the command
      -> HsCmd GhcTc     -- ^ command to desugar
      -> [Id]            -- ^ list of vars in the input to this command
                         --   This is typically fed back,
                         --   so don't pull on it too early
      -> DsM (CoreExpr,  -- desugared expression
              DIdSet)    -- subset of local vars that occur free

-- D |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- -----------------------------
-- D; xs |-a fun -< arg : stk --> t2
--
--   ---> premap (\ ((xs), _stk) -> arg) fun
dsCmd ids local_vars stack_ty res_ty
      (HsCmdArrApp arrow_ty arrow arg HsFirstOrderApp _)
      env_ids = do
    let
        -- arrow_ty is (a t1 t2); peel off both applications to get t1.
        (a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
        (_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
    core_arrow <- dsLExprNoLP arrow
    core_arg <- dsLExpr arg
    stack_id <- newSysLocalDs stack_ty
    core_make_arg <- matchEnvStack env_ids stack_id core_arg
    return (do_premap ids
              (envStackType env_ids stack_ty)
              arg_ty
              res_ty
              core_make_arg
              core_arrow,
            -- Only the argument may mention local variables; the arrow
            -- itself is typed outside the local environment.
            exprFreeIdsDSet core_arg `uniqDSetIntersectUniqSet` local_vars)
-- D, xs |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- ------------------------------
-- D; xs |-a fun -<< arg : stk --> t2
--
--   ---> premap (\ ((xs), _stk) -> (fun, arg)) app
dsCmd ids local_vars stack_ty res_ty
      (HsCmdArrApp arrow_ty arrow arg HsHigherOrderApp _)
      env_ids = do
    let
        (a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
        (_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
    core_arrow <- dsLExpr arrow
    core_arg <- dsLExpr arg
    stack_id <- newSysLocalDs stack_ty
    -- Pair the arrow itself with its argument; 'app' consumes the pair.
    core_make_pair <- matchEnvStack env_ids stack_id
                        (mkCorePairExpr core_arrow core_arg)
    return (do_premap ids
              (envStackType env_ids stack_ty)
              (mkCorePairTy arrow_ty arg_ty)
              res_ty
              core_make_pair
              (do_app ids arg_ty res_ty),
            -- Unlike -<, the arrow expression here may also use locals.
            (exprsFreeIdsDSet [core_arrow, core_arg])
              `uniqDSetIntersectUniqSet` local_vars)
-- D; ys |-a cmd : (t,stk) --> t'
-- D, xs |-  exp :: t
-- ------------------------
-- D; xs |-a cmd exp : stk --> t'
--
--   ---> premap (\ ((xs),stk) -> ((ys),(e,stk))) cmd
dsCmd ids local_vars stack_ty res_ty (HsCmdApp _ cmd arg) env_ids = do
    core_arg <- dsLExpr arg
    let
        arg_ty = exprType core_arg
        -- The command sees a stack with the argument pushed on top.
        stack_ty' = mkCorePairTy arg_ty stack_ty
    (core_cmd, free_vars, env_ids')
       <- dsfixCmd ids local_vars stack_ty' res_ty cmd
    stack_id <- newSysLocalDs stack_ty
    arg_id <- newSysLocalDsNoLP arg_ty
    -- push the argument expression onto the stack
    let
        stack' = mkCorePairExpr (Var arg_id) (Var stack_id)
        -- bindNonRec shares the argument rather than duplicating it.
        core_body = bindNonRec arg_id core_arg
                      (mkCorePairExpr (mkBigCoreVarTup env_ids') stack')
    -- match the environment and stack against the input
    core_map <- matchEnvStack env_ids stack_id core_body
    return (do_premap ids
                      (envStackType env_ids stack_ty)
                      (envStackType env_ids' stack_ty')
                      res_ty
                      core_map
                      core_cmd,
            free_vars `unionDVarSet`
              (exprFreeIdsDSet core_arg `uniqDSetIntersectUniqSet` local_vars))
-- D; ys |-a cmd : stk t'
-- -----------------------------------------------
-- D; xs |-a \ p1 ... pk -> cmd : (t1,...(tk,stk)...) t'
--
--   ---> premap (\ ((xs), (p1, ... (pk,stk)...)) -> ((ys),stk)) cmd
dsCmd ids local_vars stack_ty res_ty
      (HsCmdLam _ (MG { mg_alts
                        = (L _ [L _ (Match { m_pats = pats
                                           , m_grhss = GRHSs _ [L _ (GRHS _ [] body)] _ })]) }))
      env_ids = do
    let pat_vars = mkVarSet (collectPatsBinders pats)
    let
        local_vars' = pat_vars `unionVarSet` local_vars
        -- The top k stack components correspond to the k patterns.
        (pat_tys, stack_ty') = splitTypeAt (length pats) stack_ty
    (core_body, free_vars, env_ids')
       <- dsfixCmd ids local_vars' stack_ty' res_ty body
    param_ids <- mapM newSysLocalDsNoLP pat_tys
    stack_id' <- newSysLocalDs stack_ty'
    -- the expression is built from the inside out, so the actions
    -- are presented in reverse order
    let
        -- build a new environment, plus what's left of the stack
        core_expr = buildEnvStack env_ids' stack_id'
        in_ty = envStackType env_ids stack_ty
        in_ty' = envStackType env_ids' stack_ty'
    fail_expr <- mkFailExpr LambdaExpr in_ty'
    -- match the patterns against the parameters
    match_code <- matchSimplys (map Var param_ids) LambdaExpr pats core_expr
                    fail_expr
    -- match the parameters against the top of the old stack
    (stack_id, param_code) <- matchVarStack param_ids stack_id' match_code
    -- match the old environment and stack against the input
    select_code <- matchEnvStack env_ids stack_id param_code
    return (do_premap ids in_ty in_ty' res_ty select_code core_body,
            -- Pattern-bound variables are not free in the lambda command.
            free_vars `uniqDSetMinusUniqSet` pat_vars)
-- Parentheses are transparent: desugar the inner command unchanged.
dsCmd ids local_vars stack_ty res_ty (HsCmdPar _ cmd) env_ids
  = dsLCmd ids local_vars stack_ty res_ty cmd env_ids
-- D, xs |- e :: Bool
-- D; xs1 |-a c1 : stk --> t
-- D; xs2 |-a c2 : stk --> t
-- ----------------------------------------
-- D; xs |-a if e then c1 else c2 : stk --> t
--
--   ---> premap (\ ((xs),stk) ->
--            if e then Left ((xs1),stk) else Right ((xs2),stk))
--          (c1 ||| c2)
dsCmd ids local_vars stack_ty res_ty (HsCmdIf _ mb_fun cond then_cmd else_cmd)
      env_ids = do
    core_cond <- dsLExpr cond
    (core_then, fvs_then, then_ids)
       <- dsfixCmd ids local_vars stack_ty res_ty then_cmd
    (core_else, fvs_else, else_ids)
       <- dsfixCmd ids local_vars stack_ty res_ty else_cmd
    stack_id <- newSysLocalDs stack_ty
    either_con <- dsLookupTyCon eitherTyConName
    left_con <- dsLookupDataCon leftDataConName
    right_con <- dsLookupDataCon rightDataConName
    let
        mk_left_expr ty1 ty2 e = mkCoreConApps left_con [Type ty1, Type ty2, e]
        mk_right_expr ty1 ty2 e = mkCoreConApps right_con [Type ty1, Type ty2, e]
        in_ty = envStackType env_ids stack_ty
        then_ty = envStackType then_ids stack_ty
        else_ty = envStackType else_ids stack_ty
        sum_ty = mkTyConApp either_con [then_ty, else_ty]
        fvs_cond = exprFreeIdsDSet core_cond
                     `uniqDSetIntersectUniqSet` local_vars
        -- Tag each branch's environment/stack with Left or Right so
        -- (c1 ||| c2) can dispatch on it.
        core_left = mk_left_expr then_ty else_ty
                      (buildEnvStack then_ids stack_id)
        core_right = mk_right_expr then_ty else_ty
                       (buildEnvStack else_ids stack_id)
    -- mb_fun is a rebindable-syntax 'ifThenElse'; when absent, use a
    -- plain Core if-then-else.
    core_if <- case mb_fun of
       Just fun -> do { fun_apps <- dsSyntaxExpr fun
                                      [core_cond, core_left, core_right]
                      ; matchEnvStack env_ids stack_id fun_apps }
       Nothing  -> matchEnvStack env_ids stack_id $
                     mkIfThenElse core_cond core_left core_right
    return (do_premap ids in_ty sum_ty res_ty
              core_if
              (do_choice ids then_ty else_ty res_ty core_then core_else),
            fvs_cond `unionDVarSet` fvs_then `unionDVarSet` fvs_else)
{-
Case commands are treated in much the same way as if commands
(see above) except that there are more alternatives.  For example

        case e of { p1 -> c1; p2 -> c2; p3 -> c3 }

is translated to

        premap (\ ((xs)*ts) -> case e of
                p1 -> Left (Left ((xs1)*ts))
                p2 -> Left (Right ((xs2)*ts))
                p3 -> Right ((xs3)*ts))
          ((c1 ||| c2) ||| c3)

The idea is to extract the commands from the case, build a balanced tree
of choices, and replace the commands with expressions that build tagged
tuples, obtaining a case expression that can be desugared normally.
To build all this, we use triples describing segments of the list of
case bodies, containing the following fields:
 * a list of expressions of the form (Left|Right)* ((xs)*ts), to be put
   into the case replacing the commands
 * a sum type that is the common type of these expressions, and also the
   input type of the arrow
 * a CoreExpr for an arrow built by combining the translated command
   bodies with |||.
-}
dsCmd ids local_vars stack_ty res_ty
      (HsCmdCase _ exp (MG { mg_alts = L l matches
                           , mg_ext = MatchGroupTc arg_tys _
                           , mg_origin = origin }))
      env_ids = do
    stack_id <- newSysLocalDs stack_ty

    -- Extract and desugar the leaf commands in the case, building tuple
    -- expressions that will (after tagging) replace these leaves
    let
        leaves = concatMap leavesMatch matches
        make_branch (leaf, bound_vars) = do
            (core_leaf, _fvs, leaf_ids)
               <- dsfixCmd ids (bound_vars `unionVarSet` local_vars) stack_ty
                    res_ty leaf
            return ([mkHsEnvStackExpr leaf_ids stack_id],
                    envStackType leaf_ids stack_ty,
                    core_leaf)

    branches <- mapM make_branch leaves
    either_con <- dsLookupTyCon eitherTyConName
    left_con <- dsLookupDataCon leftDataConName
    right_con <- dsLookupDataCon rightDataConName
    let
        left_id  = HsConLikeOut noExtField (RealDataCon left_con)
        right_id = HsConLikeOut noExtField (RealDataCon right_con)
        left_expr  ty1 ty2 e = noLoc $ HsApp noExtField
                     (noLoc $ mkHsWrap (mkWpTyApps [ty1, ty2]) left_id ) e
        right_expr ty1 ty2 e = noLoc $ HsApp noExtField
                     (noLoc $ mkHsWrap (mkWpTyApps [ty1, ty2]) right_id) e

        -- Prefix each tuple with a distinct series of Left's and Right's,
        -- in a balanced way, keeping track of the types.
        merge_branches (builds1, in_ty1, core_exp1)
                       (builds2, in_ty2, core_exp2)
          = (map (left_expr in_ty1 in_ty2) builds1 ++
               map (right_expr in_ty1 in_ty2) builds2,
             mkTyConApp either_con [in_ty1, in_ty2],
             do_choice ids in_ty1 in_ty2 res_ty core_exp1 core_exp2)
        (leaves', sum_ty, core_choices) = foldb merge_branches branches

        -- Replace the commands in the case with these tagged tuples,
        -- yielding a HsExpr Id we can feed to dsExpr.
        (_, matches') = mapAccumL (replaceLeavesMatch res_ty) leaves' matches
        in_ty = envStackType env_ids stack_ty

    core_body <- dsExpr (HsCase noExtField exp
                           (MG { mg_alts = L l matches'
                               , mg_ext = MatchGroupTc arg_tys sum_ty
                               , mg_origin = origin }))
        -- Note that we replace the HsCase result type by sum_ty,
        -- which is the type of matches'

    core_matches <- matchEnvStack env_ids stack_id core_body
    return (do_premap ids in_ty sum_ty res_ty core_matches core_choices,
            exprFreeIdsDSet core_body `uniqDSetIntersectUniqSet` local_vars)
-- D; ys |-a cmd : stk --> t
-- ----------------------------------
-- D; xs |-a let binds in cmd : stk --> t
--
--   ---> premap (\ ((xs),stk) -> let binds in ((ys),stk)) c
dsCmd ids local_vars stack_ty res_ty (HsCmdLet _ lbinds@(L _ binds) body)
      env_ids = do
    let
        defined_vars = mkVarSet (collectLocalBinders binds)
        local_vars' = defined_vars `unionVarSet` local_vars
    (core_body, _free_vars, env_ids')
       <- dsfixCmd ids local_vars' stack_ty res_ty body
    stack_id <- newSysLocalDs stack_ty
    -- build a new environment, plus the stack, using the let bindings
    core_binds <- dsLocalBinds lbinds (buildEnvStack env_ids' stack_id)
    -- match the old environment and stack against the input
    core_map <- matchEnvStack env_ids stack_id core_binds
    return (do_premap ids
                      (envStackType env_ids stack_ty)
                      (envStackType env_ids' stack_ty)
                      res_ty
                      core_map
                      core_body,
            -- Free vars are taken from the let-wrapped body, so vars
            -- bound by the binds themselves are excluded naturally.
            exprFreeIdsDSet core_binds `uniqDSetIntersectUniqSet` local_vars)
-- D; xs |-a ss : t
-- ----------------------------------
-- D; xs |-a do { ss } : () --> t
--
--   ---> premap (\ (env,stk) -> env) c
dsCmd ids local_vars stack_ty res_ty do_block@(HsCmdDo stmts_ty
                                                       (L loc stmts))
      env_ids = do
    putSrcSpanDs loc $
      dsNoLevPoly stmts_ty
        (text "In the do-command:" <+> ppr do_block)
    (core_stmts, env_ids') <- dsCmdDo ids local_vars res_ty stmts env_ids
    let env_ty = mkBigCoreVarTupTy env_ids
    -- The statements ignore the stack, so project out just the
    -- environment component of the input pair.
    core_fst <- mkFstExpr env_ty stack_ty
    return (do_premap ids
              (mkCorePairTy env_ty stack_ty)
              env_ty
              res_ty
              core_fst
              core_stmts,
            env_ids')
-- D |- e :: forall e. a1 (e,stk1) t1 -> ... an (e,stkn) tn -> a (e,stk) t
-- D; xs |-a ci :: stki --> ti
-- -----------------------------------
-- D; xs |-a (|e c1 ... cn|) :: stk --> t       ---> e [t_xs] c1 ... cn
dsCmd _ local_vars _stack_ty _res_ty (HsCmdArrForm _ op _ _ args) env_ids = do
    let env_ty = mkBigCoreVarTupTy env_ids
    core_op <- dsLExpr op
    -- Each command argument is trimmed to the shared environment type.
    (core_args, fv_sets) <- mapAndUnzipM (dsTrimCmdArg local_vars env_ids) args
    return (mkApps (App core_op (Type env_ty)) core_args,
            unionDVarSets fv_sets)
-- A wrapper (from the typechecker) is applied to the desugared command.
dsCmd ids local_vars stack_ty res_ty (HsCmdWrap _ wrap cmd) env_ids = do
    (core_cmd, env_ids') <- dsCmd ids local_vars stack_ty res_ty cmd env_ids
    core_wrap <- dsHsWrapper wrap
    return (core_wrap core_cmd, env_ids')
-- Any other command form is a bug in an earlier phase.
dsCmd _ _ _ _ _ c = pprPanic "dsCmd" (ppr c)
-- D; ys |-a c : stk --> t      (ys <= xs)
-- ---------------------
-- D; xs |-a c : stk --> t      ---> premap (\ ((xs),stk) -> ((ys),stk)) c
dsTrimCmdArg
    :: IdSet            -- ^ set of local vars available to this command
    -> [Id]             -- ^ list of vars in the input to this command
    -> LHsCmdTop GhcTc  -- ^ command argument to desugar
    -> DsM (CoreExpr,   -- desugared expression
            DIdSet)     -- subset of local vars that occur free
dsTrimCmdArg local_vars env_ids
       (L _ (HsCmdTop
                (CmdTopTc stack_ty cmd_ty ids) cmd )) = do
    (meth_binds, meth_ids) <- mkCmdEnv ids
    (core_cmd, free_vars, env_ids')
       <- dsfixCmd meth_ids local_vars stack_ty cmd_ty cmd
    stack_id <- newSysLocalDs stack_ty
    trim_code
      <- matchEnvStack env_ids stack_id (buildEnvStack env_ids' stack_id)
    let
        in_ty = envStackType env_ids stack_ty
        in_ty' = envStackType env_ids' stack_ty
        -- If the command already uses exactly the outer environment,
        -- the trimming premap would be the identity: skip it.
        arg_code = if env_ids' == env_ids then core_cmd else
                   do_premap meth_ids in_ty in_ty' cmd_ty trim_code core_cmd
    return (mkLets meth_binds arg_code, free_vars)
dsTrimCmdArg _ _ _ = panic "dsTrimCmdArg"
-- Given D; xs |-a c : stk --> t, builds c with xs fed back.
-- Typically needs to be prefixed with arr (\(p, stk) -> ((xs),stk))
dsfixCmd
    :: DsCmdEnv         -- ^ arrow combinators
    -> IdSet            -- ^ set of local vars available to this command
    -> Type             -- ^ type of the stack (right-nested tuple)
    -> Type             -- ^ return type of the command
    -> LHsCmd GhcTc     -- ^ command to desugar
    -> DsM (CoreExpr,   -- desugared expression
            DIdSet,     -- subset of local vars that occur free
            [Id])       -- the same local vars as a list, fed back
dsfixCmd ids local_vars stk_ty cmd_ty cmd
  = do { putSrcSpanDs (getLoc cmd) $ dsNoLevPoly cmd_ty
           (text "When desugaring the command:" <+> ppr cmd)
       ; trimInput (dsLCmd ids local_vars stk_ty cmd_ty cmd) }
-- Feed back the list of local variables actually used a command,
-- for use as the input tuple of the generated arrow.
-- Implemented with 'fixDs': the free-variable set computed by the
-- builder is fed back (lazily!) as its own input list, so the builder
-- must not force env_ids before producing the free-variable set.
trimInput
    :: ([Id] -> DsM (CoreExpr, DIdSet))
    -> DsM (CoreExpr,   -- desugared expression
            DIdSet,     -- subset of local vars that occur free
            [Id])       -- same local vars as a list, fed back to
                        -- the inner function to form the tuple of
                        -- inputs to the arrow.
trimInput build_arrow
  = fixDs (\ ~(_,_,env_ids) -> do
        (core_cmd, free_vars) <- build_arrow env_ids
        return (core_cmd, free_vars, dVarSetElems free_vars))
{-
Translation of command judgements of the form
D |-a do { ss } : t
-}
-- | Desugar the statements of an arrow do-block; the statement list is
-- never empty because the renamer guarantees a final expression.
dsCmdDo :: DsCmdEnv         -- ^ arrow combinators
        -> IdSet            -- ^ set of local vars available to this statement
        -> Type             -- ^ return type of the statement
        -> [CmdLStmt GhcTc] -- ^ statements to desugar
        -> [Id]             -- ^ list of vars in the input to this statement
                            --   This is typically fed back,
                            --   so don't pull on it too early
        -> DsM (CoreExpr,   -- desugared expression
                DIdSet)     -- subset of local vars that occur free
dsCmdDo _ _ _ [] _ = panic "dsCmdDo"

-- D; xs |-a c : () --> t
-- --------------------------
-- D; xs |-a do { c } : t
--
--   ---> premap (\ (xs) -> ((xs), ())) c
dsCmdDo ids local_vars res_ty [L loc (LastStmt _ body _ _)] env_ids = do
    putSrcSpanDs loc $ dsNoLevPoly res_ty
      (text "In the command:" <+> ppr body)
    (core_body, env_ids') <- dsLCmd ids local_vars unitTy res_ty body env_ids
    let env_ty = mkBigCoreVarTupTy env_ids
    env_var <- newSysLocalDs env_ty
    -- The final command expects an (env, ()) pair; supply an empty stack.
    let core_map = Lam env_var (mkCorePairExpr (Var env_var) mkCoreUnitExpr)
    return (do_premap ids
              env_ty
              (mkCorePairTy env_ty unitTy)
              res_ty
              core_map
              core_body,
            env_ids')

-- A non-last statement composes with the rest of the block.
dsCmdDo ids local_vars res_ty (stmt:stmts) env_ids = do
    let bound_vars = mkVarSet (collectLStmtBinders stmt)
    let local_vars' = bound_vars `unionVarSet` local_vars
    (core_stmts, _, env_ids') <- trimInput (dsCmdDo ids local_vars' res_ty stmts)
    (core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
    return (do_compose ids
              (mkBigCoreVarTupTy env_ids)
              (mkBigCoreVarTupTy env_ids')
              res_ty
              core_stmt
              core_stmts,
            fv_stmt)
{-
A statement maps one local environment to another, and is represented
as an arrow from one tuple type to another. A statement sequence is
translated to a composition of such arrows.
-}
-- | Desugar a located statement by dropping the location and delegating
-- to 'dsCmdStmt'.
dsCmdLStmt :: DsCmdEnv -> IdSet -> [Id] -> CmdLStmt GhcTc -> [Id]
           -> DsM (CoreExpr, DIdSet)
dsCmdLStmt ids local_vars out_ids cmd env_ids
  = dsCmdStmt ids local_vars out_ids (unLoc cmd) env_ids
-- | Desugar one statement of an arrow do-block; each statement is an
-- arrow from the current environment tuple to the next one.
dsCmdStmt
    :: DsCmdEnv        -- ^ arrow combinators
    -> IdSet           -- ^ set of local vars available to this statement
    -> [Id]            -- ^ list of vars in the output of this statement
    -> CmdStmt GhcTc   -- ^ statement to desugar
    -> [Id]            -- ^ list of vars in the input to this statement
                       --   This is typically fed back,
                       --   so don't pull on it too early
    -> DsM (CoreExpr,  -- desugared expression
            DIdSet)    -- subset of local vars that occur free

-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t'
-- ------------------------------
-- D; xs |-a do { c; ss } : t'
--
--   ---> premap (\ ((xs)) -> (((xs1),()),(xs')))
--          (first c >>> arr snd) >>> ss
dsCmdStmt ids local_vars out_ids (BodyStmt c_ty cmd _ _) env_ids = do
    (core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy c_ty cmd
    -- Multiplex the input into the command's (env, ()) input alongside
    -- the output environment needed by the rest of the block.
    core_mux <- matchEnv env_ids
       (mkCorePairExpr
          (mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
          (mkBigCoreVarTup out_ids))
    let
        in_ty = mkBigCoreVarTupTy env_ids
        in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
        out_ty = mkBigCoreVarTupTy out_ids
        before_c_ty = mkCorePairTy in_ty1 out_ty
        after_c_ty = mkCorePairTy c_ty out_ty
    dsNoLevPoly c_ty empty -- I (Richard E, Dec '16) have no idea what to say here
    snd_fn <- mkSndExpr c_ty out_ty
    return (do_premap ids in_ty before_c_ty out_ty core_mux $
              do_compose ids before_c_ty after_c_ty out_ty
                (do_first ids in_ty1 c_ty out_ty core_cmd) $
              do_arr ids after_c_ty out_ty snd_fn,
            extendDVarSetList fv_cmd out_ids)
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t'            xs2 = xs' - defs(p)
-- -----------------------------------
-- D; xs |-a do { p <- c; ss } : t'
--
--   ---> premap (\ (xs) -> (((xs1),()),(xs2)))
--          (first c >>> arr (\ (p, (xs2)) -> (xs'))) >>> ss
--
-- It would be simpler and more consistent to do this using second,
-- but that's likely to be defined in terms of first.
dsCmdStmt ids local_vars out_ids (BindStmt _ pat cmd _ _) env_ids = do
    let pat_ty = hsLPatType pat
    (core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy pat_ty cmd
    let pat_vars = mkVarSet (collectPatBinders pat)
    let
        -- Variables threaded past the command: outputs not bound by p.
        env_ids2 = filterOut (`elemVarSet` pat_vars) out_ids
        env_ty2 = mkBigCoreVarTupTy env_ids2

    -- multiplexing function
    --          \ (xs) -> (((xs1),()),(xs2))
    core_mux <- matchEnv env_ids
       (mkCorePairExpr
          (mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
          (mkBigCoreVarTup env_ids2))

    -- projection function
    --          \ (p, (xs2)) -> (zs)
    env_id <- newSysLocalDs env_ty2
    uniqs <- newUniqueSupply
    let
        after_c_ty = mkCorePairTy pat_ty env_ty2
        out_ty = mkBigCoreVarTupTy out_ids
        body_expr = coreCaseTuple uniqs env_id env_ids2 (mkBigCoreVarTup out_ids)

    fail_expr <- mkFailExpr (StmtCtxt DoExpr) out_ty
    pat_id <- selectSimpleMatchVarL pat
    match_code
      <- matchSimply (Var pat_id) (StmtCtxt DoExpr) pat body_expr fail_expr
    pair_id <- newSysLocalDs after_c_ty
    let
        proj_expr = Lam pair_id (coreCasePair pair_id pat_id env_id match_code)

    -- put it all together
    let
        in_ty = mkBigCoreVarTupTy env_ids
        in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
        in_ty2 = mkBigCoreVarTupTy env_ids2
        before_c_ty = mkCorePairTy in_ty1 in_ty2
    return (do_premap ids in_ty before_c_ty out_ty core_mux $
              do_compose ids before_c_ty after_c_ty out_ty
                (do_first ids in_ty1 pat_ty in_ty2 core_cmd) $
              do_arr ids after_c_ty out_ty proj_expr,
            fv_cmd `unionDVarSet` (mkDVarSet out_ids
                                     `uniqDSetMinusUniqSet` pat_vars))
-- D; xs' |-a do { ss } : t
-- --------------------------------------
-- D; xs |-a do { let binds; ss } : t
--
--   ---> arr (\ (xs) -> let binds in (xs')) >>> ss
dsCmdStmt ids local_vars out_ids (LetStmt _ binds) env_ids = do
    -- build a new environment using the let bindings
    core_binds <- dsLocalBinds binds (mkBigCoreVarTup out_ids)
    -- match the old environment against the input
    core_map <- matchEnv env_ids core_binds
    return (do_arr ids
              (mkBigCoreVarTupTy env_ids)
              (mkBigCoreVarTupTy out_ids)
              core_map,
            exprFreeIdsDSet core_binds `uniqDSetIntersectUniqSet` local_vars)
-- D; ys  |-a do { ss; returnA -< ((xs1), (ys2)) } : ...
-- D; xs' |-a do { ss' } : t
-- ------------------------------------
-- D; xs |-a do { rec ss; ss' } : t
--
--                      xs1 = xs' /\ defs(ss)
--                      xs2 = xs' - defs(ss)
--                      ys1 = ys - defs(ss)
--                      ys2 = ys /\ defs(ss)
--
--   ---> arr (\(xs) -> ((ys1),(xs2))) >>>
--          first (loop (arr (\((ys1),~(ys2)) -> (ys)) >>> ss)) >>>
--          arr (\((xs1),(xs2)) -> (xs')) >>> ss'
dsCmdStmt ids local_vars out_ids
          (RecStmt { recS_stmts = stmts
                   , recS_later_ids = later_ids, recS_rec_ids = rec_ids
                   , recS_ext = RecStmtTc { recS_later_rets = later_rets
                                          , recS_rec_rets = rec_rets } })
          env_ids = do
    let
        later_ids_set = mkVarSet later_ids
        -- Vars that bypass the loop: outputs not defined by the rec.
        env2_ids = filterOut (`elemVarSet` later_ids_set) out_ids
        env2_id_set = mkDVarSet env2_ids
        env2_ty = mkBigCoreVarTupTy env2_ids

    -- post_loop_fn = \((later_ids),(env2_ids)) -> (out_ids)
    uniqs <- newUniqueSupply
    env2_id <- newSysLocalDs env2_ty
    let
        later_ty = mkBigCoreVarTupTy later_ids
        post_pair_ty = mkCorePairTy later_ty env2_ty
        post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkBigCoreVarTup out_ids)

    post_loop_fn <- matchEnvStack later_ids env2_id post_loop_body

    --- loop (...)
    (core_loop, env1_id_set, env1_ids)
       <- dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets

    -- pre_loop_fn = \(env_ids) -> ((env1_ids),(env2_ids))
    let
        env1_ty = mkBigCoreVarTupTy env1_ids
        pre_pair_ty = mkCorePairTy env1_ty env2_ty
        pre_loop_body = mkCorePairExpr (mkBigCoreVarTup env1_ids)
                                       (mkBigCoreVarTup env2_ids)

    pre_loop_fn <- matchEnv env_ids pre_loop_body

    -- arr pre_loop_fn >>> first (loop (...)) >>> arr post_loop_fn
    let
        env_ty = mkBigCoreVarTupTy env_ids
        out_ty = mkBigCoreVarTupTy out_ids
        core_body = do_premap ids env_ty pre_pair_ty out_ty
                      pre_loop_fn
                      (do_compose ids pre_pair_ty post_pair_ty out_ty
                        (do_first ids env1_ty later_ty env2_ty
                          core_loop)
                        (do_arr ids post_pair_ty out_ty
                          post_loop_fn))

    return (core_body, env1_id_set `unionDVarSet` env2_id_set)

-- Any other statement form is a bug in an earlier phase.
dsCmdStmt _ _ _ _ s = pprPanic "dsCmdStmt" (ppr s)
-- loop (premap (\ ((env1_ids), ~(rec_ids)) -> (env_ids))
-- (ss >>> arr (\ (out_ids) -> ((later_rets),(rec_rets))))) >>>
-- | Desugar the body of a @rec@ group: builds
-- @loop (premap (\ ((env1_ids), ~(rec_ids)) -> (env_ids))
--         (ss >>> arr (\ (out_ids) -> ((later_rets),(rec_rets)))))@.
-- The lazy (~) match on rec_ids is essential: the recursive values come
-- from the loop knot and must not be forced eagerly.
dsRecCmd
    :: DsCmdEnv         -- ^ arrow combinators
    -> IdSet            -- ^ set of local vars available to this statement
    -> [CmdLStmt GhcTc] -- ^ list of statements inside the RecCmd
    -> [Id]             -- ^ list of vars defined here and used later
    -> [HsExpr GhcTc]   -- ^ expressions corresponding to later_ids
    -> [Id]             -- ^ list of vars fed back through the loop
    -> [HsExpr GhcTc]   -- ^ expressions corresponding to rec_ids
    -> DsM (CoreExpr,   -- desugared statement
            DIdSet,     -- subset of local vars that occur free
            [Id])       -- same local vars as a list
dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets = do
    let
        later_id_set = mkVarSet later_ids
        rec_id_set = mkVarSet rec_ids
        local_vars' = rec_id_set `unionVarSet` later_id_set `unionVarSet` local_vars

    -- mk_pair_fn = \ (out_ids) -> ((later_rets),(rec_rets))
    core_later_rets <- mapM dsExpr later_rets
    core_rec_rets <- mapM dsExpr rec_rets
    let
        -- possibly polymorphic version of vars of later_ids and rec_ids
        out_ids = exprsFreeIdsList (core_later_rets ++ core_rec_rets)
        out_ty = mkBigCoreVarTupTy out_ids

        later_tuple = mkBigCoreTup core_later_rets
        later_ty = mkBigCoreVarTupTy later_ids

        rec_tuple = mkBigCoreTup core_rec_rets
        rec_ty = mkBigCoreVarTupTy rec_ids

        out_pair = mkCorePairExpr later_tuple rec_tuple
        out_pair_ty = mkCorePairTy later_ty rec_ty

    mk_pair_fn <- matchEnv out_ids out_pair

    -- ss
    (core_stmts, fv_stmts, env_ids) <- dsfixCmdStmts ids local_vars' out_ids stmts

    -- squash_pair_fn = \ ((env1_ids), ~(rec_ids)) -> (env_ids)
    rec_id <- newSysLocalDs rec_ty
    let
        env1_id_set = fv_stmts `uniqDSetMinusUniqSet` rec_id_set
        env1_ids = dVarSetElems env1_id_set
        env1_ty = mkBigCoreVarTupTy env1_ids
        in_pair_ty = mkCorePairTy env1_ty rec_ty
        -- Project recursive variables lazily out of the rec tuple;
        -- everything else comes directly from env1.
        core_body = mkBigCoreTup (map selectVar env_ids)
          where
            selectVar v
              | v `elemVarSet` rec_id_set
                  = mkTupleSelector rec_ids v rec_id (Var rec_id)
              | otherwise = Var v

    squash_pair_fn <- matchEnvStack env1_ids rec_id core_body

    -- loop (premap squash_pair_fn (ss >>> arr mk_pair_fn))
    let
        env_ty = mkBigCoreVarTupTy env_ids
        core_loop = do_loop ids env1_ty later_ty rec_ty
                      (do_premap ids in_pair_ty env_ty out_pair_ty
                        squash_pair_fn
                        (do_compose ids env_ty out_ty out_pair_ty
                          core_stmts
                          (do_arr ids out_ty out_pair_ty mk_pair_fn)))

    return (core_loop, env1_id_set, env1_ids)
{-
A sequence of statements (as in a rec) is desugared to an arrow between
two environments (no stack)
-}
-- | Desugar a statement sequence with its input environment fed back
-- via 'trimInput' (see that function's laziness caveat).
dsfixCmdStmts
    :: DsCmdEnv         -- ^ arrow combinators
    -> IdSet            -- ^ set of local vars available to this statement
    -> [Id]             -- ^ output vars of these statements
    -> [CmdLStmt GhcTc] -- ^ statements to desugar
    -> DsM (CoreExpr,   -- desugared expression
            DIdSet,     -- subset of local vars that occur free
            [Id])       -- same local vars as a list
dsfixCmdStmts ids local_vars out_ids stmts
  = trimInput (dsCmdStmts ids local_vars out_ids stmts)
-- TODO: Add levity polymorphism check for the resulting expression.
-- But I (Richard E.) don't know enough about arrows to do so.
-- | Desugar a non-empty statement sequence (as inside a @rec@) into a
-- composition of environment-to-environment arrows.
dsCmdStmts
    :: DsCmdEnv         -- ^ arrow combinators
    -> IdSet            -- ^ set of local vars available to this statement
    -> [Id]             -- ^ output vars of these statements
    -> [CmdLStmt GhcTc] -- ^ statements to desugar
    -> [Id]             -- ^ list of vars in the input to these statements
    -> DsM (CoreExpr,   -- desugared expression
            DIdSet)     -- subset of local vars that occur free
dsCmdStmts ids local_vars out_ids [stmt] env_ids
  = dsCmdLStmt ids local_vars out_ids stmt env_ids
dsCmdStmts ids local_vars out_ids (stmt:stmts) env_ids = do
    let bound_vars = mkVarSet (collectLStmtBinders stmt)
    let local_vars' = bound_vars `unionVarSet` local_vars
    (core_stmts, _fv_stmts, env_ids') <- dsfixCmdStmts ids local_vars' out_ids stmts
    (core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
    return (do_compose ids
              (mkBigCoreVarTupTy env_ids)
              (mkBigCoreVarTupTy env_ids')
              (mkBigCoreVarTupTy out_ids)
              core_stmt
              core_stmts,
            fv_stmt)
-- The empty case is excluded by the callers.
dsCmdStmts _ _ _ [] _ = panic "dsCmdStmts []"
-- Match a list of expressions against a list of patterns, left-to-right.
-- | Match a list of expressions against a list of patterns,
-- left-to-right; the two lists must have the same length.
matchSimplys :: [CoreExpr]             -- ^ Scrutinees
             -> HsMatchContext Name    -- ^ Match kind
             -> [LPat GhcTc]           -- ^ Patterns they should match
             -> CoreExpr               -- ^ Return this if they all match
             -> CoreExpr               -- ^ Return this if they don't
             -> DsM CoreExpr
matchSimplys [] _ctxt [] result_expr _fail_expr = return result_expr
matchSimplys (exp:exps) ctxt (pat:pats) result_expr fail_expr = do
    match_code <- matchSimplys exps ctxt pats result_expr fail_expr
    matchSimply exp ctxt pat match_code fail_expr
matchSimplys _ _ _ _ _ = panic "matchSimplys"
-- List of leaf expressions, with set of variables bound in each
-- | List of leaf expressions of a match, each paired with the set of
-- variables bound on the path to it (patterns, where-binds and guard
-- statements).
leavesMatch :: LMatch GhcTc (Located (body GhcTc))
            -> [(Located (body GhcTc), IdSet)]
leavesMatch (L _ (Match { m_pats = pats
                        , m_grhss = GRHSs _ grhss (L _ binds) }))
  = let
        defined_vars = mkVarSet (collectPatsBinders pats)
                        `unionVarSet`
                       mkVarSet (collectLocalBinders binds)
    in
    [(body,
      mkVarSet (collectLStmtsBinders stmts)
        `unionVarSet` defined_vars)
    | L _ (GRHS _ stmts body) <- grhss]
leavesMatch _ = panic "leavesMatch"
-- Replace the leaf commands in a match
-- | Replace the leaf commands in a match with the supplied replacement
-- expressions (consumed left-to-right), returning the unconsumed rest.
replaceLeavesMatch
    :: Type                                 -- ^ new result type
    -> [Located (body' GhcTc)]              -- ^ replacement leaf expressions of that type
    -> LMatch GhcTc (Located (body GhcTc))  -- ^ the matches of a case command
    -> ([Located (body' GhcTc)],            -- remaining leaf expressions
        LMatch GhcTc (Located (body' GhcTc))) -- updated match
replaceLeavesMatch _res_ty leaves
    (L loc
       match@(Match { m_grhss = GRHSs x grhss binds }))
  = let
        (leaves', grhss') = mapAccumL replaceLeavesGRHS leaves grhss
    in
    (leaves', L loc (match { m_ext = noExtField, m_grhss = GRHSs x grhss' binds }))
replaceLeavesMatch _ _ _ = panic "replaceLeavesMatch"
-- | Replace the body of one guarded right-hand side with the next
-- replacement expression, returning the unconsumed rest.
replaceLeavesGRHS
    :: [Located (body' GhcTc)]             -- ^ replacement leaf expressions of that type
    -> LGRHS GhcTc (Located (body GhcTc))  -- ^ rhss of a case command
    -> ([Located (body' GhcTc)],           -- remaining leaf expressions
        LGRHS GhcTc (Located (body' GhcTc))) -- updated GRHS
replaceLeavesGRHS (leaf:leaves) (L loc (GRHS x stmts _))
  = (leaves, L loc (GRHS x stmts leaf))
replaceLeavesGRHS [] _ = panic "replaceLeavesGRHS []"
replaceLeavesGRHS _ _ = panic "replaceLeavesGRHS"
-- | Fold a non-empty list with a balanced tree of applications of @f@,
-- so the result has depth O(log n) rather than O(n).  Errors on [].
foldb :: (a -> a -> a) -> [a] -> a
foldb _ []  = error "foldb of empty list"
foldb _ [x] = x
foldb f xs  = foldb f (pairwise xs)
  where
    -- Combine adjacent elements, halving the list (rounding up).
    pairwise (x1:x2:rest) = f x1 x2 : pairwise rest
    pairwise short        = short
{-
Note [Dictionary binders in ConPatOut] See also same Note in GHC.Hs.Utils
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following functions to collect value variables from patterns are
copied from GHC.Hs.Utils, with one change: we also collect the dictionary
bindings (pat_binds) from ConPatOut. We need them for cases like
h :: Arrow a => Int -> a (Int,Int) Int
h x = proc (y,z) -> case compare x y of
GT -> returnA -< z+x
The type checker turns the case into
case compare x y of
GT { p77 = plusInt } -> returnA -< p77 z x
Here p77 is a local binding for the (+) operation.
See comments in GHC.Hs.Utils for why the other version does not include
these bindings.
-}
-- | Value variables bound by a pattern, including ConPatOut dictionary
-- binders (see Note [Dictionary binders in ConPatOut]).
collectPatBinders :: LPat GhcTc -> [Id]
collectPatBinders pat = collectl pat []
-- | 'collectPatBinders' over a list of patterns, concatenated.
collectPatsBinders :: [LPat GhcTc] -> [Id]
collectPatsBinders pats = foldr collectl [] pats
---------------------
-- | Worker for 'collectPatBinders': prepend the binders of one pattern
-- onto the accumulator.
collectl :: LPat GhcTc -> [Id] -> [Id]
-- See Note [Dictionary binders in ConPatOut]
collectl (L _ pat) bndrs
  = go pat
  where
    go (VarPat _ (L _ var))       = var : bndrs
    go (WildPat _)                = bndrs
    go (LazyPat _ pat)            = collectl pat bndrs
    go (BangPat _ pat)            = collectl pat bndrs
    go (AsPat _ (L _ a) pat)      = a : collectl pat bndrs
    go (ParPat _ pat)             = collectl pat bndrs
    go (ListPat _ pats)           = foldr collectl bndrs pats
    go (TuplePat _ pats _)        = foldr collectl bndrs pats
    go (SumPat _ pat _ _)         = collectl pat bndrs
    go (ConPatIn _ ps)            = foldr collectl bndrs (hsConPatArgs ps)
    -- Unlike GHC.Hs.Utils, also collect the dictionary binders.
    go (ConPatOut {pat_args=ps, pat_binds=ds}) =
                                    collectEvBinders ds
                                    ++ foldr collectl bndrs (hsConPatArgs ps)
    go (LitPat _ _)               = bndrs
    go (NPat {})                  = bndrs
    go (NPlusKPat _ (L _ n) _ _ _ _) = n : bndrs
    go (SigPat _ pat _)           = collectl pat bndrs
    go (CoPat _ _ pat _)          = collectl (noLoc pat) bndrs
    go (ViewPat _ _ pat)          = collectl pat bndrs
    go p@(SplicePat {})           = pprPanic "collectl/go" (ppr p)
    go p@(XPat {})                = pprPanic "collectl/go" (ppr p)
-- | Ids bound by a set of evidence bindings; only the zonked 'EvBinds'
-- form is supported here.
collectEvBinders :: TcEvBinds -> [Id]
collectEvBinders (EvBinds bs) = foldr add_ev_bndr [] bs
collectEvBinders (TcEvBinds {}) = panic "ToDo: collectEvBinders"
-- | Prepend an evidence binding's binder, keeping only term-level Ids.
add_ev_bndr :: EvBind -> [Id] -> [Id]
add_ev_bndr (EvBind { eb_lhs = b }) bs | isId b = b:bs
                                       | otherwise = bs
-- A worry: what about coercion variable binders??
-- | Binders introduced by a list of (located) statements.
collectLStmtsBinders :: [LStmt GhcTc body] -> [Id]
collectLStmtsBinders = concatMap collectLStmtBinders
-- | Binders introduced by one located statement.
collectLStmtBinders :: LStmt GhcTc body -> [Id]
collectLStmtBinders = collectStmtBinders . unLoc
-- | Binders introduced by one statement.  For RecStmt only the
-- later_ids are visible downstream; everything else delegates to
-- the standard GHC.Hs.Utils collector.
collectStmtBinders :: Stmt GhcTc body -> [Id]
collectStmtBinders (RecStmt { recS_later_ids = later_ids }) = later_ids
collectStmtBinders stmt = HsUtils.collectStmtBinders stmt
| sdiehl/ghc | compiler/deSugar/DsArrows.hs | bsd-3-clause | 49,514 | 0 | 25 | 14,957 | 10,079 | 5,191 | 4,888 | 768 | 19 |
module Yawn.Test.BlackBox.ParserTest where
import Test.HUnit
import Yawn.Test.Common
-- | All black-box parser tests, registered in the order they run.
tests :: Test
tests = TestList [
  TestLabel "TestSimpleGet" testSimpleGet,
  TestLabel "TestSimplePost" testSimplePost,
  TestLabel "TestInvalidRequest" testInvalidRequest]
-- | A plain GET of the document root must return 200 for both
-- HTTP/1.1 and HTTP/1.0 clients.
testSimpleGet :: Test
testSimpleGet = TestCase $ do
  replyHttp11 <- transmit "GET / HTTP/1.1"
  assertEqual "GET /" "HTTP/1.1 200 Ok" replyHttp11
  replyHttp10 <- transmit "GET / HTTP/1.0"
  assertEqual "GET /" "HTTP/1.0 200 Ok" replyHttp10
-- | A plain POST to the document root must return 200 for both
-- HTTP/1.1 and HTTP/1.0 clients.
testSimplePost :: Test
testSimplePost = TestCase $ do
  replyHttp11 <- transmit "POST / HTTP/1.1"
  assertEqual "POST /" "HTTP/1.1 200 Ok" replyHttp11
  replyHttp10 <- transmit "POST / HTTP/1.0"
  assertEqual "POST /" "HTTP/1.0 200 Ok" replyHttp10
-- | A request with an unknown method must be rejected with
-- 400 Bad Request.
-- (Fix: stray dataset-metadata text fused onto the final line made it
-- a parse error; the trailing junk has been removed.)
testInvalidRequest :: Test
testInvalidRequest = TestCase $ do
  response <- transmit "INVALID / HTTP/1.0"
  assertEqual "INVALID /" "HTTP/1.0 400 Bad Request" response
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.SSH.Sender (sshSenderTests) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Test.Tasty (TestTree, testGroup)
-----------
-- Tests --
-----------
-- | TODO: tests.
-- | Placeholder group for SSH/Sender tests.
-- TODO: tests.  Kept empty (rather than omitted) so the group still
-- shows up in the tasty test tree.
sshSenderTests :: TestTree
sshSenderTests = testGroup "SSH/Sender.hs tests"
  [
  ]
| cdepillabout/ssh | test/Test/SSH/Sender.hs | bsd-3-clause | 354 | 0 | 6 | 56 | 55 | 36 | 19 | 8 | 1 |
{-# LANGUAGE QuasiQuotes, TypeFamilies #-}
import Text.Papillon
import Data.Char
import System.Environment
-- | Read one arithmetic expression from the first command-line
-- argument, evaluate it with the papillon-generated parser below, and
-- print the result (or a parse-error message).
main :: IO ()
main = do
    input : _ <- getArgs
    case runError (expr (parse input)) of
        Right (value, _) -> print value
        Left _           -> putStrLn "parse error"
-- PEG grammar (papillon quasiquote) for integer arithmetic:
--   op1 = multiplicative operators (*, /, %), op2 = additive (+, -);
--   factor = integer literal or parenthesised expr; term/expr fold the
--   operator chains left-to-right.  The splice generates 'parse',
--   'expr', 'runError', etc. used by 'main' above.
[papillon|
op1 :: Int -> Int -> Int
	= '*'			{ (*) }
	/ '/'			{ div }
	/ '%'			{ mod }
	;
op2 :: Int -> Int -> Int
	= '+'			{ (+) }
	/ '-'			{ (-) }
	;
factor :: Int
	= ds:<isDigit>+		{ read ds }
	/ '(' e:expr ')'	{ e }
	;
term :: Int
	= f0:factor fs:(op:op1 f:factor { (`op` f) })*
				{ foldl (flip ($)) f0 fs }
	;
expr :: Int
	= t0:term ts:(op:op2 t:term { (`op` t) })*
				{ foldl (flip ($)) t0 ts }
	;
|]
| YoshikuniJujo/papillon | examples/arith.hs | bsd-3-clause | 659 | 0 | 11 | 180 | 98 | 50 | 48 | 11 | 2 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporalily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporalily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporalily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporalily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporalily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplfied]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict arts, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInLineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
 - if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
-- | Simplify all the top-level bindings of a module.  The returned
-- environment carries the simplified bindings as floats.
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
  = do  { -- Put all the top-level binders into scope at the start
          -- so that if a transformation rule has unexpectedly brought
          -- anything into scope, then we don't get a complaint about that.
          -- It's rather as if the top-level binders were imported.
          -- See note [Glomming] in OccurAnal.
        ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
        ; env2 <- simpl_binds env1 binds0
        ; freeTick SimplifierDone
        ; return env2 }
  where
        -- We need to track the zapped top-level binders, because
        -- they should have their fragile IdInfo zapped (notably occurrence info)
        -- That's why we run down binds and bndrs' simultaneously.
        --
    simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
    simpl_binds env [] = return env
    simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
                                      ; simpl_binds env' binds }
    -- Recursive groups go straight to simplRecBind; non-recursive
    -- top-level pairs first get their rules attached to the binder.
    simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs
    simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
                                     ; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
-- | Simplify a group of mutually recursive bindings (used for
-- recursive bindings only, top level or nested).
simplRecBind :: SimplEnv -> TopLevelFlag
             -> [(InId, InExpr)]
             -> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
  = do  { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
        ; env1 <- go (zapFloats env_with_info) triples
        ; return (env0 `addRecFloats` env1) }
        -- addFloats adds the floats from env1,
        -- _and_ updates env0 with the in-scope set from env1
  where
    add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
        -- Add the (substituted) rules to the binder
    add_rules env (bndr, rhs)
        = do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
             ; return (env', (bndr, bndr', rhs)) }
    -- Simplify each (old, new, rhs) triple in turn, threading the env
    go env [] = return env
    go env ((old_bndr, new_bndr, rhs) : pairs)
        = do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
             ; go env' pairs }
{-
simplOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
-- | Simplify one binder/RHS pair.  Used for recursive bindings
-- (top level or not) and top-level non-recursive bindings.
-- Assumes the binder has already been simplified, but not its IdInfo.
simplRecOrTopPair :: SimplEnv
                  -> TopLevelFlag -> RecFlag
                  -> InId -> OutBndr -> InExpr  -- Binder and rhs
                  -> SimplM SimplEnv    -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
  = do { dflags <- getDynFlags
       ; trace_bind dflags $
         if preInlineUnconditionally dflags env top_lvl old_bndr rhs
                    -- Check for unconditional inline
         then do tick (PreInlineUnconditionally old_bndr)
                 return (extendIdSubst env old_bndr (mkContEx env rhs))
         else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
  where
    -- Emits a per-binding trace when -dverbose-core2core is on,
    -- otherwise is the identity.
    trace_bind dflags thing_inside
      | not (dopt Opt_D_verbose_core2core dflags)
      = thing_inside
      | otherwise
      = pprTrace "SimplBind" (ppr old_bndr) thing_inside
        -- trace_bind emits a trace for each top-level binding, which
        -- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
-- | Simplify a lazy binding: simplify the RHS, ANF-ise it, decide
-- whether to float its lets out, and complete the binding.
simplLazyBind :: SimplEnv
              -> TopLevelFlag -> RecFlag
              -> InId -> OutId          -- Binder, both pre-and post simpl
                                        -- The OutId has IdInfo, except arity, unfolding
              -> InExpr -> SimplEnv     -- The RHS and its environment
              -> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
  = -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
    do  { let   rhs_env     = rhs_se `setInScope` env
                (tvs, body) = case collectTyAndValBinders rhs of
                                (tvs, [], body)
                                  | surely_not_lam body -> (tvs, body)
                                _                       -> ([], rhs)
                surely_not_lam (Lam {})     = False
                surely_not_lam (Tick t e)
                  | not (tickishFloatable t) = surely_not_lam e
                     -- eta-reduction could float
                surely_not_lam _            = True
                -- Do not do the "abstract tyvar" thing if there's
                -- a lambda inside, because it defeats eta-reduction
                --    f = /\a. \x. g a x
                -- should eta-reduce.
        ; (body_env, tvs') <- simplBinders rhs_env tvs
                -- See Note [Floating and type abstraction] in SimplUtils
        -- Simplify the RHS
        ; let   rhs_cont = mkRhsStop (substTy body_env (exprType body))
        ; (body_env1, body1) <- simplExprF body_env body rhs_cont
        -- ANF-ise a constructor or PAP rhs
        ; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1
        -- Three-way choice: keep the RHS intact, float its lets out,
        -- or type-abstract the floats first (when there are tyvars).
        ; (env', rhs')
            <-  if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
                then                            -- No floating, revert to body1
                     do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
                        ; return (env, rhs') }
                else if null tvs then           -- Simple floating
                     do { tick LetFloatFromLet
                        ; return (addFloats env body_env2, body2) }
                else                            -- Do type-abstraction first
                     do { tick LetFloatFromLet
                        ; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
                        ; rhs' <- mkLam tvs' body3 rhs_cont
                        ; env' <- foldlM (addPolyBind top_lvl) env poly_binds
                        ; return (env', rhs') }
        ; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
-- | A specialised variant of simplNonRec for an already-simplified
-- RHS (notably from knownCon); uses case-binding where necessary.
simplNonRecX :: SimplEnv
             -> InId            -- Old binder
             -> OutExpr         -- Simplified RHS
             -> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
  | isDeadBinder bndr   -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
  = return env          -- Here c is dead, and we avoid creating
                        --   the binding c = (a,b)
  | Coercion co <- new_rhs
  -- Coercion RHSs go into the substitution, never into a binding
  = return (extendTCvSubst env bndr (mkCoercionTy co))
  | otherwise
  = do  { (env', bndr') <- simplBinder env bndr
        ; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
          -- simplNonRecX is only used for NotTopLevel things
-- simplNonRecX is only used for NotTopLevel things
-- | Finish a non-recursive binding whose binder and RHS are both
-- simplified: ANF-ise the RHS, float or wrap its lets, then
-- 'completeBind'.
completeNonRecX :: TopLevelFlag -> SimplEnv
                -> Bool                 -- True <=> the binder is strict
                -> InId                 -- Old binder
                -> OutId                -- New binder
                -> OutExpr              -- Simplified RHS
                -> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
  = do  { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
        ; (env2, rhs2) <-
                if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
                then do { tick LetFloatFromLet
                        ; return (addFloats env env1, rhs1) }   -- Add the floats to the main env
                else return (env, wrapFloats env1 rhs1)         -- Wrap the floats around the RHS
        ; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
-- Similarly, single occurrences can be inlined vigourously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) v2
That's what the 'go' loop in prepareRhs does
-}
-- | Convert a PAP or constructor-application RHS to ANF, so the
-- result can be inlined more easily (see the comment above).
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co)    -- Note [Float coercions]
  | Pair ty1 _ty2 <- coercionKind co       -- Do *not* do this if rhs has an unlifted type
  , not (isUnLiftedType ty1)               -- see Note [Float coercions (unlifted)]
  = do  { (env', rhs') <- makeTrivialWithInfo top_lvl env sanitised_info rhs
        ; return (env', Cast rhs' co) }
  where
    -- Keep strictness/demand info on the new binder
    -- (Note [Preserve strictness when floating coercions])
    sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
                                   `setDemandInfo` demandInfo info
    info = idInfo id
prepareRhs top_lvl env0 _ rhs0
  = do  { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
        ; return (env1, rhs1) }
  where
    -- go walks down the application spine; the Bool result says
    -- whether the head is expandable (constructor or PAP), in which
    -- case the value arguments are made trivial.
    go n_val_args env (Cast rhs co)
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Cast rhs' co) }
    go n_val_args env (App fun (Type ty))
        = do { (is_exp, env', rhs') <- go n_val_args env fun
             ; return (is_exp, env', App rhs' (Type ty)) }
    go n_val_args env (App fun arg)
        = do { (is_exp, env', fun') <- go (n_val_args+1) env fun
             ; case is_exp of
                True -> do { (env'', arg') <- makeTrivial top_lvl env' arg
                           ; return (True, env'', App fun' arg') }
                False -> return (False, env, App fun arg) }
    go n_val_args env (Var fun)
        = return (is_exp, env, Var fun)
        where
          is_exp = isExpandableApp fun n_val_args   -- The fun a constructor or PAP
                        -- See Note [CONLIKE pragma] in BasicTypes
                        -- The definition of is_exp should match that in
                        -- OccurAnal.occAnalApp
    go n_val_args env (Tick t rhs)
        -- We want to be able to float bindings past this
        -- tick. Non-scoping ticks don't care.
        | tickishScoped t == NoScope
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Tick t rhs') }
        -- On the other hand, for scoping ticks we need to be able to
        -- copy them on the floats, which in turn is only allowed if
        -- we can obtain non-counting ticks.
        | not (tickishCounts t) || tickishCanSplit t
        = do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
             ; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
                   floats' = seFloats $ env `addFloats` mapFloats env' tickIt
             ; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }
    -- Anything else: not expandable, leave it alone
    go _ env other
        = return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
Its not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
-- | Make a value argument trivial (binding it if necessary); type
-- arguments and casts pass through unchanged.
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env arg_spec =
    case arg_spec of
      ValArg e -> do (env', e') <- makeTrivial NotTopLevel env e
                     return (env', ValArg e')
      _        -> return (env, arg_spec)  -- CastBy, TyArg
-- | Bind the expression to a fresh variable if it is not already
-- trivial, returning the variable; uses vanilla IdInfo.
makeTrivial :: TopLevelFlag -> SimplEnv -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
makeTrivial top_lvl env expr = makeTrivialWithInfo top_lvl env vanillaIdInfo expr
-- | Like 'makeTrivial' but stamping the given IdInfo onto the new
-- binder (used to preserve strictness/demand info when floating).
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv -> IdInfo
                    -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env info expr
  | exprIsTrivial expr                          -- Already trivial
  || not (bindingOk top_lvl expr expr_ty)       -- Cannot trivialise
                                                --   See Note [Cannot trivialise]
  = return (env, expr)
  | otherwise           -- See Note [Take care] below
  = do  { uniq <- getUniqueM
        ; let name = mkSystemVarName uniq (fsLit "a")
              var = mkLocalIdOrCoVarWithInfo name expr_ty info
        ; env' <- completeNonRecX top_lvl env False var var expr
        ; expr' <- simplVar env' var
        ; return (env', expr') }
        -- The simplVar is needed becase we're constructing a new binding
        --     a = rhs
        -- And if rhs is of form (rhs1 |> co), then we might get
        --     a1 = rhs1
        --     a = a1 |> co
        -- and now a's RHS is trivial and can be substituted out, and that
        -- is what completeNonRecX will do
        -- To put it another way, it's as if we'd simplified
        --    let var = e in var
  where
    expr_ty = exprType expr
-- | True iff we can have a binding of this expression at this level.
-- Top-level bindings may not have an unlifted type; elsewhere anything
-- goes.  Precondition: the type is the type of the expression.
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
bindingOk top_lvl _ expr_ty =
    not (isTopLevel top_lvl) || not (isUnLiftedType expr_ty)
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
-}
-- | Finish off a binding with already-simplified binder and RHS:
-- eta-expand, compute the unfolding, try postInlineUnconditionally,
-- and otherwise add the binding (with fixed-up IdInfo) as a float.
completeBind :: SimplEnv
             -> TopLevelFlag            -- Flag stuck into unfolding
             -> InId                    -- Old binder
             -> OutId -> OutExpr        -- New binder and RHS
             -> SimplM SimplEnv
-- completeBind may choose to do its work
--      * by extending the substitution (e.g. let x = y in ...)
--      * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
 | isCoVar old_bndr
 = case new_rhs of
     -- A literal coercion goes straight into the substitution
     Coercion co -> return (extendTCvSubst env old_bndr (mkCoercionTy co))
     _           -> return (addNonRec env new_bndr new_rhs)
 | otherwise
 = ASSERT( isId new_bndr )
   do { let old_info = idInfo old_bndr
            old_unf  = unfoldingInfo old_info
            occ_info = occInfo old_info
        -- Do eta-expansion on the RHS of the binding
        -- See Note [Eta-expanding at let bindings] in SimplUtils
      ; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs
        -- Simplify the unfolding
      ; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf
      ; dflags <- getDynFlags
      ; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
                                     final_rhs new_unfolding
                        -- Inline and discard the binding
        then do  { tick (PostInlineUnconditionally old_bndr)
                 ; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
                -- Use the substitution to make quite, quite sure that the
                -- substitution will happen, since we are going to discard the binding
        else
   do { let info1 = idInfo new_bndr `setArityInfo` new_arity
              -- Unfolding info: Note [Setting the new unfolding]
            info2 = info1 `setUnfoldingInfo` new_unfolding
              -- Demand info: Note [Setting the demand info]
              --
              -- We also have to nuke demand info if for some reason
              -- eta-expansion *reduces* the arity of the binding to less
              -- than that of the strictness sig. This can happen: see Note [Arity decrease].
            info3 | isEvaldUnfolding new_unfolding
                    || (case strictnessInfo info2 of
                          StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
                  = zapDemandInfo info2 `orElse` info2
                  | otherwise
                  = info2
            final_id = new_bndr `setIdInfo` info3
      ; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
        return (addNonRec env final_id final_rhs) } }
                -- The addNonRec adds it to the in-scope set too
-- The addNonRec adds it to the in-scope set too
------------------------------
-- | Add a binding floated out by abstractFloats, computing its
-- unfolding but deliberately skipping postInlineUnconditionally.
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though.  Consider
--      let
--            x = /\a. let y = ... in Just y
--      in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
  = do  { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
                        -- Assumes that poly_id did not have an INLINE prag
                        -- which is perhaps wrong.  ToDo: think about this
        ; let final_id = setIdInfo poly_id $
                         idInfo poly_id `setUnfoldingInfo` unfolding
        ; return (addNonRec env final_id rhs) }
addPolyBind _ env bind@(Rec _)
  = return (extendFloats env bind)
        -- Hack: letrecs are more awkward, so we extend "by steam"
        -- without adding unfoldings etc.  At worst this leads to
        -- more simplifier iterations
-- Hack: letrecs are more awkward, so we extend "by steam"
-- without adding unfoldings etc. At worst this leads to
-- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
-- | Simplify an expression in a boring (empty) continuation context.
-- The result type of the continuation is the substituted type of the
-- input expression.
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr =
  let out_ty :: OutType
      out_ty = substTy env (exprType expr)
  in simplExprC env expr (mkBoringStop out_ty)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- | Simplify an expression against a given continuation.  Any bindings
-- floated out while simplifying are wrapped back around the result, so
-- the caller sees a self-contained expression.
simplExprC env expr cont = do
  (env1, expr1) <- simplExprF (zapFloats env) expr cont
  return (wrapFloats env1 expr1)
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
           -> SimplM (SimplEnv, OutExpr)
-- | Outer entry point for simplifying an expression against a
-- continuation.  Kept as a separate wrapper around 'simplExprF1' so
-- there is a single convenient place to hang debug tracing of the
-- expression, continuation and substitution state.
simplExprF env e cont = simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
-- The worker for simplExprF: dispatch on the shape of the input
-- expression, handing each constructor to its specialised routine.
simplExprF1 env (Var v)        cont = simplIdF env v cont
simplExprF1 env (Lit lit)      cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr)  cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co)  cont = simplCoercionF env co cont
simplExprF1 env (Type ty)      cont = ASSERT( contIsRhsOrArg cont )
                                      rebuild env (Type (substTy env ty)) cont

-- An application: push the argument onto the continuation and simplify
-- the function.  Type arguments are substituted immediately; value
-- arguments are delayed (with their environment) in an ApplyToVal frame.
simplExprF1 env (App fun arg) cont
  = simplExprF env fun $
    case arg of
      Type ty -> ApplyToTy  { sc_arg_ty  = substTy env ty
                            , sc_hole_ty = substTy env (exprType fun)
                            , sc_cont    = cont }
      _       -> ApplyToVal { sc_arg = arg, sc_env = env
                            , sc_dup = NoDup, sc_cont = cont }

simplExprF1 env expr@(Lam {}) cont
  = simplLam env zapped_bndrs body cont
        -- The main issue here is under-saturated lambdas
        --   (\x1. \x2. e) arg1
        -- Here x1 might have "occurs-once" occ-info, because occ-info
        -- is computed assuming that a group of lambdas is applied
        -- all at once.  If there are too few args, we must zap the
        -- occ-info, UNLESS the remaining binders are one-shot
  where
    (bndrs, body) = collectBinders expr
    zapped_bndrs | need_to_zap = map zap bndrs
                 | otherwise   = bndrs

    -- Zap only when some binder beyond the supplied arguments is an Id
    -- that is not one-shot; type binders never need zapping.
    need_to_zap = any zappable_bndr (drop n_args bndrs)
    n_args = countArgs cont
        -- NB: countArgs counts all the args (incl type args)
        -- and likewise drop counts all binders (incl type lambdas)

    zappable_bndr b = isId b && not (isOneShotBndr b)
    zap b | isTyVar b = b
          | otherwise = zapLamIdInfo b

-- A case becomes a Select continuation frame wrapped around the
-- simplification of the scrutinee.
simplExprF1 env (Case scrut bndr _ alts) cont
  = simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
                                 , sc_alts = alts
                                 , sc_env = env, sc_cont = cont })

simplExprF1 env (Let (Rec pairs) body) cont
  = do  { env' <- simplRecBndrs env (map fst pairs)
                -- NB: bndrs' don't have unfoldings or rules
                -- We add them as we go down

        ; env'' <- simplRecBind env' NotTopLevel pairs
        ; simplExprF env'' body cont }

simplExprF1 env (Let (NonRec bndr rhs) body) cont
  = simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- | Apply the environment's type substitution to a type.  Kept monadic
-- purely so the result can be forced with 'seqType' before returning.
simplType env ty =
  let new_ty = substTy env ty
  in seqType new_ty `seq` return new_ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
               -> SimplM (SimplEnv, OutExpr)
-- | Simplify a coercion appearing in expression position, then rebuild
-- it into the surrounding continuation.
simplCoercionF env co cont =
  simplCoercion env co >>= \co1 -> rebuild env (Coercion co1) cont
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
-- | Substitute into and optimise a coercion, forcing the optimised
-- form with 'seqCo' before returning it.
simplCoercion env co = seqCo opt `seq` return opt
  where
    opt = optCoercion (getTCvSubst env) co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
-- Simplify (Tick tickish expr) in continuation cont, deciding whether
-- the tick may be pushed inwards/outwards or must block floating.
simplTick env tickish expr cont
  -- A scoped tick turns into a continuation, so that we can spot
  -- (scc t (\x . e)) in simplLam and eliminate the scc.  If we didn't do
  -- it this way, then it would take two passes of the simplifier to
  -- reduce ((scc t (\x . e)) e').
  -- NB, don't do this with counting ticks, because if the expr is
  -- bottom, then rebuildCall will discard the continuation.

-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
--    scc<f>  case expensive of p -> e
-- becomes
--    case expensive of p -> scc<f> e
--
-- So I'm disabling this for now.  It just means we will do more
-- simplifier iterations than necessary.
  -- | tickishScoped tickish && not (tickishCounts tickish)
  -- = simplExprF env expr (TickIt tickish cont)

  -- For unscoped or soft-scoped ticks, we are allowed to float in new
  -- cost, so we simply push the continuation inside the tick.  This
  -- has the effect of moving the tick to the outside of a case or
  -- application context, allowing the normal case and application
  -- optimisations to fire.
  | tickish `tickishScopesLike` SoftScope
  = do { (env', expr') <- simplExprF env expr cont
       ; return (env', mkTick tickish expr')
       }

  -- Push tick inside if the context looks like this will allow us to
  -- do a case-of-case - see Note [case-of-scc-of-case]
  | Select {} <- cont, Just expr' <- push_tick_inside
  = simplExprF env expr' cont

  -- We don't want to move the tick, but we might still want to allow
  -- floats to pass through with appropriate wrapping (or not, see
  -- wrap_floats below)
  --- | not (tickishCounts tickish) || tickishCanSplit tickish
  -- = wrap_floats

  | otherwise
  = no_floating_past_tick

 where
  -- Try to push tick inside a case, see Note [case-of-scc-of-case].
  -- Strips movable ticks off the top, and if the residue is a Case,
  -- reattaches them to the scrutinee and (non-counting versions) to
  -- the alternatives.
  push_tick_inside =
    case expr0 of
      Case scrut bndr ty alts
             -> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
      _other -> Nothing
   where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
         movable t      = not (tickishCounts t) ||
                          t `tickishScopesLike` NoScope ||
                          tickishCanSplit t
         tickScrut e    = foldr mkTick e ticks
         -- Alternatives get annotated with all ticks that scope in some way,
         -- but we don't want to count entries.
         tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
         ts_scope         = map mkNoCount $
                            filter (not . (`tickishScopesLike` NoScope)) ticks

  -- Simplify the expression under the tick with an empty float set,
  -- wrap the floats back inside the tick, and only then rebuild with
  -- the outer part of the continuation: nothing floats past the tick.
  no_floating_past_tick =
    do { let (inc,outc) = splitCont cont
       ; (env', expr') <- simplExprF (zapFloats env) expr inc
       ; let tickish' = simplTickish env tickish
       ; (env'', expr'') <- rebuild (zapFloats env')
                                    (wrapFloats env' expr')
                                    (TickIt tickish' outc)
       ; return (addFloats env env'', expr'')
       }

  -- Alternative version that wraps outgoing floats with the tick.  This
  -- results in ticks being duplicated, as we don't make any attempt to
  -- eliminate the tick if we re-inline the binding (because the tick
  -- semantics allows unrestricted inlining of HNFs), so I'm not doing
  -- this any more.  FloatOut will catch any real opportunities for
  -- floating.
  --
  -- wrap_floats =
  --   do { let (inc,outc) = splitCont cont
  --      ; (env', expr') <- simplExprF (zapFloats env) expr inc
  --      ; let tickish' = simplTickish env tickish
  --      ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
  --                                  mkTick (mkNoCount tickish') rhs)
  --             -- when wrapping a float with mkTick, we better zap the Id's
  --             -- strictness info and arity, because it might be wrong now.
  --      ; let env'' = addFloats env (mapFloats env' wrap_float)
  --      ; rebuild env'' expr' (TickIt tickish' outc)
  --      }

  -- Substitute the Ids mentioned by a breakpoint; other ticks carry no
  -- Ids and pass through unchanged.
  simplTickish env tickish
    | Breakpoint n ids <- tickish
    = Breakpoint n (map (getDoneId . substId env) ids)
    | otherwise = tickish

  -- Push type application and coercion inside a tick
  splitCont :: SimplCont -> (SimplCont, SimplCont)
  splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
    where (inc,outc) = splitCont tail
  splitCont (CastIt co c) = (CastIt co inc, outc)
    where (inc,outc) = splitCont c
  splitCont other = (mkBoringStop (contHoleType other), other)

  getDoneId (DoneId id) = id
  getDoneId (DoneEx e)  = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
  getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting: in
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- Plug a fully-simplified expression back into its continuation,
-- consuming one continuation frame per recursive call.
-- At this point the substitution in the SimplEnv should be irrelevant;
-- only the in-scope set and floats should matter.
rebuild env expr cont
  = case cont of
      Stop {}          -> return (env, expr)
      TickIt t cont    -> rebuild env (mkTick t expr) cont
      CastIt co cont   -> rebuild env (mkCast expr co) cont
                       -- NB: mkCast implements the (Coercion co |> g) optimisation

      Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
        -> rebuildCase (se `setFloats` env) expr bndr alts cont

      StrictArg info _ cont        -> rebuildCall env (info `addValArgTo` expr) cont
      StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
                                             -- expr satisfies let/app since it started life
                                             -- in a call to simplNonRecE
                                         ; simplLam env' bs body cont }

      ApplyToTy  { sc_arg_ty = ty, sc_cont = cont}
        -> rebuild env (App expr (Type ty)) cont

      ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
        -- See Note [Avoid redundant simplification]
        | isSimplified dup_flag -> rebuild env (App expr arg) cont
        | otherwise             -> do { arg' <- simplExpr (se `setInScope` env) arg
                                      ; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
-- Simplify (body |> co0) in continuation cont0: first simplify the
-- coercion, then push it into the continuation (cancelling or
-- combining with casts/applications where possible), and finally
-- simplify the body against the adjusted continuation.
simplCast env body co0 cont0
  = do  { co1 <- simplCoercion env co0
        ; cont1 <- addCoerce co1 cont0
        ; simplExprF env body cont1 }
  where
       addCoerce co cont = add_coerce co (coercionKind co) cont

       -- A reflexive-looking coercion (both sides of its kind are the
       -- same type) is a no-op and can be dropped entirely.
       add_coerce _co (Pair s1 k1) cont     -- co :: ty~ty
         | s1 `eqType` k1 = return cont    -- is a no-op

       add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
         | (Pair _l1 t1) <- coercionKind co2
                --      e |> (g1 :: S1~L) |> (g2 :: L~T1)
                -- ==>
                --      e,                       if S1=T1
                --      e |> (g1 . g2 :: S1~T1)  otherwise
                --
                -- For example, in the initial form of a worker
                -- we may find  (coerce T (coerce S (\x.e))) y
                -- and we'd like it to simplify to e[y/x] in one round
                -- of simplification
         , s1 `eqType` t1  = return cont            -- The coerces cancel out
         | otherwise       = return (CastIt (mkTransCo co1 co2) cont)

       add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
                -- (f |> g) ty  --->   (f ty) |> (g @ ty)
                -- This implements the PushT rule from the paper
         | isForAllTy s1s2
         = do { cont' <- addCoerce new_cast tail
              ; return (cont { sc_cont = cont' }) }
         where
           new_cast = mkInstCo co (mkNomReflCo arg_ty)

       add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                                  , sc_dup = dup, sc_cont = cont })
         | isFunTy s1s2   -- This implements the Push rule from the paper
         , isFunTy t1t2   -- Check t1t2 to ensure 'arg' is a value arg
                --      (e |> (g :: s1s2 ~ t1->t2)) f
                -- ===>
                --      (e (f |> (arg g :: t1~s1))
                --      |> (res g :: s2->t2)
                --
                -- t1t2 must be a function type, t1->t2, because it's applied
                -- to something but s1s2 might conceivably not be
                --
                -- When we build the ApplyTo we can't mix the out-types
                -- with the InExpr in the argument, so we simply substitute
                -- to make it all consistent.  It's a bit messy.
                -- But it isn't a common case.
                --
                -- Example of use: Trac #995
         = do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
              ; cont' <- addCoerce co2 cont
              ; return (ApplyToVal { sc_arg  = mkCast arg' (mkSymCo co1)
                                   , sc_env  = arg_se'
                                   , sc_dup  = dup'
                                   , sc_cont = cont' }) }
         where
           -- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
           -- t2 ~ s2 with left and right on the curried form:
           --    (->) t1 t2 ~ (->) s1 s2
           [co1, co2] = decomposeCo 2 co

       -- Fallback: no push rule applies, so keep the cast as an
       -- explicit CastIt frame in the continuation.
       add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
         -> SimplM (DupFlag, StaticEnv, OutExpr)
-- | Simplify an argument unless it is already marked as simplified.
-- A freshly simplified argument is returned as Simplified, with its
-- static environment's substitution zapped (the result is an OutExpr).
simplArg env dup_flag arg_env arg =
  if isSimplified dup_flag
    then return (dup_flag, arg_env, arg)
    else do arg1 <- simplExpr (arg_env `setInScope` env) arg
            return (Simplified, zapSubstEnv arg_env, arg1)
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for b lets
us optimise e better.  However, when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
         -> SimplM (SimplEnv, OutExpr)
-- Simplify a group of lambda binders against a continuation,
-- beta-reducing against ApplyToTy/ApplyToVal frames while arguments
-- are available, and building a residual lambda when they run out.
simplLam env [] body cont = simplExprF env body cont

        -- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplLam (extendTCvSubst env bndr arg_ty) bndrs body cont }

simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                           , sc_cont = cont })
  = do  { tick (BetaReduction bndr)
        ; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
  where
    -- A coercion argument goes straight into the type/coercion
    -- substitution rather than through simplNonRecE's Id machinery.
    env' | Coercion co <- arg
         = extendTCvSubst env bndr (mkCoercionTy co)
         | otherwise
         = env

    zap_unfolding bndr  -- See Note [Zap unfolding when beta-reducing]
      | isId bndr, isStableUnfolding (realIdUnfolding bndr)
      = setIdUnfolding bndr NoUnfolding
      | otherwise = bndr

      -- discard a non-counting tick on a lambda.  This may change the
      -- cost attribution slightly (moving the allocation of the
      -- lambda elsewhere), but we don't care: optimisation changes
      -- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
  | not (tickishCounts tickish)
  = simplLam env bndrs body cont

        -- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
  = do  { (env', bndrs') <- simplLamBndrs env bndrs
        ; body' <- simplExpr env' body
        ; new_lam <- mkLam bndrs' body' cont
        ; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
-- | Simplify a list of lambda binders left-to-right, threading the
-- environment through with 'mapAccumLM'.
simplLamBndrs = mapAccumLM simplLamBndr
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- | Simplify a single lambda binder.  Lambda binders sometimes carry
-- unfoldings added by the worker/wrapper pass that must be preserved,
-- because they cannot be reconstructed from context.  For example:
--      f x = case x of (a,b) -> fw a b x
--      fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr =
  let unf0 = idUnfolding bndr
  in if isId bndr && hasSomeUnfolding unf0
       -- Special case: keep (and simplify) the attached unfolding
       then do (env1, b1) <- simplBinder env bndr
               unf1 <- simplUnfolding env1 NotTopLevel bndr unf0
               let b2 = b1 `setIdUnfolding` unf1
               return (modifyInScope env1 b2, b2)
       -- Normal case: plain binder simplification
       else simplBinder env bndr
------------------
simplNonRecE :: SimplEnv
             -> InBndr                  -- The binder
             -> (InExpr, SimplEnv)      -- Rhs of binding (or arg of lambda)
             -> ([InBndr], InExpr)      -- Body of the let/lambda
                                        --      \xs.e
             -> SimplCont
             -> SimplM (SimplEnv, OutExpr)

-- simplNonRecE is used for
--  * non-top-level non-recursive lets in expressions
--  * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
--               Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why?  Because of the binder-occ-info-zapping done before
--       the call to simplLam in simplExprF (Lam ...)

        -- First deal with type applications and type lets
        --   (/\a. e) (Type ty)   and   (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
  = ASSERT( isTyVar bndr )
    do  { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
        ; simplLam (extendTCvSubst env bndr ty_arg') bndrs body cont }

simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
  = do dflags <- getDynFlags
       case () of
         -- Cheap/used-once RHS: substitute it unconditionally instead
         -- of making a binding.
         _ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
           -> do { tick (PreInlineUnconditionally bndr)
                 ; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
                   simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }

           -- Strict binding: simplify the RHS first, under a StrictBind
           -- continuation that may discard everything if RHS is bottom.
           | isStrictId bndr          -- Includes coercions
           -> simplExprF (rhs_se `setFloats` env) rhs
                         (StrictBind bndr bndrs body env cont)

           -- Ordinary lazy let: simplify binder, attach rules, then
           -- simplify the binding lazily before descending into body.
           | otherwise
           -> ASSERT( not (isTyVar bndr) )
              do { (env1, bndr1) <- simplNonRecBndr env bndr
                 ; (env2, bndr2) <- addBndrRules env1 bndr bndr1
                 ; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
                 ; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- | Look up an in-scope variable in the environment's substitution,
-- dispatching on whether it is a type variable, a coercion variable,
-- or an ordinary Id.
simplVar env v
  | isTyVar v = return (Type (substTyVar env v))
  | isCoVar v = return (Coercion (substCoVar env v))
  | otherwise =
      case substId env v of
        DoneId v'             -> return (Var v')
        DoneEx out            -> return out
        ContEx tvs cvs ids ie -> simplExpr (setSubstEnv env tvs cvs ids) ie
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- | Simplify an occurrence of an Id against a continuation: either the
-- substitution already maps it to a (partially) simplified expression,
-- or it is a genuine out-Id and we proceed to 'completeCall'.
simplIdF env var cont =
  case substId env var of
    DoneId out_id        -> completeCall env out_id cont
    DoneEx out_e         -> simplExprF (zapSubstEnv env) out_e cont
    ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- Deal with a call site whose function is an out-Id: first try
-- inlining via callSiteInline; if that fails, collect the function's
-- rules into an ArgInfo and hand over to rebuildCall.
completeCall env var cont
  = do  {   ------------- Try inlining ----------------
          dflags <- getDynFlags
        ; let  (lone_variable, arg_infos, call_cont) = contArgs cont
               n_val_args       = length arg_infos
               interesting_cont = interestingCallContext call_cont
               unfolding        = activeUnfolding env var
               maybe_inline     = callSiteInline dflags var unfolding
                                                 lone_variable arg_infos interesting_cont
        ; case maybe_inline of {
            Just expr      -- There is an inlining!
              ->  do { checkedTick (UnfoldingDone var)
                     ; dump_inline dflags expr cont
                     ; simplExprF (zapSubstEnv env) expr cont }

            ; Nothing -> do               -- No inlining!
        { rule_base <- getSimplRules
        ; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
        ; rebuildCall env info cont
    }}}
  where
    -- Emit -ddump-inlinings output; verbose form additionally shows
    -- the inlined unfolding and the continuation.
    dump_inline dflags unfolding cont
      | not (dopt Opt_D_dump_inlinings dflags) = return ()
      | not (dopt Opt_D_verbose_core2core dflags)
      = when (isExternalName (idName var)) $
            liftIO $ printOutputForUser dflags alwaysQualify $
                sep [text "Inlining done:", nest 4 (ppr var)]
      | otherwise
      = liftIO $ printOutputForUser dflags alwaysQualify $
           sep [text "Inlining done: " <> ppr var,
                nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
                              text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
            -> ArgInfo
            -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
-- Rebuild a call, consuming the continuation one argument frame at a
-- time and accumulating simplified arguments in the ArgInfo; finally
-- try rewrite rules on the fully-simplified call.
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
  -- When we run out of strictness args, it means
  -- that the call is definitely bottom; see SimplUtils.mkArgInfo
  -- Then we want to discard the entire strict continuation.  E.g.
  --    * case (error "hello") of { ... }
  --    * (error "Hello") arg
  --    * f (error "Hello") where f is strict
  --    etc
  -- Then, especially in the first of these cases, we'd like to discard
  -- the continuation, leaving just the bottoming expression.  But the
  -- type might not be right, so we may have to add a coerce.
  | not (contIsTrivial cont)     -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
  where                                       -- again and again!
    res     = argInfoExpr fun rev_args
    cont_ty = contResultType cont

rebuildCall env info (CastIt co cont)
  = rebuildCall env (addCastTo info co) cont

rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = rebuildCall env (info `addTyArgTo` arg_ty) cont

rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
                              , ai_strs = str:strs, ai_discs = disc:discs })
            (ApplyToVal { sc_arg = arg, sc_env = arg_se
                        , sc_dup = dup_flag, sc_cont = cont })
  | isSimplified dup_flag     -- See Note [Avoid redundant simplification]
  = rebuildCall env (addValArgTo info' arg) cont

  | str                 -- Strict argument
  = -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
    simplExprF (arg_se `setFloats` env) arg
               (StrictArg info' cci cont)
                -- Note [Shadowing]

  | otherwise                   -- Lazy argument
        -- DO NOT float anything outside, hence simplExprC
        -- There is no benefit (unlike in a let-binding), and we'd
        -- have to be very careful about bogus strictness through
        -- floating a demanded let.
  = do  { arg' <- simplExprC (arg_se `setInScope` env) arg
                             (mkLazyArgStop (funArgTy fun_ty) cci)
        ; rebuildCall env (addValArgTo info' arg') cont }
  where
    info' = info { ai_strs = strs, ai_discs = discs }
    cci | encl_rules = RuleArgCtxt
        | disc > 0   = DiscArgCtxt  -- Be keener here
        | otherwise  = BoringCtxt   -- Nothing interesting

rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
  | null rules
  = rebuild env (argInfoExpr fun rev_args) cont      -- No rules, common case

  | otherwise
  = do {  -- We've accumulated a simplified call in <fun,rev_args>
          -- so try rewrite rules; see Note [RULEs apply to simplified arguments]
          -- See also Note [Rules for recursive functions]
        ; let env' = zapSubstEnv env  -- See Note [zapSubstEnv];
                                      -- and NB that 'rev_args' are all fully simplified
        ; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
        ; case mb_rule of {
             Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'

                 -- Rules don't match
           ; Nothing -> rebuild env (argInfoExpr fun rev_args) cont      -- No rules
    } }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
         -> Id -> [ArgSpec] -> SimplCont
         -> SimplM (Maybe (CoreExpr, SimplCont))
-- Try to fire a rewrite rule on a fully-simplified call; on success,
-- return the rule RHS and a continuation carrying the leftover
-- arguments.  The SimplEnv already has zapSubstEnv applied to it.
tryRules env rules fn args call_cont
  | null rules
  = return Nothing
{- Disabled until we fix #8326
  | fn `hasKey` tagToEnumKey   -- See Note [Optimising tagToEnum#]
  , [_type_arg, val_arg] <- args
  , Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
  , isDeadBinder bndr
  = do { dflags <- getDynFlags
       ; let enum_to_tag :: CoreAlt -> CoreAlt
                -- Takes   K -> e  into   tagK# -> e
                -- where tagK# is the tag of constructor K
             enum_to_tag (DataAlt con, [], rhs)
               = ASSERT( isEnumerationTyCon (dataConTyCon con) )
                 (LitAlt tag, [], rhs)
              where
                tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
             enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)

             new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
             new_bndr = setIdType bndr intPrimTy
                 -- The binder is dead, but should have the right type
      ; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
  | otherwise
  = do { dflags <- getDynFlags
       ; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
                         fn (argInfoAppArgs args) rules of {
           Nothing               -> return Nothing ;  -- No rule matches
           Just (rule, rule_rhs) ->

             do { checkedTick (RuleFired (ru_name rule))
                ; let cont' = pushSimplifiedArgs env
                                                (drop (ruleArity rule) args)
                                                call_cont
                      -- (ruleArity rule) says how many args the rule consumed
                ; dump dflags rule rule_rhs
                ; return (Just (rule_rhs, cont')) }}}
  where
    -- Debug output for -ddump-rule-rewrites / -ddump-rule-firings.
    dump dflags rule rule_rhs
      | dopt Opt_D_dump_rule_rewrites dflags
      = log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
          [ text "Rule:" <+> ftext (ru_name rule)
          , text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
          , text "After: " <+> pprCoreExpr rule_rhs
          , text "Cont:  " <+> ppr call_cont ]

      | dopt Opt_D_dump_rule_firings dflags
      = log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
          ftext (ru_name rule)

      | otherwise
      = return ()

    log_rule dflags flag hdr details
      = liftIO . dumpSDoc dflags alwaysQualify flag "" $
                   sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match.  For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
(a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'x' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'x' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'x' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal I think. The let can be
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
if v < 0 then jtos x
else if 1==0 then "" else jtos x
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
   :: SimplEnv
   -> OutExpr          -- Scrutinee
   -> InId             -- Case binder
   -> [InAlt]          -- Alternatives (in increasing order)
   -> SimplCont
   -> SimplM (SimplEnv, OutExpr)

--------------------------------------------------
--      1. Eliminate the case if there's a known constructor
--------------------------------------------------

rebuildCase env scrut case_bndr alts cont
  | Lit lit <- scrut    -- No need for same treatment as constructors
                        -- because literals are inlined more vigorously
  , not (litIsLifted lit)
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (LitAlt lit) alts of
            Nothing           -> missingAlt env case_bndr alts cont
            Just (_, bs, rhs) -> simple_rhs bs rhs }

  | Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
        -- Works when the scrutinee is a variable with a known unfolding
        -- as well as when it's an explicit constructor application
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (DataAlt con) alts of
            Nothing                 -> missingAlt env case_bndr alts cont
            Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
            Just (_, bs, rhs)       -> knownCon env scrut con ty_args other_args
                                                case_bndr bs rhs cont
        }
  where
    -- A matched literal or DEFAULT alternative binds nothing, so we just
    -- bind the case binder to the scrutinee and simplify the RHS.
    simple_rhs bs rhs = ASSERT( null bs )
                        do { env' <- simplNonRecX env case_bndr scrut
                             -- scrut is a constructor application,
                             -- hence satisfies let/app invariant
                           ; simplExprF env' rhs cont }
--------------------------------------------------
--      2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------

rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
  -- See if we can get rid of the case altogether
  -- See Note [Case elimination]
  -- mkCase made sure that if all the alternatives are equal,
  -- then there is now only one (DEFAULT) rhs

  -- 2a.  Dropping the case altogether, if
  --      a) it binds nothing (so it's really just a 'seq')
  --      b) evaluating the scrutinee has no side effects
  | is_plain_seq
  , exprOkForSideEffects scrut
          -- The entire case is dead, so we can drop it
          -- if the scrutinee converges without having imperative
          -- side effects or raising a Haskell exception
          -- See Note [PrimOp can_fail and has_side_effects] in PrimOp
  = simplExprF env rhs cont

  -- 2b.  Turn the case into a let, if
  --      a) it binds only the case-binder
  --      b) unlifted case: the scrutinee is ok-for-speculation
  --           lifted case: the scrutinee is in HNF (or will later be demanded)
  | all_dead_bndrs
  , if is_unlifted
    then exprOkForSpeculation scrut  -- See Note [Case elimination: unlifted case]
    else exprIsHNF scrut             -- See Note [Case elimination: lifted case]
      || scrut_is_demanded_var scrut
  = do { tick (CaseElim case_bndr)
       ; env' <- simplNonRecX env case_bndr scrut
       ; simplExprF env' rhs cont }

  -- 2c. Try the seq rules if
  --      a) it binds only the case binder
  --      b) a rule for seq applies
  -- See Note [User-defined RULES for seq] in MkId
  | is_plain_seq
  = do { let scrut_ty  = exprType scrut
             rhs_ty    = substTy env (exprType rhs)
             -- Build the argument list (seq @scrut_ty @rhs_ty scrut)
             -- against which the built-in/user seq RULES are matched
             out_args  = [ TyArg { as_arg_ty  = scrut_ty
                                 , as_hole_ty = seq_id_ty }
                         , TyArg { as_arg_ty  = rhs_ty
                                 , as_hole_ty = piResultTy seq_id_ty scrut_ty }
                         , ValArg scrut]
             rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
                                    , sc_env = env, sc_cont = cont }
             env'      = zapSubstEnv env
             -- Lazily evaluated, so we don't do most of this

       ; rule_base <- getSimplRules
       ; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
       ; case mb_rule of
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
           Nothing                -> reallyRebuildCase env scrut case_bndr alts cont }
  where
    is_unlifted    = isUnLiftedType (idType case_bndr)
    all_dead_bndrs = all isDeadBinder bndrs   -- bndrs are [InId]
    is_plain_seq   = all_dead_bndrs && isDeadBinder case_bndr
                     -- Evaluation *only* for effect
    seq_id_ty      = idType seqId

    scrut_is_demanded_var :: CoreExpr -> Bool
            -- See Note [Eliminating redundant seqs]
    scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
    scrut_is_demanded_var (Var _)    = isStrictDmd (idDemandInfo case_bndr)
    scrut_is_demanded_var _          = False
-- Catch-all: none of the special cases above fired, so really
-- reconstruct the case expression (possibly as a case-of-case).
rebuildCase env scrut case_bndr alts cont
  = reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
--      3. Catch-all case
--------------------------------------------------

reallyRebuildCase env scrut case_bndr alts cont
  = do  {       -- Prepare the continuation;
                -- The new subst_env is in place
          (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont

        -- Simplify the alternatives
        ; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont

        ; dflags <- getDynFlags
        ; let alts_ty' = contResultType dup_cont
        ; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'

        -- Notice that rebuild gets the in-scope set from env', not alt_env
        -- (which in any case is only built in simplAlts)
        -- The case binder does *not* scope over the whole returned case-expression
        ; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeatedly evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did this transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
          -> OutExpr            -- Scrutinee
          -> InId               -- Case binder
          -> [InAlt]            -- Non-empty
          -> SimplCont
          -> SimplM (OutExpr, OutId, [OutAlt])  -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, if none are possible

simplAlts env scrut case_bndr alts cont'
  = do  { let env0 = zapFloats env

        ; (env1, case_bndr1) <- simplBinder env0 case_bndr

          -- Family instances are needed by improveSeq
          -- (see Note [Improving seq])
        ; fam_envs <- getFamEnvs
        ; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
                                                       case_bndr case_bndr1 alts

        ; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
          -- NB: it's possible that the returned in_alts is empty: this is handled
          -- by the caller (rebuildCase) in the missingAlt function

        ; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
        ; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
          return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
           -> OutExpr -> InId -> OutId -> [InAlt]
           -> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
-- For a single-DEFAULT case whose binder is actually used (i.e. not a
-- pure seq), normalise the case binder's type through type families,
-- casting the scrutinee and introducing a fresh binder at the
-- normalised type so the alternatives can exploit its concrete form.
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
  | not (isDeadBinder case_bndr)  -- Not a pure seq!  See Note [Improving seq]
  , Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
  = do { case_bndr2 <- newId (fsLit "nt") ty2
       ; let rhs  = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
             env2 = extendIdSubst env case_bndr rhs
       ; return (env2, scrut `Cast` co, case_bndr2) }

-- Otherwise: no improvement; return everything unchanged
improveSeq _ env scrut _ case_bndr1 _
  = return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
         -> Maybe OutExpr  -- The scrutinee
         -> [AltCon]       -- These constructors can't be present when
                           -- matching the DEFAULT alternative
         -> OutId          -- The case binder
         -> SimplCont
         -> InAlt
         -> SimplM OutAlt

simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { let env' = addBinderUnfolding env case_bndr'
                                        (mkOtherCon imposs_deflt_cons)
                -- Record the constructors that the case-binder *can't* be.
        ; rhs' <- simplExprC env' rhs cont'
        ; return (DEFAULT, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
        ; rhs' <- simplExprC env' rhs cont'
        ; return (LitAlt lit, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
  = do  {       -- Deal with the pattern-bound variables
                -- Mark the ones that are in ! positions in the
                -- data constructor as certainly-evaluated.
                -- NB: simplLamBinders preserves this eval info
        ; let vs_with_evals = add_evals (dataConRepStrictness con)
        ; (env', vs') <- simplLamBndrs env vs_with_evals

                -- Bind the case-binder to (con args)
        ; let inst_tys' = tyConAppArgs (idType case_bndr')
              con_app :: OutExpr
              con_app   = mkConApp2 con inst_tys' vs'

        ; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
        ; rhs' <- simplExprC env'' rhs cont'
        ; return (DataAlt con, vs', rhs') }
  where
        -- add_evals records the evaluated-ness of the bound variables of
        -- a case pattern.  This is *important*.  Consider
        --      data T = T !Int !Int
        --
        --      case x of { T a b -> T (a+1) b }
        --
        -- We really must record that b is already evaluated so that we don't
        -- go and re-evaluate it when constructing the result.
        -- See Note [Data-con worker strictness] in MkId.hs
    add_evals the_strs
        = go vs the_strs
        where
          go [] [] = []
          -- Type variables carry no strictness mark; pass them through
          go (v:vs') strs | isTyVar v = v : go vs' strs
          go (v:vs') (str:strs)
            | isMarkedStrict str = eval v : go vs' strs
            | otherwise          = zap v  : go vs' strs
          -- Lengths out of sync: report everything that might explain why
          go _ _ = pprPanic "cat_evals"
                     (ppr con $$
                      ppr vs  $$
                      ppr_with_length the_strs $$
                      ppr_with_length (dataConRepArgTys con) $$
                      ppr_with_length (dataConRepStrictness con))
            where
              ppr_with_length list
                = ppr list <+> parens (text "length =" <+> ppr (length list))
                -- NB: If this panic triggers, note that
                --     NoStrictnessMark doesn't print!

    zap v  = zapIdOccInfo v   -- See Note [Case alternative occ info]
    eval v = zap v `setIdUnfolding` evaldUnfolding
-- | Record, inside one case alternative, what the case binder — and,
-- when the scrutinee is a (possibly cast) variable, the scrutinee
-- itself — is known to be.  See Note [Add unfolding for scrutinee].
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
  = do { dflags <- getDynFlags
       ; let con_app_unf = mkSimpleUnfolding dflags con_app
             env1 = addBinderUnfolding env case_bndr con_app_unf

             -- See Note [Add unfolding for scrutinee]
             env2 = case scrut of
                      Just (Var v)           -> addBinderUnfolding env1 v con_app_unf
                      Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
                                                mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
                      _                      -> env1

       ; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
       ; return env2 }
-- | Attach an unfolding to a binder in the in-scope set.  In a debug
-- build, warn if the unfolding template's type disagrees with the
-- binder's type (which would indicate an upstream bug).
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
  | debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
  = WARN( not (eqType (idType bndr) (exprType tmpl)),
          ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
    modifyInScope env (bndr `setIdUnfolding` unf)

  | otherwise
  = modifyInScope env (bndr `setIdUnfolding` unf)
-- | Possibly zap the occurrence info on a pattern binder.
-- Consider   case e of b { (a,b) -> ... }
-- If we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b.
zapBndrOccInfo :: Bool -> Id -> Id
zapBndrOccInfo keep_occ_info pat_id =
  if keep_occ_info
     then pat_id
     else zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a)
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
         -> OutExpr                             -- The scrutinee
         -> DataCon -> [OutType] -> [OutExpr]   -- The scrutinee (in pieces)
         -> InId -> [InBndr] -> InExpr          -- The alternative
         -> SimplCont
         -> SimplM (SimplEnv, OutExpr)
-- We know the scrutinee is an application of data constructor dc, so
-- bind the pattern binders to the constructor's arguments, bind the
-- case binder, and simplify the chosen alternative's RHS.
knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
  = do  { env'  <- bind_args env bs dc_args
        ; env'' <- bind_case_bndr env'
        ; simplExprF env'' rhs cont }
  where
    zap_occ = zapBndrOccInfo (isDeadBinder bndr)    -- bndr is an InId

                  -- Ugh!
    bind_args env' [] _  = return env'

    -- Type argument: extend the type substitution
    bind_args env' (b:bs') (Type ty : args)
      = ASSERT( isTyVar b )
        bind_args (extendTCvSubst env' b ty) bs' args

    -- Coercion argument: likewise, via the type/coercion substitution
    bind_args env' (b:bs') (Coercion co : args)
      = ASSERT( isCoVar b )
        bind_args (extendTCvSubst env' b (mkCoercionTy co)) bs' args

    -- Value argument: bind it with simplNonRecX
    bind_args env' (b:bs') (arg : args)
      = ASSERT( isId b )
        do { let b' = zap_occ b
             -- Note that the binder might be "dead", because it doesn't
             -- occur in the RHS; and simplNonRecX may therefore discard
             -- it via postInlineUnconditionally.
             -- Nevertheless we must keep it if the case-binder is alive,
             -- because it may be used in the con_app.  See Note [knownCon occ info]
           ; env'' <- simplNonRecX env' b' arg  -- arg satisfies let/app invariant
           ; bind_args env'' bs' args }

    -- Binders and arguments out of sync: something went badly wrong upstream
    bind_args _ _ _ =
      pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
                             text "scrut:" <+> ppr scrut

       -- It's useful to bind bndr to scrut, rather than to a fresh
       -- binding      x = Con arg1 .. argn
       -- because very often the scrut is a variable, so we avoid
       -- creating, and then subsequently eliminating, a let-binding
       -- BUT, if scrut is not a variable, we must be careful
       -- about duplicating the arg redexes; in that case, make
       -- a new con-app from the args
    bind_case_bndr env
      | isDeadBinder bndr   = return env
      | exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
      | otherwise           = do { dc_args <- mapM (simplVar env) bs
                                         -- dc_ty_args are already OutTypes,
                                         -- but bs are InBndrs
                                 ; let con_app = Var (dataConWorkId dc)
                                                 `mkTyApps` dc_ty_args
                                                 `mkApps`   dc_args
                                 ; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
                -- This isn't strictly an error, although it is unusual.
                -- It's possible that the simplifier might "see" that
                -- an inner case has no accessible alternatives before
                -- it "sees" that the entire branch of an outer case is
                -- inaccessible.  So we simply put an error case here instead.
missingAlt env case_bndr _ cont
  = WARN( True, text "missingAlt" <+> ppr case_bndr )
    return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
                -> [InAlt] -> SimplCont
                -> SimplM (SimplEnv,
                           SimplCont,   -- Dupable part
                           SimplCont)   -- Non-dupable part
-- We are considering
--     K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
--       a) Kdup can be duplicated
--       b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
--          Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable

prepareCaseCont env alts cont
  | not (sm_case_case (getMode env))
  = return (env, mkBoringStop (contHoleType cont), cont)
  | not (many_alts alts)
  = return (env, cont, mkBoringStop (contResultType cont))
  | otherwise
  = mkDupableCont env cont
  where
    -- True iff there are strictly more than one non-bottoming
    -- alternatives.  See Note [Bottom alternatives]: alternatives whose
    -- RHS is bottom will disappear, so they don't count.
    many_alts :: [InAlt] -> Bool
    many_alts as = case filter (not . is_bot_alt) as of
                     (_:_:_) -> True
                     _       -> False

    is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error ..)
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
mkDupableCont :: SimplEnv -> SimplCont
              -> SimplM (SimplEnv, SimplCont, SimplCont)
-- Split a continuation into a duplicable prefix and a non-duplicable
-- suffix, possibly adding bindings (join points, trivialised args) to
-- the returned SimplEnv.  See prepareCaseCont for the specification.

mkDupableCont env cont
  | contIsDupable cont
  = return (env, cont, mkBoringStop (contResultType cont))

mkDupableCont _ (Stop {}) = panic "mkDupableCont"     -- Handled by previous eqn

-- A cast is cheap; push it into the dupable part
mkDupableCont env (CastIt ty cont)
  = do  { (env', dup, nodup) <- mkDupableCont env cont
        ; return (env', CastIt ty dup, nodup) }

-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env cont@(StrictBind {})
  = return (env, mkBoringStop (contHoleType cont), cont)
        -- See Note [Duplicating StrictBind]

mkDupableCont env (StrictArg info cci cont)
        -- See Note [Duplicating StrictArg]
  = do { (env', dup, nodup) <- mkDupableCont env cont
       ; (env'', args')     <- mapAccumLM makeTrivialArg env' (ai_args info)
       ; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }

-- Type application: always cheap to duplicate
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
  = do  { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
        ; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }

mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
  =     -- e.g.         [...hole...] (...arg...)
        --      ==>
        --              let a = ...arg...
        --              in [...hole...] a
    do  { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
        ; (_, se', arg') <- simplArg env' dup se arg
        ; (env'', arg'') <- makeTrivial NotTopLevel env' arg'
        ; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
                                    , sc_dup = OkToDup, sc_cont = dup_cont }
        ; return (env'', app_cont, nodup_cont) }

mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
--  See Note [Single-alternative case]
--  | not (exprIsDupable rhs && contIsDupable case_cont)
--  | not (isDeadBinder case_bndr)
  | all isDeadBinder bs  -- InIds
    && not (isUnLiftedType (idType case_bndr))
    -- Note [Single-alternative-unlifted]
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
                          , sc_env = se, sc_cont = cont })
  =     -- e.g.         (case [...hole...] of { pi -> ei })
        --      ===>
        --              let ji = \xij -> ei
        --              in case [...hole...] of { pi -> ji xij }
    do  { tick (CaseOfCase case_bndr)
        ; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
                -- NB: We call prepareCaseCont here.  If there is only one
                -- alternative, then dup_cont may be big, but that's ok
                -- because we push it into the single alternative, and then
                -- use mkDupableAlt to turn that simplified alternative into
                -- a join point if it's too big to duplicate.
                -- And this is important: see Note [Fusing case continuations]

        ; let alt_env = se `setInScope` env'

        ; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
        ; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
          -- Safe to say that there are no handled-cons for the DEFAULT case
                -- NB: simplBinder does not zap deadness occ-info, so
                -- a dead case_bndr' will still advertise its deadness
                -- This is really important because in
                --      case e of b { (# p,q #) -> ... }
                -- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
                -- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
                -- In the new alts we build, we have the new case binder, so it must retain
                -- its deadness.
          -- NB: we don't use alt_env further; it has the substEnv for
          --     the alternatives, and we don't want that

        ; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
        ; return (env'',  -- Note [Duplicated env]
                  Select { sc_dup = OkToDup
                         , sc_bndr = case_bndr', sc_alts = alts''
                         , sc_env = zapSubstEnv env''
                         , sc_cont = mkBoringStop (contHoleType nodup_cont) },
                  nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
              -> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives
-- Threads the environment left-to-right through the alternatives,
-- making each one duplicable in turn (see mkDupableAlt).
mkDupableAlts env case_bndr' the_alts
  = mapAccumLM dup_one env the_alts
  where
    dup_one cur_env alt = mkDupableAlt cur_env case_bndr' alt
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
             -> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
-- Make one case alternative duplicable: a small RHS is left alone;
-- otherwise the RHS is floated out as a join point binding (added to
-- the SimplEnv) and the alternative's RHS becomes a cheap call to it.
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
  dflags <- getDynFlags
  if exprIsDupable dflags rhs'  -- Note [Small alternative rhs]
    then return (env, (con, bndrs', rhs'))
    else
      do { let rhs_ty'  = exprType rhs'
               scrut_ty = idType case_bndr
               case_bndr_w_unf
                 = case con of
                     DEFAULT    -> case_bndr
                     DataAlt dc -> setIdUnfolding case_bndr unf
                       where
                         -- See Note [Case binders and join points]
                         unf = mkInlineUnfolding Nothing rhs
                         rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'

                     LitAlt {} -> WARN( True, text "mkDupableAlt"
                                              <+> ppr case_bndr <+> ppr con )
                                  case_bndr
                       -- The case binder is alive but trivial, so why has
                       -- it not been substituted away?

               used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
                           | otherwise              = bndrs' ++ [case_bndr_w_unf]

               abstract_over bndr
                 | isTyVar bndr = True  -- Abstract over all type variables just in case
                 | otherwise    = not (isDeadBinder bndr)
                   -- The deadness info on the new Ids is preserved by simplBinders

         ; (final_bndrs', final_args)   -- Note [Join point abstraction]
             <- if (any isId used_bndrs')
                then return (used_bndrs', varsToCoreExprs used_bndrs')
                else do { rw_id <- newId (fsLit "w") voidPrimTy
                        ; return ([setOneShotLambda rw_id], [Var voidPrimId]) }

         ; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty')
                -- Note [Funky mkPiTypes]

         ; let  -- We make the lambdas into one-shot-lambdas.  The
                -- join point is sure to be applied at most once, and doing so
                -- prevents the body of the join point being floated out by
                -- the full laziness pass
                really_final_bndrs     = map one_shot final_bndrs'
                one_shot v | isId v    = setOneShotLambda v
                           | otherwise = v
                join_rhs   = mkLams really_final_bndrs rhs'
                join_arity = exprArity join_rhs
                join_call  = mkApps (Var join_bndr) final_args

         ; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
         ; return (env', (con, bndrs', join_call)) }
        -- See Note [Duplicated env]
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lambda-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information
However, if it *is* dupable, we return the *un* simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkPiTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkPiTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnLiftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r)
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
-- | Choose the unfolding to attach to a let-bound Id whose RHS has just
-- been simplified.  A stable (INLINE/INLINABLE) unfolding is
-- re-simplified via 'simplUnfolding'; otherwise a fresh unfolding is
-- built from the new RHS.
simplLetUnfolding :: SimplEnv-> TopLevelFlag
                  -> InId
                  -> OutExpr
                  -> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
  | isStableUnfolding unf
  = simplUnfolding env top_lvl id unf
  | otherwise
  = bottoming `seq`  -- See Note [Force bottoming field]
    do { dflags <- getDynFlags
       ; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
            -- We make an unfolding *even for loop-breakers*.
            -- Reason: (a) It might be useful to know that they are WHNF
            --         (b) In TidyPgm we currently assume that, if we want to
            --             expose the unfolding then indeed we *have* an unfolding
            --             to expose.  (We could instead use the RHS, but currently
            --             we don't.)  The simple thing is always to have one.
  where
    bottoming = isBottomingId id
-- | Re-simplify an existing unfolding in the current environment.
-- DFun and stable Core unfoldings are simplified in "rule mode";
-- unstable Core unfoldings are discarded.
-- Note [Setting the new unfolding]
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
simplUnfolding env top_lvl id unf
  = case unf of
      NoUnfolding -> return unf
      OtherCon {} -> return unf

      DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
        -> do { (env', bndrs') <- simplBinders rule_env bndrs
              ; args' <- mapM (simplExpr env') args
              ; return (mkDFunUnfolding bndrs' con args') }

      CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
        | isStableSource src
        -> do { expr' <- simplExpr rule_env expr
              ; case guide of
                  UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok }  -- Happens for INLINE things
                    -> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
                                            , ug_boring_ok = inlineBoringOk expr' }
                       -- Refresh the boring-ok flag, in case expr'
                       -- has got small. This happens, notably in the inlinings
                       -- for dfuns for single-method classes; see
                       -- Note [Single-method classes] in TcInstDcls.
                       -- A test case is Trac #4138
                       in return (mkCoreUnfolding src is_top_lvl expr' guide')
                       -- See Note [Top-level flag on inline rules] in CoreUnfold

                  _other  -- Happens for INLINABLE things
                    -> bottoming `seq`  -- See Note [Force bottoming field]
                       do { dflags <- getDynFlags
                          ; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
                -- If the guidance is UnfIfGoodArgs, this is an INLINABLE
                -- unfolding, and we need to make sure the guidance is kept up
                -- to date with respect to any changes in the unfolding.

        | otherwise -> return noUnfolding  -- Discard unstable unfoldings
  where
    bottoming  = isBottomingId id
    is_top_lvl = isTopLevel top_lvl
    act        = idInlineActivation id
    rule_env   = updMode (updModeForStableUnfoldings act) env
       -- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
-}
-- | Simplify the RULES attached to @in_id@ and transfer them onto the
-- freshly simplified binder @out_id@; see Note [Rules in a letrec].
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back onto the new binder
addBndrRules env in_id out_id
  | null old_rules
  = return (env, out_id)
  | otherwise
  = do { new_rules <- simplRules env (Just (idName out_id)) old_rules
       ; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules
       ; return (modifyInScope env final_id, final_id) }
  where
    old_rules = ruleInfoRules (idSpecialisation in_id)
-- | Simplify a list of rewrite rules: binders, argument templates and
-- RHS are simplified in rule mode; built-in rules are left untouched.
-- When @mb_new_nm@ is given it replaces each rule's function name.
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules
  = mapM simpl_rule rules
  where
    -- Built-in rules carry no syntax to simplify.
    simpl_rule rule@(BuiltinRule {})
      = return rule

    simpl_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args
                          , ru_fn = fn_name, ru_rhs = rhs })
      = do { (env', bndrs') <- simplBinders env bndrs
           ; let rule_env = updMode updModeForRules env'
           ; args' <- mapM (simplExpr rule_env) args
           ; rhs' <- simplExpr rule_env rhs
           ; return (rule { ru_bndrs = bndrs'
                          , ru_fn = mb_new_nm `orElse` fn_name
                          , ru_args = args'
                          , ru_rhs = rhs' }) }
| gridaphobe/ghc | compiler/simplCore/Simplify.hs | bsd-3-clause | 123,812 | 20 | 25 | 38,015 | 15,365 | 8,135 | 7,230 | -1 | -1 |
{-
Module : Main
Description : UI and top level game loop
Module which handles UI and top level game loop.
-}
module Main(
main
, module Exported
) where
import Pentago.Data.Matrix as Exported
import Pentago.Data.Pentago as Exported hiding (Player)
import Pentago.Data.Tree as Exported
import Pentago.AI.MinMax as Exported
import qualified Pentago.AI.Pentago as AP
import Control.Applicative
import Control.Monad
import Control.Monad.State
import Data.Char
import Text.ParserCombinators.Parsec
import System.Random
-- | Concrete game-state representation used throughout the UI.
type GameStateType = SmartGameState

-- | Starting position for every new game session.
initialGameState :: GameStateType
initialGameState = initialSmartGameState

-- | Difficulty parameter handed to 'AP.trivialAIPlayer'
-- (presumably a search depth — confirm in Pentago.AI.Pentago).
aiDifficulty :: Int
aiDifficulty = 3
-- | Entry point: run the main menu with the default configuration of
-- one human player and one AI player.
main :: IO ()
main = fst <$> runStateT mainMenu
  (MainMenuState
    (Player humanPlayerWrapper "Human 0")
    (Player (aiPlayerWrapper $ AP.trivialAIPlayer aiDifficulty) "AI 1"))
-- main = trialGame

-- |IO Monad which runs a game between two AI players.
{- trialGame = runStateT runGame
  $ SessionState initialGameState (mkStdGen 0)
    (Player (aiPlayerWrapper $ AP.trivialAIPlayer 3) "AI 0")
    (Player (aiPlayerWrapper $ AP.trivialAIPlayer 3) "AI 1") -}
-- main menu
-- | Player configuration carried between menu screens.
data MainMenuState = MainMenuState {
  firstPlayer :: Player GameStateType,   -- ^ player moving first
  secondPlayer :: Player GameStateType   -- ^ player moving second
}
-- | Text of the top-level menu.
mainMenuString :: String
mainMenuString =
  "1) Start game" ++ "\n"
  ++ "2) Configure" ++ "\n"
  ++ "3) Exit" ++ "\n"
-- | Top-level menu loop: start a game, open the configuration screen,
-- or exit.
--
-- Dispatches on the first character of the input line.  An empty line
-- (or any unrecognized option) exits cleanly instead of crashing on
-- 'head' as the previous version did.
mainMenu :: StateT MainMenuState IO ()
mainMenu = do
  liftIO $ putStr mainMenuString
  line <- liftIO getLine
  liftIO $ putStrLn ""
  case line of
    ('1':_) -> do
      -- Run one full game session with a fresh random seed, then
      -- return to the menu.
      firstPlayer' <- firstPlayer <$> get
      secondPlayer' <- secondPlayer <$> get
      lift $ do
        stdGen <- newStdGen
        _ <- runStateT runGame
          $ SessionState initialGameState stdGen firstPlayer' secondPlayer'
        return ()
      mainMenu
    ('2':_) -> do
      -- Let the user reconfigure the players, then show the menu again.
      configurationMenu
      mainMenu
    _ -> return ()  -- '3' or anything else: quit
-- configuration menu
-- | Toggle a player slot between a human player and the trivial AI,
-- preserving the slot index encoded in the player's name
-- (e.g. "Human 0" becomes "AI 0" and vice versa).
switchPlayer :: (GameState s) => Player s -> Player s
switchPlayer player
  | kind == "Human" =
      Player (aiPlayerWrapper $ AP.trivialAIPlayer aiDifficulty) ("AI " ++ idx)
  | otherwise =
      Player humanPlayerWrapper ("Human " ++ idx)
  where
    -- A player name is expected to be exactly "<kind> <index>".
    [kind, idx] = words (name player)
-- | Text of the configuration menu.
configurationMenuString :: String
configurationMenuString =
  "1) Switch first player" ++ "\n"
  ++ "2) Switch second player" ++ "\n"
  ++ "3) Go to main menu" ++ "\n"
-- | Print the names of the currently configured players.
showCurrentState :: MainMenuState -> IO ()
showCurrentState mainMenuState = do
  putStrLn $ "1. player: " ++ (name . firstPlayer $ mainMenuState)
  putStrLn $ "2. player: " ++ (name . secondPlayer $ mainMenuState)
-- | Show the configuration menu and read the user's choice (the first
-- character of the input line).
--
-- Re-prompts on an empty line instead of crashing on 'head' as the
-- previous version did.
configurationMenuMainLoop :: IO Char
configurationMenuMainLoop = do
  putStr configurationMenuString
  line <- getLine
  case line of
    (c:_) -> return c
    []    -> configurationMenuMainLoop
-- | Configuration menu allowing the user to switch each player slot
-- between human and AI.  Option '1' toggles the first player, '2' the
-- second; anything else returns to the caller.
configurationMenu :: StateT MainMenuState IO ()
configurationMenu = do
  menuState <- get
  choice <- lift $ do
    showCurrentState menuState
    putStrLn ""
    option <- configurationMenuMainLoop
    putStrLn ""
    return option
  case choice of
    '1' -> do
      put menuState { firstPlayer = switchPlayer (firstPlayer menuState) }
      configurationMenu
    '2' -> do
      put menuState { secondPlayer = switchPlayer (secondPlayer menuState) }
      configurationMenu
    _ -> return ()
-- runGame
-- | A participant in a game session: a move-choosing function plus a
-- display name.
data Player s = Player {
  playerWrapper :: PlayerWrapper s -- ^Wrapper for player function
  , name :: String -- ^Human readable player name
}
-- | Everything needed to run one game session.
data SessionState = SessionState {
  gameState :: GameStateType,           -- ^ current board state
  randomGen :: StdGen,                  -- ^ RNG threaded through AI moves
  curPlayer :: Player GameStateType,    -- ^ player whose turn it is
  nextPlayer :: Player GameStateType    -- ^ player moving afterwards
}
-- | Runs a game between two players, displaying the current board
-- between moves.  When the game is over, announces the result;
-- otherwise asks the current player for a move and recurses with the
-- players swapped.
runGame :: StateT SessionState IO ()
runGame = do
  sessionState <- get
  let curGameState = gameState sessionState
  liftIO . putStr . prettyShowBoard . getBoardArray $ curGameState
  if isFinished curGameState
    then
      let
        result = getResult curGameState
        winMessage = case result of
          Just Draw -> "The game has ended in a draw."
          Just WhiteWin -> "The white player has won."
          Just BlackWin -> "The black player has won."
          Nothing -> error "getResult has returned Nothing."
      in
        liftIO . putStrLn $ winMessage
    else do
      let curPlayerWrapper = playerWrapper . curPlayer $ sessionState
      -- Let the current player (human or AI) choose the next state,
      -- threading the RNG through the player wrapper.
      (newGameState, newPlayerState) <- liftIO
        . runStateT (curPlayerWrapper curGameState)
        $ randomGen sessionState
      -- Swap current and next player for the following turn.
      put $ SessionState
        newGameState
        newPlayerState
        (nextPlayer sessionState)
        (curPlayer sessionState)
      runGame
-- | Monad shared by all player wrappers: IO plus a threaded 'StdGen'.
type PlayerWrapperMonad = StateT StdGen IO

-- |Wrapper for Pentago.AI.Pentago.Player function which unifies monads used by
-- AI and human player.
type PlayerWrapper s = AP.Player PlayerWrapperMonad s
-- | Adapt a pure AI player (which threads a 'StdGen' through the
-- 'State' monad) to the shared 'PlayerWrapper' interface.
aiPlayerWrapper :: (GameState s) => AP.AIPlayer s StdGen -> PlayerWrapper s
aiPlayerWrapper aiPlayer board = do
  (move, gen') <- runState (aiPlayer board) <$> get
  put gen'
  return move
-- | Ask a human for a move on standard input (showing the usage help
-- first) and apply it to the current game state.
humanPlayer :: (GameState s) => AP.HumanPlayer s
humanPlayer currentGameState = do
  putStrLn moveHelp
  moveOrder <- readMoveOrder
  return $ makeMove moveOrder currentGameState
-- | Lift the human player into the shared 'PlayerWrapper' monad.
humanPlayerWrapper :: (GameState s) => PlayerWrapper s
humanPlayerWrapper = lift . humanPlayer
-- | Usage message shown to a human player before each move.
--
-- Fixed: the previous text ended with a stray ']' ("{L,R}]") left over
-- from an earlier bracketed phrasing.
moveHelp :: String
moveHelp = "Provide move order of form posX posY quadrant rotation, "
  ++ "where pos in [0,5], quadrant in {RT, LT, LB, RB}, rotation in {L,R}"
-- | Parse a single board coordinate: one digit in the range [0,5].
parsePosition :: Parser Int
parsePosition = do
  digitChar <- digit
  let position = ord digitChar - ord '0'
  if position <= 5
    then return position
    else fail "Read position is too large."
-- | Parse a quadrant name: one of RT, LT, LB, RB (right/left,
-- top/bottom).
parseQuadrant :: Parser Quadrant
parseQuadrant = do
  lr <- oneOf "RL"
  tb <- oneOf "TB"
  case [lr, tb] of
    "RT" -> return RightTop
    "LT" -> return LeftTop
    "LB" -> return LeftBottom
    _    -> return RightBottom  -- only "RB" remains after oneOf
-- | Parse a rotation direction: 'R' (right) or 'L' (left).
parseRotation :: Parser RotationDirection
parseRotation = do
  direction <- oneOf "RL"
  case direction of
    'R' -> return RightRotation
    _   -> return LeftRotation
-- | Parse a full move order: two positions, a quadrant and a rotation,
-- separated by optional whitespace.
parseMoveOrder :: Parser MoveOrder
parseMoveOrder = do
  spaces
  posX <- parsePosition
  spaces
  posY <- parsePosition
  spaces
  quadrant <- parseQuadrant
  spaces
  rotation <- parseRotation
  spaces
  return ((posX, posY), (quadrant, rotation))
-- | Read and parse a move order from stdin, printing the parse error
-- and re-prompting until the input parses.
readMoveOrder :: IO MoveOrder
readMoveOrder = do
  line <- getLine
  case parse parseMoveOrder "MoveOrder Parser" line of
    Left err -> print err >> readMoveOrder
    Right moveOrder -> return moveOrder
| gregorias/Pentago | src/Main.hs | bsd-3-clause | 6,757 | 0 | 15 | 1,416 | 1,654 | 839 | 815 | 188 | 5 |
-- | Module Parser transforms text files in a Tiles array (strongly typed).
module Parser where
import Hyrule
import Data.Array
import Text.Trifecta
import Control.Applicative ((<|>))
-- NOTE(review): neither error type is referenced in this module;
-- presumably placeholders for richer parse errors — confirm before
-- removing.
data TerrainError = TerrainError
data ObjectError = ObjectError
-- | Parse one terrain character; the accepted alphabet depends on the
-- area type.  Fails with "Terreno desconhecido" on anything else.
parseTerrain :: AreaType -> Parser Terrain
parseTerrain Overworld = do
  symbol <- letter
  case lookup symbol overworldTable of
    Just terrain -> return terrain
    Nothing      -> fail "Terreno desconhecido"
  where
    overworldTable =
      [ ('g', Grass), ('s', Sand), ('f', Forest)
      , ('m', Mountain), ('w', Water) ]
parseTerrain (Dungeon _) = do
  symbol <- letter
  case symbol of
    'd' -> return WDungeon
    'n' -> return NWDungeon
    _   -> fail "Terreno desconhecido"
-- | Parse one object character; the accepted alphabet depends on the
-- area type.  Fails with "Objeto desconhecido" on anything else.
parseObject :: AreaType -> Parser Object
parseObject Overworld = do
  symbol <- letter <|> digit <|> char '_'
  case symbol of
    'S' -> return MasterSword
    '_' -> return Empty
    'H' -> return Home
    'D' -> return DummyGate
    '1' -> return (Gate (Dungeon 1))
    '2' -> return (Gate (Dungeon 2))
    '3' -> return (Gate (Dungeon 3))
    _   -> fail "Objeto desconhecido"
parseObject (Dungeon _) = do
  symbol <- letter <|> char '_'
  case symbol of
    'P' -> return Pendant
    'O' -> return (Gate Overworld)
    '_' -> return Empty
    _   -> fail "Objeto desconhecido"
-- | Parse an @areaSize@ x @areaSize@ grid of tiles into an array
-- indexed from (0,0), failing if the row or column count mismatches.
parseTiles :: AreaType -> AreaSize -> Parser (Array Position Tile)
parseTiles areaType' areaSize = do
  rows <- some parseLine
  if length rows == areaSize
    then return $ listArray ((0,0), (areaSize - 1, areaSize - 1)) $ concat rows
    else fail $ "O mapa deve possuir " ++ show areaSize ++ " linhas"
  where
    -- One newline-terminated row of exactly areaSize tiles.
    parseLine = do
      tiles <- some parseTile
      newline
      let row = tiles
      if length row == areaSize
        then return row <?> "Row of Tiles"
        else fail $ "O mapa deve possuir " ++ show areaSize ++ " colunas"
    -- A tile is a terrain character followed by an object character.
    parseTile = do
      terrain' <- parseTerrain areaType'
      object' <- parseObject areaType'
      (return $ Tile terrain' object') <?> "Tile"
-- | Parse a whole map from text.  Calls 'error' (with the parser's
-- diagnostic) if parsing fails, so callers get no recoverable failure.
parseMap :: AreaType -> AreaSize -> String -> Area
parseMap areaType' areaSize str = extract $ parseString (parseTiles areaType' areaSize) mempty str
  where
    extract (Success p) = Area areaType' p
    extract (Failure e) = error $ "Nao foi possível realizar o parser do mapa. Erro: " ++ show e
| trxeste/wrk | haskell/TrabalhoIA/src/Parser.hs | bsd-3-clause | 2,283 | 0 | 13 | 569 | 728 | 350 | 378 | 63 | 11 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Frontend
( module Data.Patch
, Syntactic
, Internal
, FeldDomain
, Data
, Syntax
, module Frontend
, FeldOpts
, defaultFeldOpts
, reifyFeld
, reifyFeldM
, reifyFeldUnOpt
, showExpr
, printExpr
, printExpr2
, printExprWith
, printExpr2With
, printExprUnOpt
, drawUntyped
, drawUntypedWith
, showAST
, drawAST
, drawASTUnOpt
, writeHtmlAST
, showDecor
, drawDecor
, writeHtmlDecor
, eval
, evalTarget
, desugar
, sugar
, resugar
-- * QuickCheck
, (===>)
, (====)
-- * Type constraints
, tData
, tArr1
, tArr2
-- * Functions
, ilog2
, nlz
) where
import Prelude as P
import Control.Monad.State
import Test.QuickCheck
import Data.Patch
import Data.Tree.View
import qualified Data.Map as Map
import Language.Syntactic hiding
(desugar, sugar, resugar, showAST, drawAST, writeHtmlAST, stringTree)
import qualified Language.Syntactic as Syntactic
import qualified Language.Syntactic.Constructs.Decoration as Syntactic
import Language.Syntactic.Constructs.Binding
import Language.Syntactic.Constructs.Binding.HigherOrder
import Language.Syntactic.Sharing.SimpleCodeMotion
import Language.Syntactic.Sharing.CodeMotion2
import Language.Syntactic.Sharing.SimpleCodeMotion3
import Feldspar.Range
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Middleend.FromTyped
import Feldspar.Core.UntypedRepresentation (stringTree)
import Feldspar.Core.Constructs
import Feldspar.Core.Constructs.Binding (cLambda)
import Feldspar.Core.Frontend.Array as Frontend
import Feldspar.Core.Frontend.Binding as Frontend
import Feldspar.Core.Frontend.Bits as Frontend
import Feldspar.Core.Frontend.Complex as Frontend
import Feldspar.Core.Frontend.Condition as Frontend
import Feldspar.Core.Frontend.Conversion as Frontend
import Feldspar.Core.Frontend.Elements as Frontend
import Feldspar.Core.Frontend.Eq as Frontend
import Feldspar.Core.Frontend.Error as Frontend
import Feldspar.Core.Frontend.FFI as Frontend
import Feldspar.Core.Frontend.Floating as Frontend
import Feldspar.Core.Frontend.Fractional as Frontend
import Feldspar.Core.Frontend.Future as Frontend
import Feldspar.Core.Frontend.Integral as Frontend
import Feldspar.Core.Frontend.Literal as Frontend
import Feldspar.Core.Frontend.Logic as Frontend
import Feldspar.Core.Frontend.Loop as Frontend
import Feldspar.Core.Frontend.NoInline as Frontend
import Feldspar.Core.Frontend.Num as Frontend
import Feldspar.Core.Frontend.Ord as Frontend
import Feldspar.Core.Frontend.Par as Frontend
import Feldspar.Core.Frontend.Save as Frontend
import Feldspar.Core.Frontend.SizeProp as Frontend
import Feldspar.Core.Frontend.SourceInfo as Frontend
import Feldspar.Core.Frontend.Switch as Frontend
import Feldspar.Core.Frontend.RealFloat as Frontend
import Feldspar.Core.Frontend.Tuple as Frontend
-- | Projection dictionary for the Info-decorated Feldspar domain, used
-- by the code-motion passes to recognise variables and lambdas under
-- the decoration.
prjDict :: PrjDict (Decor Info FeldDom)
prjDict = PrjDict
  (prjVariable prjDictFO . decorExpr)
  (prjLambda prjDictFO . decorExpr)
-- | Build an injection dictionary for let-binding a shared
-- sub-expression @a@ in context @b@ during code motion.  Returns
-- 'Nothing' when sharing is not wanted: unsharable constructs, or a
-- function-typed context unless SICS is among the targets.
mkId :: FeldOpts -> MkInjDict (Decor Info FeldDom)
mkId opts a b
  | simpleMatch (const . sharable) a
  , Just Dict <- typeDict a
  , Dict <- exprDictSub pTypeable b
  , Info {infoType = bType} <- getInfo b
  = case bType of
      -- Do not share function-typed expressions unless compiling for SICS.
      FunType{} | P.not (SICS `inTarget` opts) -> Nothing
      _ -> Just InjDict
        { injVariable = Decor (getInfo a) . injC . c' . Variable
          -- The let-bound lambda's Info combines the sizes of the
          -- shared expression and its context.
        , injLambda = let info = ((mkInfoTy (FunType typeRep bType)) {infoSize = (infoSize (getInfo a), infoSize (getInfo b))})
                      in Decor info . injC . cLambda
        , injLet = Decor (getInfo b) $ injC Let
        }
mkId _ _ _ = Nothing
-- | Select the code-motion (sharing/hoisting) pipeline according to
-- the compilation options: with CSE enabled, run 'codeMotion2',
-- re-optimize, then the simple 'codeMotion'; otherwise use
-- 'codeMotion3'.  (No type signature in the original; the type is
-- fixed by its use in 'reifyFeldM' — TODO confirm before adding one.)
hoister opts
  | CSE `inTarget` opts
  = cm1 . optimize opts . stripDecor <=< cm2
  | otherwise = cm3
  where
    cm1 = codeMotion (simpleMatch (const . hoistOver)) prjDict (mkId opts)
    cm2 = codeMotion2 (simpleMatch (const . hoistOver)) prjDict (mkId opts)
    cm3 = codeMotion3 10 (simpleMatch (const . hoistOver)) prjDict (mkId opts) mkSubEnvDefault
-- | Reification and optimization of a Feldspar program, in a monad
-- supplying fresh variable ids: desugar, reify, specialize for the
-- target bit width, optimize, hoist shared sub-expressions, then
-- optimize again and strip the decorations.
reifyFeldM :: (SyntacticFeld a, MonadState VarId m)
           => FeldOpts
           -> BitWidth n
           -> a
           -> m (ASTF (Decor Info FeldDom) (Internal a))
reifyFeldM opts n =
  ( return
  . optimize opts
  . stripDecor
  <=< hoister opts
  . optimize opts
  . targetSpecialization n
  <=< reifyM
  . fromFeld
  . Syntactic.desugar
  )
-- Note that it's important to do 'codeMotion' after 'optimize'. There may be
-- sub-expressions that appear more than once in the original program, but
-- where 'optimize' removes all but one occurrence. If 'codeMotion' was run
-- first, these sub-expressions would be let bound, preventing subsequent
-- optimizations.
-- | Reification and optimization of a Feldspar program, starting the
-- fresh-variable supply at 0.
reifyFeld :: SyntacticFeld a
          => FeldOpts
          -> BitWidth n
          -> a
          -> ASTF (Decor Info FeldDom) (Internal a)
reifyFeld opts n = flip evalState 0 . reifyFeldM opts n
-- | Reification of a Feldspar program without any optimization or
-- code-motion passes (only target specialization).
reifyFeldUnOpt :: SyntacticFeld a
               => FeldOpts -> BitWidth n
               -> a
               -> ASTF FeldDom (Internal a)
reifyFeldUnOpt _ n = flip evalState 0 .
  ( return
  . targetSpecialization n
  <=< reifyM
  . fromFeld
  . Syntactic.desugar
  )
-- | Render an optimized expression as a string.
showExpr :: SyntacticFeld a => a -> String
showExpr = render . reifyFeld defaultFeldOpts N32

-- | Print an optimized untyped expression
printExpr2 :: SyntacticFeld a => a -> IO ()
printExpr2 = printExpr2With defaultFeldOpts

-- | Draw the untyped syntax tree using unicode art
drawUntyped :: SyntacticFeld a => a -> IO ()
drawUntyped = drawUntypedWith defaultFeldOpts

-- | Draw the untyped syntax tree using unicode art, with options
drawUntypedWith :: SyntacticFeld a => FeldOpts -> a -> IO ()
drawUntypedWith opts = drawTree . stringTree . untype opts . reifyFeld opts N32

-- | Print an optimized expression
printExpr :: SyntacticFeld a => a -> IO ()
printExpr = print . reifyFeld defaultFeldOpts N32

-- | Print an optimized untyped expression with options
printExpr2With :: SyntacticFeld a => FeldOpts -> a -> IO ()
printExpr2With opts = print . untype opts . reifyFeld opts N32

-- | Print an optimized expression with options
printExprWith :: SyntacticFeld a => FeldOpts -> a -> IO ()
printExprWith opts = print . reifyFeld opts N32

-- | Print an unoptimized expression
printExprUnOpt :: SyntacticFeld a => a -> IO ()
printExprUnOpt = print . reifyFeldUnOpt defaultFeldOpts N32

-- | Show the syntax tree using Unicode art
showAST :: SyntacticFeld a => a -> String
showAST = Syntactic.showAST . reifyFeld defaultFeldOpts N32

-- | Draw the syntax tree on the terminal using Unicode art
drawAST :: SyntacticFeld a => a -> IO ()
drawAST = Syntactic.drawAST . reifyFeld defaultFeldOpts N32

-- | Draw the unoptimized syntax tree on the terminal
drawASTUnOpt :: SyntacticFeld a => a -> IO ()
drawASTUnOpt = Syntactic.drawAST . reifyFeldUnOpt defaultFeldOpts N32

-- | Write the syntax tree to an HTML file with foldable nodes
writeHtmlAST :: SyntacticFeld a => FilePath -> a -> IO ()
writeHtmlAST file = Syntactic.writeHtmlAST file . reifyFeld defaultFeldOpts N32

-- | Show a syntax tree decorated with type and size information
showDecor :: SyntacticFeld a => a -> String
showDecor = Syntactic.showDecorWith show . reifyFeld defaultFeldOpts N32

-- | Draw a syntax tree decorated with type and size information
drawDecor :: SyntacticFeld a => a -> IO ()
drawDecor = Syntactic.drawDecorWith show . reifyFeld defaultFeldOpts N32

-- | Write the syntax tree decorated with type and size information to an HTML file with foldable nodes
writeHtmlDecor :: SyntacticFeld a => FilePath -> a -> IO ()
writeHtmlDecor file = Syntactic.writeHtmlDecorWith showInfo file . reifyFeld defaultFeldOpts N32
  where
    -- Render one decoration as a multi-line node label.
    showInfo :: Show (Info b) => Info b -> String
    showInfo Info{..} = unlines [ "Type: " ++ show infoType
                                , "Size: " ++ show infoSize
                                , "Vars: " ++ show (Map.keys infoVars)
                                , "Src: " ++ show infoSource
                                ]

-- | Evaluate a Feldspar program in the meta language.
eval :: SyntacticFeld a => a -> Internal a
eval = evalBind . reifyFeld defaultFeldOpts N32
-- | Evaluate a Feldspar program for a specific target bit width.
evalTarget
  :: ( SyntacticFeld a
     , BoundedInt (GenericInt U n)
     , BoundedInt (GenericInt S n)
     )
  => BitWidth n -> a -> Internal a
evalTarget n = evalBind . reifyFeld defaultFeldOpts n
-- TODO This doesn't work yet, because 'targetSpecialization' is not implemented

-- | Convert a Feldspar term to its 'Data' representation.
desugar :: SyntacticFeld a => a -> Data (Internal a)
desugar = Syntactic.resugar

-- | Convert a 'Data' representation back to a Feldspar term.
sugar :: SyntacticFeld a => Data (Internal a) -> a
sugar = Syntactic.resugar

-- | Convert between two Feldspar representations sharing the same
-- internal type.
resugar :: (SyntacticFeld a, SyntacticFeld b, Internal a ~ Internal b) => a -> b
resugar = Syntactic.resugar
--------------------------------------------------------------------------------
-- * QuickCheck
--------------------------------------------------------------------------------

-- Generate Feldspar literals from plain Haskell generators.
instance (Type a, Arbitrary a) => Arbitrary (Data a)
  where
    arbitrary = fmap value arbitrary

-- A Feldspar boolean is testable by evaluating it.
instance Testable (Data Bool)
  where
    property = property . eval

-- | QuickCheck implication whose premise is a Feldspar boolean.
(===>) :: Testable prop => Data Bool -> prop -> Property
a ===> b = eval a ==> b

-- | Test that two functions of the same arity have the same semantics
class Equal a
  where
    (====) :: a -> a -> Property

-- Base case: compare plain values for equality.
instance (P.Eq a, Show a) => Equal a
  where
    x ==== y = x === y

-- Inductive case: quantify over the argument and compare the results.
instance (Show a, Arbitrary a, Equal b) => Equal (a -> b)
  where
    f ==== g = property (\x -> f x ==== g x)
--------------------------------------------------------------------------------
-- * Type annotations
--------------------------------------------------------------------------------
-- | Type annotation helper: constrain a 'Data' value's element type.
tData :: Patch a a -> Patch (Data a) (Data a)
tData _ = id
-- | Type annotation helper: constrain a one-dimensional array's element type.
tArr1 :: Patch a a -> Patch (Data [a]) (Data [a])
tArr1 _ = id
-- | Type annotation helper: constrain a two-dimensional array's element type.
tArr2 :: Patch a a -> Patch (Data [[a]]) (Data [[a]])
tArr2 _ = id
--------------------------------------------------------------------------------
-- * Functions
--------------------------------------------------------------------------------
-- | Integer logarithm in base 2
--   Based on an algorithm in Hacker's Delight
--   Computed as (word size - 1) minus the number of leading zeros.
ilog2 :: (Bits a) => Data a -> Data Index
ilog2 x = bitSize x - 1 - nlz x
-- | Count leading zeros
-- Based on an algorithm in Hacker's Delight
-- Smears the highest set bit rightwards by OR-ing in right-shifts of
-- 1, 2, 4, ... (up to the bit width), then counts the zero bits of the
-- complement. The fold is over embedded expressions, so 'share' keeps
-- the generated code from duplicating intermediate results.
nlz :: (Bits a) => Data a -> Data Index
nlz x = bitCount $ complement $ foldl go x $ takeWhile (P.< bitSize' x) $ P.map (2 P.^) [(0::Integer)..]
  where
    go b s = share b $ \b' -> b' .|. (b' .>>. value s)
-- TODO share is probably not needed when observable sharing is implemented
| emwap/feldspar-language | src/Feldspar/Core/Frontend.hs | bsd-3-clause | 12,977 | 0 | 22 | 2,861 | 2,797 | 1,547 | 1,250 | -1 | -1 |
module ETA.TypeCheck.TcForeign
( tcForeignImports
, tcForeignExports
-- Low-level exports for hooks
, isForeignImport, isForeignExport
, tcFImport --, tcFExport
-- , tcForeignImports'
, tcCheckFIType, checkJavaTarget, checkForeignArgs, checkForeignRes
, normaliseFfiType
, nonIOok, mustBeIO
, checkSafe, noCheckSafe
-- , tcForeignExports'
-- , tcCheckFEType
) where
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Unique
import ETA.BasicTypes.SrcLoc
import ETA.BasicTypes.Name
import ETA.BasicTypes.VarSet
import ETA.BasicTypes.Id
import ETA.BasicTypes.RdrName
import ETA.TypeCheck.FamInst
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcHsType
import ETA.TypeCheck.TcExpr
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcType
import ETA.Prelude.TysWiredIn (unitTyCon)
import ETA.Prelude.PrelNames
import ETA.Prelude.ForeignCall
import ETA.Main.Hooks
import ETA.Main.ErrUtils
import ETA.Main.DynFlags
import ETA.Types.FamInstEnv
import ETA.Types.Type
import ETA.Types.TypeRep
import ETA.Types.Coercion
import ETA.Types.TyCon
import ETA.Debug
import ETA.HsSyn.HsSyn
import ETA.Utils.Bag
import ETA.Utils.Outputable
import ETA.Utils.FastString
import ETA.Utils.Maybes
import Data.Maybe(fromMaybe)
-- | True when the located declaration is a @foreign import@ (which
-- defines a binding).
isForeignImport :: LForeignDecl name -> Bool
isForeignImport (L _ decl) =
  case decl of
    ForeignImport {} -> True
    _                -> False
-- | True when the located declaration is a @foreign export@ (which
-- exports a binding).
isForeignExport :: LForeignDecl name -> Bool
isForeignExport (L _ decl) =
  case decl of
    ForeignExport {} -> True
    _                -> False
-- | Typecheck all foreign imports, dispatching through the
-- 'tcForeignImportsHook' so plugins can override the default
-- implementation ('tcForeignImports'').
tcForeignImports :: [LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt)
tcForeignImports decls
  = getHooked tcForeignImportsHook tcForeignImports' >>= ($ decls)
-- | Default implementation: typecheck each @foreign import@ declaration
-- and merge the global-rdr-elt bags produced for each one.
tcForeignImports' :: [LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt)
tcForeignImports' decls = do
  let imports = filter isForeignImport decls
  (ids, decls', greBags) <- mapAndUnzip3M tcFImport imports
  return (ids, decls', unionManyBags greBags)
-- | Debug helper: print a tagged 'SDoc' to stdout, rendered with the
-- current 'DynFlags'. Only referenced from commented-out trace calls in
-- this module; kept for development use.
--
-- Fix: added the missing top-level type signature (the binding was
-- previously inferred, which triggers -Wmissing-signatures and yields a
-- needlessly polymorphic type).
printDebug :: String -> SDoc -> TcM ()
printDebug h s = do
  dflags <- getDynFlags
  liftIO . putStrLn . showSDoc dflags $ (ptext $ sLit h) <+> s
-- | Typecheck a single @foreign import@ declaration: check its signature,
-- normalise the FFI type (expanding newtypes/families), validate it
-- against the calling convention, and build the local 'Id' it binds.
tcFImport :: LForeignDecl Name -> TcM (Id, LForeignDecl Id, Bag GlobalRdrElt)
tcFImport (L declLoc fi@(ForeignImport (L nameLoc name) hsType _ impDecl))
  = setSrcSpan declLoc . addErrCtxt (foreignDeclCtxt fi) $ do
      sigType <- tcHsSigType (ForSigCtxt name) hsType
      --printDebug "tcFImport: sigType" $ ppr sigType
      (normCo, normSigType, gres) <- normaliseFfiType sigType
      --printDebug "tcFImport: normSigType" $ ppr normSigType
      -- Split the normalised type into quantifiers, context and arrows;
      -- the context ('theta') carries Java class-extension constraints.
      -- NB: 'id' here shadows Prelude.id.
      let (_, ty) = tcSplitForAllTys normSigType
          (theta, ty') = tcSplitPhiTy ty
          (argTypes, resType) = tcSplitFunTys ty'
          id = mkLocalId name sigType
      traceTc "tcFIImport" $ ppr theta <+> ppr argTypes <+> ppr resType
      --printDebug "tcFImport: normSigType" $ ppr argTypes <+> ppr resType
      impDecl' <- tcCheckFIType theta argTypes resType impDecl
      -- NOTE(review): the 'undefined' fills the rebuilt declaration's type
      -- field; presumably it is never forced downstream -- TODO confirm.
      let fiDecl = ForeignImport (L nameLoc id) undefined
                     (mkSymCo normCo) impDecl'
      return (id, L declLoc fiDecl, gres)
-- Only foreign imports reach this function; anything else is a panic.
tcFImport d = pprPanic "tcFImport" (ppr d)
-- | Normalise a type for FFI purposes (expand synonyms, type families
-- and newtypes), using the family-instance environments of the current
-- typechecker state. Returns a coercion witnessing the expansion, the
-- normalised type, and any newtype constructors looked through.
normaliseFfiType :: Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
normaliseFfiType ty =
  tcGetFamInstEnvs >>= \famEnvs -> normaliseFfiType' famEnvs ty
-- | Worker for 'normaliseFfiType'. Walks the type structure, looking
-- through synonyms, FFI-visible newtypes (those whose constructor is in
-- scope) and type-family applications, producing a representational
-- coercion from the original type to the normalised one. The
-- 'RecTcChecker' guards against looping on recursive newtypes.
normaliseFfiType' :: FamInstEnvs -> Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
normaliseFfiType' env ty0 = go initRecTc ty0
  where go :: RecTcChecker -> Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
        -- Look through type synonyms first.
        go recNts ty
          | Just ty' <- coreView ty = go recNts ty'
        go recNts ty@(TyConApp tc tys)
          -- TODO: Address funPtrs
          -- IO/Java are left in place; only their arguments are normalised.
          | tcKey == ioTyConKey
          = childrenOnly False
          | tcKey == javaTyConKey
          = childrenOnly True
          -- Unwrap a newtype only if its data constructor is in scope
          -- (otherwise the FFI must not see through the abstraction).
          | isNewTyCon tc
          , Just recNts' <- checkRecTc recNts tc
          = do
              rdrEnv <- getGlobalRdrEnv
              case checkNewtypeFFI rdrEnv tc of
                Nothing -> nothing
                Just gre -> do
                  (co', ty', gres) <- go recNts' ntRhs
                  return (mkTransCo ntCo co', ty', gre `consBag` gres)
          -- Reduce type-family applications when they actually step.
          | isFamilyTyCon tc
          , (co, nty) <- normaliseTcApp env Representational tc tys
          , not (isReflCo co)
          = do (co', ty', gres) <- go recNts nty
               return (mkTransCo co co', ty', gres)
          | otherwise
          = nothing
          where tcKey = getUnique tc
                -- Normalise the argument types, keeping the head tycon.
                -- For Java the first argument is at role Nominal
                -- (presumably the class tag -- TODO confirm).
                childrenOnly isJava = do
                  xs <- mapM (go recNts) tys
                  let (cos, tys', gres) = unzip3 xs
                      cos' = zipWith3 downgradeRole (tyConRoles tc)
                               ((if isJava then [Nominal] else [])
                                ++ repeat Representational) cos
                      co' = mkTyConAppCo Representational tc cos'
                  return ( co'
                         , mkTyConApp tc tys'
                         , unionManyBags gres )
                ntCo = mkUnbranchedAxInstCo Representational (newTyConCo tc)
                         tys
                ntRhs = newTyConInstRhs tc tys
                -- Identity result: no normalisation performed.
                nothing = return (Refl Representational ty, ty, emptyBag)
        -- Structural cases: recurse into arrows and foralls; leaves are
        -- returned unchanged with a reflexive coercion.
        go recNts (FunTy ty1 ty2) = do
          (coi1, nty1, gres1) <- go recNts ty1
          (coi2, nty2, gres2) <- go recNts ty2
          return (mkFunCo Representational coi1 coi2, mkFunTy nty1 nty2,
                  gres1 `unionBags` gres2)
        go recNts (ForAllTy tyVar ty) = do
          (coi, nty, gres) <- go recNts ty
          return (mkForAllCo tyVar coi, ForAllTy tyVar nty, gres)
        go _ ty@(TyVarTy {}) = return (Refl Representational ty, ty, emptyBag)
        go _ ty@(LitTy {}) = return (Refl Representational ty, ty, emptyBag)
        go _ ty@(AppTy {}) = return (Refl Representational ty, ty, emptyBag)
-- | May the FFI look through this newtype? Only if it has exactly one
-- data constructor and that constructor is in scope; the successful
-- lookup result is returned so the import can be recorded as used.
checkNewtypeFFI :: GlobalRdrEnv -> TyCon -> Maybe GlobalRdrElt
checkNewtypeFFI rdrEnv tc =
  case tyConSingleDataCon_maybe tc of
    Just con
      | [gre] <- lookupGRE_Name rdrEnv (dataConName con) -> Just gre
    _ -> Nothing
-- | Error-message context naming the foreign declaration being checked.
foreignDeclCtxt :: ForeignDecl Name -> SDoc
foreignDeclCtxt fo
  = hang (str "When checking declaration:")
       2 (ppr fo)
-- | Validate a foreign-import declaration's type against its calling
-- convention. 'CFunction' targets support the prim and Java conventions;
-- 'CWrapper' targets only the Java convention. The bindings 'lc', 'ls',
-- 'mh' and 'src' from the pattern are currently unused.
tcCheckFIType :: ThetaType -> [Type] -> Type -> ForeignImport -> TcM ForeignImport
tcCheckFIType thetaType argTypes resType idecl@(CImport (L lc cconv) (L ls safety) mh
                                                targetSpec src)
  | CFunction target <- targetSpec
  = case cconv of
      PrimCallConv -> do
        dflags <- getDynFlags
        checkTc (xopt Opt_GHCForeignImportPrim dflags)
                (text "Use GHCForeignImportPrim to allow `foreign import prim'.")
        -- TODO: Validate the target string
        checkJavaTarget target
        -- prim imports must not carry a safe/unsafe annotation.
        checkTc (playSafe safety)
                (text $ "The safe/unsafe annotation should not be used with "
                     ++ "`foreign import prim'.")
        checkForeignArgs (isFFIPrimArgumentTy dflags) argTypes
        checkForeignRes nonIOok checkSafe (isFFIPrimResultTy dflags) resType
        return idecl
      JavaCallConv -> do
        -- TODO: Validate the target string for @new, @field
        -- TODO: Validate ThetaType
        dflags <- getDynFlags
        checkJavaTarget target
        -- Class-extension constraints widen the set of acceptable
        -- argument types.
        let javaClassVars = extendsVars thetaType
        checkForeignArgs (isFFIArgumentTy dflags safety javaClassVars) argTypes
        checkForeignRes nonIOok checkSafe (isFFIImportResultTy dflags) resType
        return idecl
      _ -> pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
  | CWrapper target isAbstract <- targetSpec
  , JavaCallConv <- cconv
  = do
      -- TODO: Validate target
      -- NOTE(review): the checks below are disabled, so 'dflags' and
      -- 'javaClassVars' are currently unused (warning fodder).
      dflags <- getDynFlags
      let javaClassVars = extendsVars thetaType
      -- TODO: Typecheck foreign wrappers properly
      -- checkForeignArgs (isFFIArgumentTy dflags safety javaClassVars) argTypes
      -- checkForeignRes nonIOok checkSafe (isFFIImportResultTy dflags) resType
      return idecl
  | otherwise = pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
tcCheckFIType _ _ _ idecl = pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
-- | Report an error (built by applying the supplied wrapper to the
-- failure document) when the 'Validity' is 'NotValid'; succeed silently
-- otherwise.
check :: Validity -> (MsgDoc -> MsgDoc) -> TcM ()
check validity mkErr =
  case validity of
    IsValid      -> return ()
    NotValid doc -> addErrTc (mkErr doc)
-- | Check every argument type of a foreign declaration against the given
-- validity predicate, reporting each failure as an illegal argument type.
-- (The predicate parameter is named 'isOk' to avoid shadowing Prelude's
-- 'pred'.)
checkForeignArgs :: (Type -> Validity) -> [Type] -> TcM ()
checkForeignArgs isOk = mapM_ checkOne
  where
    checkOne ty = check (isOk ty) (illegalForeignTyErr argument)
-- | Build the "unacceptable type" error, parameterised on whether the
-- offending position is an 'argument' or a 'result'; the extra document
-- carries the specific reason.
illegalForeignTyErr :: SDoc -> SDoc -> SDoc
illegalForeignTyErr argOrRes extra
  = hang msg 2 extra
  where
    msg = hsep [ str "Unacceptable", argOrRes
               , str "type in foreign declaration:"]
-- | Validate the result type of a foreign declaration. IO- and
-- Java-wrapped results are unwrapped and checked; a bare (non-monadic)
-- result is only allowed when 'nonIOResultOk' holds and is additionally
-- subject to Safe Haskell restrictions when 'checkSafe' holds.
-- NB: the 'checkSafe' parameter shadows the top-level 'checkSafe' flag.
checkForeignRes :: Bool -> Bool -> (Type -> Validity) -> Type -> TcM ()
checkForeignRes nonIOResultOk checkSafe predResType ty
  | Just (_, resType) <- tcSplitIOType_maybe ty
  = do
      traceTc "checkForeignRes[IO]" $ ppr resType
      check (predResType resType) (illegalForeignTyErr result)
  | Just (_, tagType, resType) <- tcSplitJavaType_maybe ty
  = do
      traceTc "checkForeignRes[Java]" $ ppr tagType <+> ppr resType
      check (predResType resType) (illegalForeignTyErr result)
  -- Case for non-IO result type with FFI Import
  | not nonIOResultOk = addErrTc
                      . illegalForeignTyErr result
                      $ str "IO result type expected"
  | otherwise = do
      traceTc "checkForeignRes[Other]" $ ppr ty
      dflags <- getDynFlags
      case predResType ty of
        -- Handle normal typecheck fail, we want to handle this first and
        -- only report safe haskell errors if the normal type check is OK.
        NotValid msg -> addErrTc $ illegalForeignTyErr result msg
        -- handle safe infer fail
        _ | checkSafe && safeInferOn dflags -> recordUnsafeInfer
        -- handle safe language typecheck fail
        _ | checkSafe && safeLanguageOn dflags ->
          addErrTc $ illegalForeignTyErr result safeHsErr
        -- success! non-IO return is fine
        _ -> return ()
  where safeHsErr = str $ "Safe Haskell is on, all FFI imports must be in the"
                       ++ " IO monad"
-- Position labels used by 'illegalForeignTyErr'.
argument, result :: SDoc
argument = text "argument"
result = text "result"
-- Readable names for the Safe-Haskell-check flag of 'checkForeignRes'.
checkSafe, noCheckSafe :: Bool
checkSafe = True
noCheckSafe = False
-- Readable names for the "non-IO result allowed" flag of 'checkForeignRes'.
nonIOok, mustBeIO :: Bool
nonIOok = True
mustBeIO = False
-- | Validate a foreign Java call target.
--
-- TODO: Actually validate the target name; every target is currently
-- accepted.
--
-- Fix: the previous definition matched only 'StaticTarget', so any other
-- 'CCallTarget' constructor crashed with a pattern-match failure at
-- runtime; the function is now total. The unused name binding is also
-- marked as such to silence -Wunused-matches.
checkJavaTarget :: CCallTarget -> TcM ()
checkJavaTarget (StaticTarget _str _ _) = return ()
-- Non-static targets are accepted as well until real validation exists.
checkJavaTarget _ = return ()
-- | Is this type GHC's special 'Any' type?
isAnyTy :: Type -> Bool
isAnyTy = isTc anyTyConKey
-- | Is the head of this type application the tycon with the given unique?
-- Returns False for non-tycon-application types.
isTc :: Unique -> Type -> Bool
isTc uniq ty = case tcSplitTyConApp_maybe ty of
  Just (tc, _) -> uniq == getUnique tc
  Nothing -> False
-- | Typecheck all foreign exports, dispatching through the
-- 'tcForeignExportsHook' so plugins can override the default
-- implementation ('tcForeignExports'').
tcForeignExports :: [LForeignDecl Name]
                 -> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt)
tcForeignExports decls =
  getHooked tcForeignExportsHook tcForeignExports' >>= ($ decls)
-- | Default implementation: typecheck each @foreign export@ declaration,
-- accumulating the generated bindings, rebuilt declarations and
-- global-rdr-elt bags.
tcForeignExports' :: [LForeignDecl Name]
                  -> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt)
tcForeignExports' decls = foldlM combine (emptyLHsBinds, [], emptyBag)
                            (filter isForeignExport decls)
  where combine (binds, fs, gres1) (L loc fe) = do
          (b, f, gres2) <- setSrcSpan loc (tcFExport fe)
          return (b `consBag` binds, L loc f : fs, gres1 `unionBags` gres2)
-- | Typecheck a single @foreign export@: check the signature, normalise
-- the FFI type, validate it, and produce a monomorphic binding for the
-- exported entry point plus the rebuilt declaration.
tcFExport :: ForeignDecl Name -> TcM (LHsBind Id, ForeignDecl Id, Bag GlobalRdrElt)
tcFExport fo@(ForeignExport (L loc nm) hs_ty _ spec)
  = addErrCtxt (foreignDeclCtxt fo) $ do
      sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
      rhs <- tcPolyExpr (nlHsVar nm) sig_ty
      (norm_co, norm_sig_ty, gres) <- normaliseFfiType sig_ty
      spec' <- tcCheckFEType norm_sig_ty spec
      -- A stable Id is needed so the exported symbol survives optimisation.
      id <- mkStableIdFromName nm sig_ty loc mkForeignExportOcc
      -- NOTE(review): 'undefined' fills the rebuilt declaration's type
      -- field; presumably never forced downstream -- TODO confirm.
      return (mkVarBind id rhs, ForeignExport (L loc id) undefined norm_co spec', gres)
-- Only foreign exports reach this function; anything else is a panic.
tcFExport d = pprPanic "tcFExport" (ppr d)
-- | Validate a foreign-export type: all argument types must be
-- externalisable and the result type exportable. Non-IO results are
-- allowed and not subject to the Safe Haskell check.
-- NOTE(review): 'javaClassVars' is computed but unused (the commented
-- pattern above suggests work in progress) -- candidate for removal.
tcCheckFEType :: Type -> ForeignExport -> TcM ForeignExport
tcCheckFEType sigType exportspec = do
  -- (CExport (L l (CExportStatic str cconv)) src)
  checkForeignArgs isFFIExternalTy argTypes
  checkForeignRes nonIOok noCheckSafe isFFIExportResultTy resType
  return exportspec
  where (_, ty) = tcSplitForAllTys sigType
        (thetaType, ty') = tcSplitPhiTy ty
        (argTypes, resType) = tcSplitFunTys ty'
        javaClassVars = extendsVars thetaType
| alexander-at-github/eta | compiler/ETA/TypeCheck/TcForeign.hs | bsd-3-clause | 12,447 | 0 | 19 | 3,274 | 3,449 | 1,743 | 1,706 | 253 | 9 |
-- | News page controller. This page simply downloads from
-- haskellnews.org which already has a pre-prepared page of news to
-- display.
module HL.C.News where
import HL.C
import HL.M.News
import HL.V.News
-- | News controller: fetch the pre-rendered HaskellNews page and render
-- it through the news view.
getNewsR :: C Html
getNewsR = io getHaskellNews >>= blaze . newsV
| chrisdone/hl | src/HL/C/News.hs | bsd-3-clause | 318 | 0 | 9 | 60 | 63 | 36 | 27 | 8 | 1 |
-- A point is a point in the xy plane, represented by x and y coordinates
-- E.g. (Point 0.0 0.0) is the origin, (Point (-1) (1)) is in the top left
-- quadrant.
data Point = Point Double Double
    deriving (Show, Eq)
-- A line segment is a straight line of finite length, defined by its
-- two end points. E.g. (LineSegment (Point 0 0) (Point 1 1)) is a
-- line segment from the origin to the coordinate (1, 1)
data LineSegment = LineSegment Point Point
    deriving (Show, Eq)
-- A Path is a 2D path in the xy-plane. The idea is that Path can be
-- extended to support straight lines, curves, and arbitrary paths,
-- but currently there is only one data constructor for Path: Line.
data Path =
-- Line represents an infinite straight line defined by its slope a
-- and its y intercept b, ie. by the equation y = ax + b
    Line Double Double
    deriving (Show, Eq)
| markstoehr/cs161 | _site/fls/Lab2_flymake.hs | cc0-1.0 | 879 | 0 | 6 | 192 | 78 | 47 | 31 | 7 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Targets
-- Copyright : (c) Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : duncan@community.haskell.org
--
-- Handling for user-specified targets
-----------------------------------------------------------------------------
module Distribution.Client.Targets (
-- * User targets
UserTarget(..),
readUserTargets,
-- * Package specifiers
PackageSpecifier(..),
pkgSpecifierTarget,
pkgSpecifierConstraints,
-- * Resolving user targets to package specifiers
resolveUserTargets,
-- ** Detailed interface
UserTargetProblem(..),
readUserTarget,
reportUserTargetProblems,
expandUserTarget,
PackageTarget(..),
fetchPackageTarget,
readPackageTarget,
PackageTargetProblem(..),
reportPackageTargetProblems,
disambiguatePackageTargets,
disambiguatePackageName,
-- * User constraints
UserQualifier(..),
UserConstraintScope(..),
UserConstraint(..),
userConstraintPackageName,
readUserConstraint,
userToPackageConstraint,
) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Package
( Package(..), PackageName, unPackageName, mkPackageName
, PackageIdentifier(..), packageName, packageVersion )
import Distribution.Types.Dependency
import Distribution.Client.Types
( PackageLocation(..)
, ResolvedPkgLoc, UnresolvedSourcePackage )
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.LabeledPackageConstraint
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageConstraint
import Distribution.Solver.Types.PackagePath
import Distribution.Solver.Types.PackageIndex (PackageIndex)
import qualified Distribution.Solver.Types.PackageIndex as PackageIndex
import Distribution.Solver.Types.SourcePackage
import qualified Distribution.Client.World as World
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Distribution.Client.Tar as Tar
import Distribution.Client.FetchUtils
import Distribution.Client.Utils ( tryFindPackageDesc )
import Distribution.Client.GlobalFlags
( RepoContext(..) )
import Distribution.PackageDescription
( GenericPackageDescription, parseFlagAssignment )
import Distribution.Version
( nullVersion, thisVersion, anyVersion, isAnyVersion )
import Distribution.Text
( Text(..), display )
import Distribution.Verbosity (Verbosity)
import Distribution.Simple.Utils
( die', warn, lowercase )
#ifdef CABAL_PARSEC
import Distribution.PackageDescription.Parsec
( readGenericPackageDescription, parseGenericPackageDescriptionMaybe )
#else
import Distribution.PackageDescription.Parse
( readGenericPackageDescription, parseGenericPackageDescription, ParseResult(..) )
import Distribution.Simple.Utils
( fromUTF8, ignoreBOM )
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
#endif
-- import Data.List ( find, nub )
import Data.Either
( partitionEithers )
import qualified Data.Map as Map
import qualified Data.ByteString.Lazy as BS
import qualified Distribution.Client.GZipUtils as GZipUtils
import Control.Monad (mapM)
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP
( (+++), (<++) )
import Distribution.ParseUtils
( readPToMaybe )
import System.FilePath
( takeExtension, dropExtension, takeDirectory, splitPath )
import System.Directory
( doesFileExist, doesDirectoryExist )
import Network.URI
( URI(..), URIAuth(..), parseAbsoluteURI )
-- ------------------------------------------------------------
-- * User targets
-- ------------------------------------------------------------
-- | Various ways that a user may specify a package or package collection.
-- Produced by 'readUserTarget' and consumed by 'expandUserTarget'.
--
data UserTarget =
     -- | A partially specified package, identified by name and possibly with
     -- an exact version or a version constraint.
     --
     -- > cabal install foo
     -- > cabal install foo-1.0
     -- > cabal install 'foo < 2'
     --
     UserTargetNamed Dependency
     -- | A special virtual package that refers to the collection of packages
     -- recorded in the world file that the user specifically installed.
     --
     -- > cabal install world
     --
   | UserTargetWorld
     -- | A specific package that is unpacked in a local directory, often the
     -- current directory.
     --
     -- > cabal install .
     -- > cabal install ../lib/other
     --
     -- * Note: in future, if multiple @.cabal@ files are allowed in a single
     -- directory then this will refer to the collection of packages.
     --
   | UserTargetLocalDir FilePath
     -- | A specific local unpacked package, identified by its @.cabal@ file.
     --
     -- > cabal install foo.cabal
     -- > cabal install ../lib/other/bar.cabal
     --
   | UserTargetLocalCabalFile FilePath
     -- | A specific package that is available as a local tarball file
     --
     -- > cabal install dist/foo-1.0.tar.gz
     -- > cabal install ../build/baz-1.0.tar.gz
     --
   | UserTargetLocalTarball FilePath
     -- | A specific package that is available as a remote tarball file
     --
     -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz
     --
   | UserTargetRemoteTarball URI
  deriving (Show,Eq)
-- ------------------------------------------------------------
-- * Package specifier
-- ------------------------------------------------------------
-- | A fully or partially resolved reference to a package.
--
data PackageSpecifier pkg =
     -- | A partially specified reference to a package (either source or
     -- installed). It is specified by package name and optionally some
     -- required properties. Use a dependency resolver to pick a specific
     -- package satisfying these properties.
     --
     NamedPackage PackageName [PackageProperty]
     -- | A fully specified source package.
     --
   | SpecificSourcePackage pkg
  deriving (Eq, Show, Generic)
-- Binary via Generic, for serialising plan state.
instance Binary pkg => Binary (PackageSpecifier pkg)
-- | The name of the package that a specifier refers to.
pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName
pkgSpecifierTarget spec =
  case spec of
    NamedPackage name _       -> name
    SpecificSourcePackage pkg -> packageName pkg
-- | The solver constraints implied by a specifier: each required property
-- of a named package, or an exact-version pin for a specific source
-- package. All constraints are labelled as coming from a user target.
pkgSpecifierConstraints :: Package pkg
                        => PackageSpecifier pkg -> [LabeledPackageConstraint]
pkgSpecifierConstraints (NamedPackage name props) = map toLpc props
  where
    toLpc prop = LabeledPackageConstraint
                 (PackageConstraint (scopeToplevel name) prop)
                 ConstraintSourceUserTarget
pkgSpecifierConstraints (SpecificSourcePackage pkg) =
    [LabeledPackageConstraint pc ConstraintSourceUserTarget]
  where
    pc = PackageConstraint
         (ScopeTarget $ packageName pkg)
         (PackagePropertyVersion $ thisVersion (packageVersion pkg))
-- ------------------------------------------------------------
-- * Parsing and checking user targets
-- ------------------------------------------------------------
-- | Parse all user-supplied target strings, reporting every malformed one
-- (and dying) before returning the successfully parsed targets.
readUserTargets :: Verbosity -> [String] -> IO [UserTarget]
readUserTargets verbosity targetStrs = do
    results <- mapM readUserTarget targetStrs
    let (problems, targets) = partitionEithers results
    reportUserTargetProblems verbosity problems
    return targets
-- | Ways in which a user-supplied target string can be malformed; rendered
-- for the user by 'reportUserTargetProblems'.
-- (Note: \"Nonexistant\" spelling is kept as-is; renaming the constructor
-- would break users of this module.)
data UserTargetProblem
   = UserTargetUnexpectedFile      String
   | UserTargetNonexistantFile     String
   | UserTargetUnexpectedUriScheme String
   | UserTargetUnrecognisedUri     String
   | UserTargetUnrecognised        String
   | UserTargetBadWorldPkg
  deriving Show
-- | Parse a single target string, classifying it as a named package,
-- the special @world@ target, a local dir/cabal-file/tarball, or a remote
-- tarball URL. Tried in that order: name syntax first, then the
-- filesystem, then URI syntax.
readUserTarget :: String -> IO (Either UserTargetProblem UserTarget)
readUserTarget targetstr =
    case testNamedTargets targetstr of
      Just (Dependency pkgn verrange)
        | pkgn == mkPackageName "world"
          -> return $ if verrange == anyVersion
                        then Right UserTargetWorld
                        else Left  UserTargetBadWorldPkg
      Just dep -> return (Right (UserTargetNamed dep))
      Nothing -> do
        fileTarget <- testFileTargets targetstr
        case fileTarget of
          Just target -> return target
          Nothing ->
            case testUriTargets targetstr of
              Just target -> return target
              Nothing -> return (Left (UserTargetUnrecognised targetstr))
  where
    testNamedTargets = readPToMaybe parseDependencyOrPackageId
    -- Classify an existing path; distinguish "file exists but is neither
    -- tarball nor cabal file" from "parent exists but file does not".
    testFileTargets filename = do
      isDir  <- doesDirectoryExist filename
      isFile <- doesFileExist filename
      parentDirExists <- case takeDirectory filename of
                           []  -> return False
                           dir -> doesDirectoryExist dir
      let result
            | isDir
            = Just (Right (UserTargetLocalDir filename))
            | isFile && extensionIsTarGz filename
            = Just (Right (UserTargetLocalTarball filename))
            | isFile && takeExtension filename == ".cabal"
            = Just (Right (UserTargetLocalCabalFile filename))
            | isFile
            = Just (Left (UserTargetUnexpectedFile filename))
            | parentDirExists
            = Just (Left (UserTargetNonexistantFile filename))
            | otherwise
            = Nothing
      return result
    -- Only absolute http(s) URLs with a host are accepted.
    testUriTargets str =
      case parseAbsoluteURI str of
        Just uri@URI {
            uriScheme    = scheme,
            uriAuthority = Just URIAuth { uriRegName = host }
          }
          | scheme /= "http:" && scheme /= "https:" ->
            Just (Left (UserTargetUnexpectedUriScheme targetstr))
          | null host ->
            Just (Left (UserTargetUnrecognisedUri targetstr))
          | otherwise ->
            Just (Right (UserTargetRemoteTarball uri))
        _ -> Nothing
    extensionIsTarGz f = takeExtension f                 == ".gz"
                      && takeExtension (dropExtension f) == ".tar"
-- | Parse either a dependency ("foo < 2") or a plain package id
-- ("foo" or "foo-1.0"); a package id with no (null) version means any
-- version, otherwise exactly that version.
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse
                         +++ liftM pkgidToDependency parse
  where
    pkgidToDependency :: PackageIdentifier -> Dependency
    pkgidToDependency p = case packageVersion p of
      v | v == nullVersion -> Dependency (packageName p) anyVersion
        | otherwise        -> Dependency (packageName p) (thisVersion v)
-- | Report all problems found while parsing user targets, aborting (via
-- 'die'') with a grouped, explanatory message per problem category.
--
-- Fix: corrected the user-facing message typo "Unrecognise URL target"
-- -> "Unrecognised URL target" (matching the wording used for the other
-- categories).
reportUserTargetProblems :: Verbosity -> [UserTargetProblem] -> IO ()
reportUserTargetProblems verbosity problems = do
    case [ target | UserTargetUnrecognised target <- problems ] of
      []     -> return ()
      target -> die' verbosity
              $ unlines
                  [ "Unrecognised target '" ++ name ++ "'."
                  | name <- target ]
             ++ "Targets can be:\n"
             ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n"
             ++ " - the special 'world' target\n"
             ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n"
             ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'"
    case [ () | UserTargetBadWorldPkg <- problems ] of
      [] -> return ()
      _  -> die' verbosity "The special 'world' target does not take any version."
    case [ target | UserTargetNonexistantFile target <- problems ] of
      []     -> return ()
      target -> die' verbosity
              $ unlines
                  [ "The file does not exist '" ++ name ++ "'."
                  | name <- target ]
    case [ target | UserTargetUnexpectedFile target <- problems ] of
      []     -> return ()
      target -> die' verbosity
              $ unlines
                  [ "Unrecognised file target '" ++ name ++ "'."
                  | name <- target ]
             ++ "File targets can be either package tarballs 'pkgname.tar.gz' "
             ++ "or cabal files 'pkgname.cabal'."
    case [ target | UserTargetUnexpectedUriScheme target <- problems ] of
      []     -> return ()
      target -> die' verbosity
              $ unlines
                  [ "URL target not supported '" ++ name ++ "'."
                  | name <- target ]
             ++ "Only 'http://' and 'https://' URLs are supported."
    case [ target | UserTargetUnrecognisedUri target <- problems ] of
      []     -> return ()
      target -> die' verbosity
              $ unlines
                  [ "Unrecognised URL target '" ++ name ++ "'."
                  | name <- target ]
-- ------------------------------------------------------------
-- * Resolving user targets to package specifiers
-- ------------------------------------------------------------
-- | Given a bunch of user-specified targets, try to resolve what it is they
-- refer to. They can either be specific packages (local dirs, tarballs etc)
-- or they can be named packages (with or without version info).
--
-- | Given a bunch of user-specified targets, try to resolve what it is
-- they refer to: expand each target to package targets, fetch any remote
-- ones, read their descriptions, and disambiguate case-insensitive
-- package names. Dies on any disambiguation problem.
resolveUserTargets :: Package pkg
                   => Verbosity
                   -> RepoContext
                   -> FilePath
                   -> PackageIndex pkg
                   -> [UserTarget]
                   -> IO [PackageSpecifier UnresolvedSourcePackage]
resolveUserTargets verbosity repoCtxt worldFile available userTargets = do
    -- given the user targets, get a list of fully or partially resolved
    -- package references
    packageTargets <- mapM (readPackageTarget verbosity)
                  =<< mapM (fetchPackageTarget verbosity repoCtxt) . concat
                  =<< mapM (expandUserTarget verbosity worldFile) userTargets
    -- users are allowed to give package names case-insensitively, so we must
    -- disambiguate named package references
    let (problems, packageSpecifiers) =
         disambiguatePackageTargets available availableExtra packageTargets
        -- use any extra specific available packages to help us disambiguate
        availableExtra = [ packageName pkg
                         | PackageTargetLocation pkg <- packageTargets ]
    reportPackageTargetProblems verbosity problems
    return packageSpecifiers
-- ------------------------------------------------------------
-- * Package targets
-- ------------------------------------------------------------
-- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'.
-- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package.
--
-- | An intermediate between a 'UserTarget' and a resolved
-- 'PackageSpecifier'; refers to exactly one package.
data PackageTarget pkg =
     PackageTargetNamed      PackageName [PackageProperty] UserTarget
     -- | A package identified by name, but case insensitively, so it needs
     -- to be resolved to the right case-sensitive name.
   | PackageTargetNamedFuzzy PackageName [PackageProperty] UserTarget
     -- | A package at a concrete location (dir, tarball, URL, repo).
   | PackageTargetLocation pkg
  deriving (Show, Functor, Foldable, Traversable)
-- ------------------------------------------------------------
-- * Converting user targets to package targets
-- ------------------------------------------------------------
-- | Given a user-specified target, expand it to a bunch of package targets
-- (each of which refers to only one package).
--
-- | Given a user-specified target, expand it to a bunch of package
-- targets, each of which refers to exactly one package. Named targets
-- expand fuzzily (case-insensitive name); the world target expands to
-- every package recorded in the world file.
expandUserTarget :: Verbosity
                 -> FilePath
                 -> UserTarget
                 -> IO [PackageTarget (PackageLocation ())]
expandUserTarget verbosity worldFile userTarget = case userTarget of
    UserTargetNamed (Dependency name vrange) ->
      let props = [ PackagePropertyVersion vrange
                  | not (isAnyVersion vrange) ]
      in  return [PackageTargetNamedFuzzy name props userTarget]
    UserTargetWorld -> do
      worldPkgs <- World.getContents verbosity worldFile
      --TODO: should we warn if there are no world targets?
      return [ PackageTargetNamed name props userTarget
             | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs
             , let props = [ PackagePropertyVersion vrange
                           | not (isAnyVersion vrange) ]
                        ++ [ PackagePropertyFlags flags
                           | not (null flags) ] ]
    UserTargetLocalDir dir ->
      return [ PackageTargetLocation (LocalUnpackedPackage dir) ]
    UserTargetLocalCabalFile file -> do
      let dir = takeDirectory file
      _   <- tryFindPackageDesc verbosity dir (localPackageError dir) -- just as a check
      return [ PackageTargetLocation (LocalUnpackedPackage dir) ]
    UserTargetLocalTarball tarballFile ->
      return [ PackageTargetLocation (LocalTarballPackage tarballFile) ]
    UserTargetRemoteTarball tarballURL ->
      return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ]
-- | Error message used when a local package directory does not contain a
-- @.cabal@ file.
localPackageError :: FilePath -> String
localPackageError dir = concat
    [ "Error reading local package.\nCouldn't find .cabal file in: "
    , dir
    ]
-- ------------------------------------------------------------
-- * Fetching and reading package targets
-- ------------------------------------------------------------
-- | Fetch any remote targets so that they can be read.
--
-- | Fetch any remote targets so that they can be read; local targets are
-- resolved in place. Only the location payload changes; the target
-- structure is preserved via 'traverse'.
fetchPackageTarget :: Verbosity
                   -> RepoContext
                   -> PackageTarget (PackageLocation ())
                   -> IO (PackageTarget ResolvedPkgLoc)
fetchPackageTarget verbosity repoCtxt = traverse $
  fetchPackage verbosity repoCtxt . fmap (const Nothing)
-- | Given a package target that has been fetched, read the .cabal file.
--
-- This only affects targets given by location, named targets are unaffected.
--
-- | Given a package target that has been fetched, read the .cabal file:
-- directly from an unpacked dir, or by extracting the single .cabal entry
-- from a (possibly gzip-compressed) tarball. Named targets pass through
-- unchanged. Dies if the cabal file is missing, duplicated or unparsable.
readPackageTarget :: Verbosity
                  -> PackageTarget ResolvedPkgLoc
                  -> IO (PackageTarget UnresolvedSourcePackage)
readPackageTarget verbosity = traverse modifyLocation
  where
    modifyLocation location = case location of
      LocalUnpackedPackage dir -> do
        pkg <- tryFindPackageDesc verbosity dir (localPackageError dir) >>=
                 readGenericPackageDescription verbosity
        return $ SourcePackage {
                   packageInfoId        = packageId pkg,
                   packageDescription   = pkg,
                   packageSource        = fmap Just location,
                   packageDescrOverride = Nothing
                 }
      LocalTarballPackage tarballFile ->
        readTarballPackageTarget location tarballFile tarballFile
      RemoteTarballPackage tarballURL tarballFile ->
        readTarballPackageTarget location tarballFile (show tarballURL)
      RepoTarballPackage _repo _pkgid _ ->
        error "TODO: readPackageTarget RepoTarballPackage"
        -- For repo tarballs this info should be obtained from the index.
    readTarballPackageTarget location tarballFile tarballOriginalLoc = do
      (filename, content) <- extractTarballPackageCabalFile
                               tarballFile tarballOriginalLoc
      case parsePackageDescription' content of
        Nothing  -> die' verbosity $ "Could not parse the cabal file "
                       ++ filename ++ " in " ++ tarballFile
        Just pkg ->
          return $ SourcePackage {
                     packageInfoId        = packageId pkg,
                     packageDescription   = pkg,
                     packageSource        = fmap Just location,
                     packageDescrOverride = Nothing
                   }
    -- Find the unique .cabal entry in the tar archive (at depth one,
    -- optionally under a leading "./").
    extractTarballPackageCabalFile :: FilePath -> String
                                   -> IO (FilePath, BS.ByteString)
    extractTarballPackageCabalFile tarballFile tarballOriginalLoc =
          either (die' verbosity . formatErr) return
        . check
        . accumEntryMap
        . Tar.filterEntries isCabalFile
        . Tar.read
        . GZipUtils.maybeDecompress
      =<< BS.readFile tarballFile
      where
        formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg
        accumEntryMap = Tar.foldlEntries
                          (\m e -> Map.insert (Tar.entryTarPath e) e m)
                          Map.empty
        check (Left e)  = Left (show e)
        check (Right m) = case Map.elems m of
            []     -> Left noCabalFile
            [file] -> case Tar.entryContent file of
              Tar.NormalFile content _ -> Right (Tar.entryPath file, content)
              _                        -> Left noCabalFile
            _files -> Left multipleCabalFiles
          where
            noCabalFile        = "No cabal file found"
            multipleCabalFiles = "Multiple cabal files found"
        isCabalFile e = case splitPath (Tar.entryPath e) of
          [     _dir, file] -> takeExtension file == ".cabal"
          [".", _dir, file] -> takeExtension file == ".cabal"
          _                 -> False
-- | Parse a cabal file from a lazy ByteString, returning Nothing on any
-- parse failure. Uses the parsec parser when built with CABAL_PARSEC,
-- otherwise the legacy ReadP parser (with BOM/UTF-8 handling done here).
parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription
#ifdef CABAL_PARSEC
parsePackageDescription' bs =
    parseGenericPackageDescriptionMaybe (BS.toStrict bs)
#else
parsePackageDescription' content =
    case parseGenericPackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of
      ParseOk _ pkg -> Just pkg
      _             -> Nothing
#endif
-- ------------------------------------------------------------
-- * Checking package targets
-- ------------------------------------------------------------
-- | Problems encountered while resolving named package targets.
data PackageTargetProblem
  = PackageNameUnknown   PackageName UserTarget
    -- ^ The name matches nothing in the package index.
  | PackageNameAmbiguous PackageName [PackageName] UserTarget
    -- ^ The name matches several packages case-insensitively.
  deriving Show
-- | Users are allowed to give package names case-insensitively, so we must
-- disambiguate named package references.
--
disambiguatePackageTargets :: Package pkg'
                           => PackageIndex pkg'
                           -> [PackageName]
                           -> [PackageTarget pkg]
                           -> ( [PackageTargetProblem]
                              , [PackageSpecifier pkg] )
disambiguatePackageTargets availablePkgIndex availableExtra targets =
    partitionEithers (map disambiguatePackageTarget targets)
  where
    -- 'Left' is a problem to report, 'Right' a resolved specifier.
    disambiguatePackageTarget packageTarget = case packageTarget of
      PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg)

      -- Exactly-named target: just check that the name exists at all.
      PackageTargetNamed pkgname props userTarget
        | null (PackageIndex.lookupPackageName availablePkgIndex pkgname)
                    -> Left (PackageNameUnknown pkgname userTarget)
        | otherwise -> Right (NamedPackage pkgname props)

      -- Fuzzily-named target: resolve case-insensitively against the
      -- combined name environment.
      PackageTargetNamedFuzzy pkgname props userTarget ->
        case disambiguatePackageName packageNameEnv pkgname of
          None                 -> Left  (PackageNameUnknown
                                          pkgname userTarget)
          Ambiguous   pkgnames -> Left  (PackageNameAmbiguous
                                          pkgname pkgnames userTarget)
          Unambiguous pkgname' -> Right (NamedPackage pkgname' props)

    -- use any extra specific available packages to help us disambiguate
    packageNameEnv :: PackageNameEnv
    packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex)
                             (extraPackageNameEnv availableExtra)
-- | Report problems to the user. That is, if there are any problems
-- then raise an exception.
reportPackageTargetProblems :: Verbosity
                            -> [PackageTargetProblem] -> IO ()
reportPackageTargetProblems verbosity problems = do
    -- Unknown package names (except those from the world file) are fatal.
    case unknown of
      [] -> return ()
      pkgs ->
        die' verbosity $
             unlines [ "There is no package named '" ++ display name ++ "'. "
                     | name <- pkgs ]
          ++ "You may need to run 'cabal update' to get the latest "
          ++ "list of available packages."

    -- Names with several case-insensitive matches are fatal too.
    case ambiguous of
      [] -> return ()
      clashes ->
        die' verbosity $ unlines
          [    "The package name '" ++ display name
            ++ "' is ambiguous. It could be: "
            ++ intercalate ", " (map display matches)
          | (name, matches) <- clashes ]

    -- Unknown names that came from the world file only get a warning.
    case unknownWorld of
      [] -> return ()
      pkgs ->
        warn verbosity $
             "The following 'world' packages will be ignored because "
          ++ "they refer to packages that cannot be found: "
          ++ intercalate ", " (map display pkgs) ++ "\n"
          ++ "You can suppress this warning by correcting the world file."
  where
    unknown      = [ pkg | PackageNameUnknown pkg target <- problems
                         , not (isUserTargetWorld target) ]
    ambiguous    = [ (pkg, matches)
                   | PackageNameAmbiguous pkg matches _ <- problems ]
    unknownWorld = [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ]

    isUserTargetWorld UserTargetWorld = True
    isUserTargetWorld _               = False
-- ------------------------------------------------------------
-- * Disambiguating package names
-- ------------------------------------------------------------
-- | Result of resolving a possibly miscased name: no match, exactly
-- one (canonical) match, or several distinct matches.
data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a]
-- | Decide which package a user-supplied name refers to.  An exact,
-- case-sensitive match wins outright; failing that, a unique
-- case-insensitive match is accepted; several distinct
-- case-insensitive matches are reported as ambiguous.
disambiguatePackageName :: PackageNameEnv
                        -> PackageName
                        -> MaybeAmbiguous PackageName
disambiguatePackageName (PackageNameEnv pkgNameLookup) name =
    case nub (pkgNameLookup name) of
      []          -> None
      [candidate] -> Unambiguous candidate
      candidates
        | name `elem` candidates -> Unambiguous name
        | otherwise              -> Ambiguous candidates
-- | A lookup from a (possibly differently-cased) package name to the
-- candidate package names it may refer to.  Environments compose by
-- concatenating their candidate lists.
newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName])

instance Monoid PackageNameEnv where
  -- The empty environment knows no names.
  mempty  = PackageNameEnv (const [])
  mappend = (<>)

instance Semigroup PackageNameEnv where
  PackageNameEnv lookupA <> PackageNameEnv lookupB =
    PackageNameEnv (\name -> lookupA name ++ lookupB name)
-- | Name environment backed by a package index: the candidates for a
-- name are all index entries found by searching for its string form.
indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv
indexPackageNameEnv pkgIndex =
    PackageNameEnv $ \pname ->
      map fst (PackageIndex.searchByName pkgIndex (unPackageName pname))
-- | Name environment over an explicit list of package names, matched
-- by lower-cased comparison.
extraPackageNameEnv :: [PackageName] -> PackageNameEnv
extraPackageNameEnv names = PackageNameEnv pkgNameLookup
  where
    pkgNameLookup pname = filter matches names
      where
        lname        = lowercase (unPackageName pname)
        matches cand = lowercase (unPackageName cand) == lname
-- ------------------------------------------------------------
-- * Package constraints
-- ------------------------------------------------------------
-- | Version of 'Qualifier' that a user may specify on the
-- command line.
data UserQualifier =
  -- | Top-level dependency.
  UserQualToplevel

  -- | Setup dependency.
  | UserQualSetup PackageName

  -- | Executable dependency.
  -- NOTE(review): the roles of the two names are not evident from
  -- this excerpt -- confirm against the solver's 'QualExe'.
  | UserQualExe PackageName PackageName
  deriving (Eq, Show, Generic)

instance Binary UserQualifier
-- | Version of 'ConstraintScope' that a user may specify on the
-- command line.
data UserConstraintScope =
  -- | Scope that applies to the package when it has the specified qualifier.
  UserQualified UserQualifier PackageName

  -- | Scope that applies to the package when it has a setup qualifier.
  | UserAnySetupQualifier PackageName

  -- | Scope that applies to the package when it has any qualifier.
  | UserAnyQualifier PackageName
  deriving (Eq, Show, Generic)

instance Binary UserConstraintScope
-- | Interpret a command-line qualifier as a solver 'Qualifier'.
fromUserQualifier :: UserQualifier -> Qualifier
fromUserQualifier q = case q of
  UserQualToplevel        -> QualToplevel
  UserQualSetup name      -> QualSetup name
  UserQualExe name1 name2 -> QualExe name1 name2
-- | Interpret a command-line constraint scope as a solver
-- 'ConstraintScope'.
fromUserConstraintScope :: UserConstraintScope -> ConstraintScope
fromUserConstraintScope scope = case scope of
  UserQualified q pn        -> ScopeQualified (fromUserQualifier q) pn
  UserAnySetupQualifier pn  -> ScopeAnySetupQualifier pn
  UserAnyQualifier pn       -> ScopeAnyQualifier pn
-- | Version of 'PackageConstraint' that the user can specify on
-- the command line: a scope paired with the property to impose.
data UserConstraint =
  UserConstraint UserConstraintScope PackageProperty
  deriving (Eq, Show, Generic)

instance Binary UserConstraint
-- | The package name a user constraint's scope applies to.
userConstraintPackageName :: UserConstraint -> PackageName
userConstraintPackageName (UserConstraint scope _) =
  case scope of
    UserQualified _ pn       -> pn
    UserAnyQualifier pn      -> pn
    UserAnySetupQualifier pn -> pn
-- | Lower a command-line constraint to a solver 'PackageConstraint'.
userToPackageConstraint :: UserConstraint -> PackageConstraint
userToPackageConstraint (UserConstraint scope prop) =
    PackageConstraint scope' prop
  where
    scope' = fromUserConstraintScope scope
-- | Parse a constraint given on the command line, producing a helpful
-- message when the input is not recognised.
readUserConstraint :: String -> Either String UserConstraint
readUserConstraint str =
    maybe (Left msgCannotParse) Right (readPToMaybe parse str)
  where
    msgCannotParse =
      "expected a (possibly qualified) package name followed by a " ++
      "constraint, which is either a version range, 'installed', " ++
      "'source', 'test', 'bench', or flags"
-- | Pretty-printing and parsing of user constraints.  Parsing accepts
-- an optionally qualified package name ("any.NAME", "setup.NAME",
-- "NAME" or "NAME:setup.NAME2") followed by a property: a version
-- range, 'installed', 'source', 'test', 'bench', or a flag assignment.
instance Text UserConstraint where
  disp (UserConstraint scope prop) =
    dispPackageConstraint $ PackageConstraint (fromUserConstraintScope scope) prop

  parse =
    let parseConstraintScope :: Parse.ReadP a UserConstraintScope
        parseConstraintScope =
          -- "any." scope
          do
             _ <- Parse.string "any."
             pn <- parse
             return (UserAnyQualifier pn)
          +++
          -- "setup." scope
          do
             _ <- Parse.string "setup."
             pn <- parse
             return (UserAnySetupQualifier pn)
          +++
          do
             -- Qualified name
             pn <- parse
             -- Either a bare top-level name, or "NAME:setup.NAME2".
             (return (UserQualified UserQualToplevel pn)
              +++
              do _ <- Parse.string ":setup."
                 pn2 <- parse
                 return (UserQualified (UserQualSetup pn) pn2))
             -- -- TODO: Re-enable parsing of UserQualExe once we decide on a syntax.
             --
             -- +++
             -- do _ <- Parse.string ":"
             --    pn2 <- parse
             --    _ <- Parse.string ":exe."
             --    pn3 <- parse
             --    return (UserQualExe pn pn2, pn3)
    in do
         scope <- parseConstraintScope
         -- Package property
         let keyword str x = Parse.skipSpaces1 >> Parse.string str >> return x
         prop <- ((parse >>= return . PackagePropertyVersion)
                  +++
                  keyword "installed" PackagePropertyInstalled
                  +++
                  keyword "source" PackagePropertySource
                  +++
                  keyword "test" (PackagePropertyStanzas [TestStanzas])
                  +++
                  keyword "bench" (PackagePropertyStanzas [BenchStanzas]))
                 -- Note: the parser is left-biased here so that we
                 -- don't get an ambiguous parse from 'installed',
                 -- 'source', etc. being regarded as flags.
                 <++
                 (Parse.skipSpaces1 >> parseFlagAssignment
                  >>= return . PackagePropertyFlags)
         -- Result
         return (UserConstraint scope prop)
| mydaum/cabal | cabal-install/Distribution/Client/Targets.hs | bsd-3-clause | 31,363 | 0 | 21 | 8,451 | 5,370 | 2,801 | 2,569 | 512 | 12 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE MultiParamTypeClasses, Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Groups.Helpers
-- Copyright : Quentin Moser <moserq@gmail.com>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : orphaned
-- Stability : stable
-- Portability : unportable
--
-- Utility functions for "XMonad.Layout.Groups".
--
-----------------------------------------------------------------------------
module XMonad.Layout.Groups.Helpers ( -- * Usage
-- $usage
-- ** Layout-generic actions
swapUp
, swapDown
, swapMaster
, focusUp
, focusDown
, focusMaster
, toggleFocusFloat
-- ** 'G.Groups'-specific actions
, swapGroupUp
, swapGroupDown
, swapGroupMaster
, focusGroupUp
, focusGroupDown
, focusGroupMaster
, moveToGroupUp
, moveToGroupDown
, moveToNewGroupUp
, moveToNewGroupDown
, splitGroup ) where
import XMonad hiding ((|||))
import qualified XMonad.StackSet as W
import qualified XMonad.Layout.Groups as G
import XMonad.Actions.MessageFeedback
import Control.Monad (unless)
import qualified Data.Map as M
-- $usage
--
-- This module provides helper functions for use with "XMonad.Layout.Groups"-based
-- layouts. You can use its contents by adding
--
-- > import XMonad.Layout.Groups.Helpers
--
-- to the top of your @.\/.xmonad\/xmonad.hs@.
--
-- "XMonad.Layout.Groups"-based layouts do not have the same notion
-- of window ordering as the rest of XMonad. For this reason, the usual
-- ways of reordering windows and moving focus do not work with them.
-- "XMonad.Layout.Groups" provides 'Message's that can be used to obtain
-- the right effect.
--
-- But what if you want to use both 'G.Groups' and other layouts?
-- This module provides actions that try to send 'G.GroupsMessage's, and
-- fall back to the classic way if the current layout doesn't handle them.
-- They are in the section called \"Layout-generic actions\".
--
-- The sections \"Groups-specific actions\" contains actions that don't make
-- sense for non-'G.Groups'-based layouts. These are simply wrappers around
-- the equivalent 'G.GroupsMessage's, but are included so you don't have to
-- write @sendMessage $ Modify $ ...@ every time.
--
-- This module exports many operations with the same names as
-- 'G.ModifySpec's from "XMonad.Layout.Groups", so if you want
-- to import both, we suggest to import "XMonad.Layout.Groups"
-- qualified:
--
-- > import qualified XMonad.Layout.Groups as G
--
-- For more information on how to extend your layout hook and key bindings, see
-- "XMonad.Doc.Extending".
-- ** Layout-generic actions
-- #Layout-generic actions#
-- | Try a 'G.ModifySpec' as a groups message; when the current layout
-- does not handle it, fall back to the plain 'WindowSet' operation.
alt :: G.ModifySpec -> (WindowSet -> WindowSet) -> X ()
alt spec fallback = alt2 (G.Modify spec) (windows fallback)
-- | Send a groups message; when the layout reports it unhandled, run
-- the fallback action instead.
alt2 :: G.GroupsMessage -> X () -> X ()
alt2 m fallback = do
    handled <- send m
    unless handled fallback
-- | Swap the focused window with the previous one
swapUp :: X ()
swapUp = alt G.swapUp W.swapUp

-- | Swap the focused window with the next one
swapDown :: X ()
swapDown = alt G.swapDown W.swapDown

-- | Swap the focused window with the master window
swapMaster :: X ()
swapMaster = alt G.swapMaster W.swapMaster

-- | If the focused window is floating, focus the next floating
-- window. otherwise, focus the next non-floating one.
focusUp :: X ()
focusUp = ifFloat focusFloatUp focusNonFloatUp

-- | If the focused window is floating, focus the next floating
-- window. otherwise, focus the next non-floating one.
focusDown :: X ()
focusDown = ifFloat focusFloatDown focusNonFloatDown

-- | Move focus to the master window
-- NOTE(review): the non-groups fallback here is 'W.shiftMaster',
-- which (by name) moves the focused window to the master position
-- rather than focusing the master window -- confirm whether
-- 'W.focusMaster' was intended.
focusMaster :: X ()
focusMaster = alt G.focusMaster W.shiftMaster

-- | Move focus between the floating and non-floating layers
toggleFocusFloat :: X ()
toggleFocusFloat = ifFloat focusNonFloat focusFloatUp
-- *** Floating layer helpers
-- | All windows currently in the floating layer.
getFloats :: X [Window]
getFloats = fmap (M.keys . W.floating) (gets windowset)
-- | The windows of the currently focused workspace, in stack order.
getWindows :: X [Window]
getWindows =
    fmap (W.integrate' . W.stack . W.workspace . W.current) (gets windowset)
-- | Run the first action when the focused window is floating, the
-- second otherwise.
ifFloat :: X () -> X () -> X ()
ifFloat whenFloat whenTiled = withFocused $ \w -> do
    floats <- getFloats
    if w `elem` floats then whenFloat else whenTiled
-- | Focus the next non-floating window, preferring the groups
-- layout's own refocus handling when available.
focusNonFloat :: X ()
focusNonFloat = alt2 G.Refocus fallback
  where
    fallback = withFocused $ \w -> do
      ws     <- getWindows
      floats <- getFloats
      -- Rotate the stack so the windows after the focused one come
      -- first, then pick the first non-floating window.
      let (before, after) = break (== w) ws
          candidates      = [ x | x <- after ++ before, x `notElem` floats ]
      case candidates of
        []    -> return ()
        w':_  -> focus w'
-- | Move focus within one layer (floating or tiled).
--
-- Previously this used the irrefutable pattern
-- @let (before, _:after) = span (/=w) ws@, which caused a runtime
-- pattern-match failure if the focused window was somehow absent from
-- the workspace stack; that case is now handled by doing nothing.
focusHelper :: (Bool -> Bool) -- ^ if you want to focus a floating window, 'id'.
                              -- if you want a non-floating one, 'not'.
            -> ([Window] -> [Window]) -- ^ if you want the next window, 'id'.
                                      -- if you want the previous one, 'reverse'.
            -> X ()
focusHelper f g = withFocused $ \w -> do
    ws <- getWindows
    case break (== w) ws of
      (before, _w:after) -> do
        floats <- getFloats
        -- Windows after the focused one first, optionally reversed.
        let toFocus = g $ after ++ before
        case filter (f . (`elem` floats)) toFocus of
          []   -> return ()
          w':_ -> focus w'
      -- Focused window not in the stack: nothing sensible to do.
      _ -> return ()
-- | Focus the previous non-floating window.
focusNonFloatUp :: X ()
focusNonFloatUp = alt2 (G.Modify G.focusUp) $ focusHelper not reverse

-- | Focus the next non-floating window.
focusNonFloatDown :: X ()
focusNonFloatDown = alt2 (G.Modify G.focusDown) $ focusHelper not id

-- | Focus the previous floating window.
focusFloatUp :: X ()
focusFloatUp = focusHelper id reverse

-- | Focus the next floating window.
focusFloatDown :: X ()
focusFloatDown = focusHelper id id
-- ** Groups-specific actions

-- | Send a 'G.ModifySpec' straight to the groups layout.
wrap :: G.ModifySpec -> X ()
wrap = sendMessage . G.Modify
-- | Swap the focused group with the previous one
swapGroupUp :: X ()
swapGroupUp = wrap G.swapGroupUp

-- | Swap the focused group with the next one
swapGroupDown :: X ()
swapGroupDown = wrap G.swapGroupDown

-- | Swap the focused group with the master group
swapGroupMaster :: X ()
swapGroupMaster = wrap G.swapGroupMaster

-- | Move the focus to the previous group
focusGroupUp :: X ()
focusGroupUp = wrap G.focusGroupUp

-- | Move the focus to the next group
focusGroupDown :: X ()
focusGroupDown = wrap G.focusGroupDown

-- | Move the focus to the master group
focusGroupMaster :: X ()
focusGroupMaster = wrap G.focusGroupMaster

-- | Move the focused window to the previous group. The 'Bool' argument
-- determines what will be done if the focused window is in the very first
-- group: Wrap back to the end ('True'), or create a new group before
-- it ('False').
moveToGroupUp :: Bool -> X ()
moveToGroupUp b = wrap (G.moveToGroupUp b)

-- | Move the focused window to the next group. The 'Bool' argument
-- determines what will be done if the focused window is in the very last
-- group: Wrap back to the beginning ('True'), or create a new group after
-- it ('False').
moveToGroupDown :: Bool -> X ()
moveToGroupDown b = wrap (G.moveToGroupDown b)

-- | Move the focused window to a new group before the current one
moveToNewGroupUp :: X ()
moveToNewGroupUp = wrap G.moveToNewGroupUp

-- | Move the focused window to a new group after the current one
moveToNewGroupDown :: X ()
moveToNewGroupDown = wrap G.moveToNewGroupDown

-- | Split the focused group in two at the position of the focused
-- window.
splitGroup :: X ()
splitGroup = wrap G.splitGroup
| f1u77y/xmonad-contrib | XMonad/Layout/Groups/Helpers.hs | bsd-3-clause | 8,103 | 0 | 16 | 2,343 | 1,305 | 712 | 593 | 105 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK hide #-}
module Network.Xmpp.IM.Message where
import Data.Default
import Data.Function
import Data.List
import Data.Text (Text)
import Data.XML.Pickle
import Data.XML.Types
import Network.Xmpp.Marshal
import Network.Xmpp.Types
-- | A message body, optionally tagged with its language.
data MessageBody = MessageBody { bodyLang    :: Maybe LangTag
                               , bodyContent :: Text
                               }

-- | A conversation thread marker with an optional parent thread.
data MessageThread = MessageThread { threadID     :: Text
                                   , threadParent :: Maybe Text
                                   }

-- | A message subject, optionally tagged with its language.
data MessageSubject = MessageSubject { subjectLang    :: Maybe LangTag
                                     , subjectContent :: Text
                                     }

-- | The instant message (IM) specific part of a message.
data InstantMessage = InstantMessage { imThread  :: Maybe MessageThread
                                     , imSubject :: [MessageSubject]
                                     , imBody    :: [MessageBody]
                                     }
-- | Empty instant message.
instantMessage :: InstantMessage
instantMessage = InstantMessage { imThread  = Nothing
                                , imSubject = []
                                , imBody    = []
                                }

-- | The default instant message is the empty one.
instance Default InstantMessage where
  def = instantMessage
-- | Get the IM specific parts of a message. Returns 'Nothing' when
-- the received payload is not valid IM data.
getIM :: Message -> Maybe InstantMessage
getIM im =
  case unpickle xpIM (messagePayload im) of
    Left _    -> Nothing
    Right im' -> Just im'
-- | Drop duplicate bodies that share a language tag; the first
-- occurrence wins.
sanitizeIM :: InstantMessage -> InstantMessage
sanitizeIM im =
    im { imBody = nubBy sameLang (imBody im) }
  where
    sameLang a b = bodyLang a == bodyLang b
-- | Append IM data to a message.  Additional IM bodies with the same
-- language tag are discarded.
withIM :: Message -> InstantMessage -> Message
withIM m im =
    m { messagePayload = messagePayload m ++ imElems }
  where
    imElems = pickleTree xpIM (sanitizeIM im)
-- | Render the (deduplicated) IM payload to XML elements.
imToElements :: InstantMessage -> [Element]
imToElements = pickle xpIM . sanitizeIM
-- | Generate a simple message with a single untagged body.
simpleIM :: Jid  -- ^ recipient
         -> Text -- ^ body
         -> Message
simpleIM to bd =
    withIM message { messageTo = Just to } payload
  where
    payload = instantMessage { imBody = [MessageBody Nothing bd] }
-- | Generate an answer from a received message. The recipient is
-- taken from the original sender, the sender is set to 'Nothing',
-- message ID, language tag, message type as well as subject and
-- thread are inherited.  Yields 'Nothing' when the original message
-- carries no valid IM payload.
--
-- Additional IM bodies with the same language tag are discarded.
answerIM :: [MessageBody] -> Message -> Maybe Message
answerIM bd msg =
    fmap mkAnswer (getIM msg)
  where
    mkAnswer im = withIM reply im { imBody = bd }
    reply = message { messageID      = messageID msg
                    , messageFrom    = Nothing
                    , messageTo      = messageFrom msg
                    , messageLangTag = messageLangTag msg
                    , messageType    = messageType msg
                    }
--------------------------
-- Picklers --------------
--------------------------
-- | Pickler for the full IM payload: optional thread, subjects, bodies.
xpIM :: PU [Element] InstantMessage
xpIM = xpWrap (\(t, s, b) -> InstantMessage t s b)
              (\(InstantMessage t s b) -> (t, s, b))
     . xpClean
     $ xp3Tuple
         xpMessageThread
         xpMessageSubject
         xpMessageBody

-- | Pickler for any number of @subject@ elements with language tags.
xpMessageSubject :: PU [Element] [MessageSubject]
xpMessageSubject = xpUnliftElems .
    xpWrap (map $ \(l, s) -> MessageSubject l s)
           (map $ \(MessageSubject l s) -> (l,s))
    $ xpElems "{jabber:client}subject" xpLangTag $ xpContent xpId

-- | Pickler for any number of @body@ elements with language tags.
xpMessageBody :: PU [Element] [MessageBody]
xpMessageBody = xpUnliftElems .
    xpWrap (map $ \(l, s) -> MessageBody l s)
           (map $ \(MessageBody l s) -> (l,s))
    $ xpElems "{jabber:client}body" xpLangTag $ xpContent xpId

-- | Pickler for an optional @thread@ element.  The wrapper swaps the
-- tuple components: the element's @parent@ attribute (first, a
-- 'Maybe') becomes 'threadParent' and the element content (second)
-- becomes 'threadID'.
xpMessageThread :: PU [Element] (Maybe MessageThread)
xpMessageThread = xpUnliftElems
                . xpOption
                . xpWrap (\(t, p) -> MessageThread p t)
                         (\(MessageThread p t) -> (t,p))
                $ xpElem "{jabber:client}thread"
                    (xpAttrImplied "parent" xpId)
                    (xpContent xpId)
| Philonous/pontarius-xmpp | source/Network/Xmpp/IM/Message.hs | bsd-3-clause | 4,325 | 0 | 13 | 1,462 | 959 | 535 | 424 | 75 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
-- | The web server.
module Ircbrowse.Server where
import Ircbrowse.Types
import qualified Ircbrowse.Controllers as C
import Snap.App
import Snap.Http.Server hiding (Config)
import Snap.Util.FileServe
-- | Run the server.
runServer :: Config -> Pool -> IO ()
runServer config pool = do
    -- Make sure Unicode rendering works regardless of the host locale.
    setUnicodeLocale "en_US"
    httpServe server (serve config pool)
  -- NOTE(review): the port is hard-coded; consider moving it into
  -- 'Config'.
  where server = setPort 10001 defaultConfig
-- | Serve the controllers.
--
-- Static asset directories come first, then the dynamic handlers.
-- (A duplicate @"/js/"@ entry that used to appear here was removed;
-- it was identical to the first one and therefore dead.)
serve :: Config -> Pool -> Snap ()
serve config pool = route routes where
  routes = [("/js/",serveDirectory "static/js")
           ,("/css/",serveDirectory "static/css")
           ,("/browse/:channel",run C.browse)
           ,("/nick-cloud/:channel",run C.nickCloud)
           ,("/social",run C.socialGraph)
           ,("/day/:channel/:year/:month/:day",run (C.browseDay False))
           ,("/day/:channel/today/:mode",run (C.browseDay True))
           ,("/day/:channel/today",run (C.browseDay True))
           ,("/nick/:nick",run C.nickProfile)
           ,("/nicks/:channel/:mode",run C.allNicks)
           ,("/nicks/:channel",run C.allNicks)
           ,("/quotes.rss",run C.quotes)
           ,("/pdfs/:channel/:unique",run C.pdfs)
           ,("/pdfs/:channel",run C.pdfs)
           ,("/stats/:channel",run C.stats)
           ,("/calendar/:channel",run C.calendar)
           ,("/:channel",run C.stats)
           ,("/selection/:channel",run C.browseSpecified)
           ,("/export/:filename",run C.export)
           ,("/",run C.overview)
           ]
  -- Adapt a controller to a Snap handler with our state, config, pool.
  run = runHandler PState config pool
| plow-technologies/ircbrowse | src/Ircbrowse/Server.hs | bsd-3-clause | 1,704 | 0 | 12 | 434 | 465 | 261 | 204 | 37 | 1 |
-- Module : Network.AWS.Data
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
-- | Serialisation classes and primitives for the various
-- formats used to communicate with AWS.
module Network.AWS.Data
(
-- * ByteString
module Network.AWS.Data.Internal.ByteString
, module Network.AWS.Data.Internal.Base64
-- * Text
, module Network.AWS.Data.Internal.Text
-- * Numeric
, module Network.AWS.Data.Internal.Numeric
-- * Time
, module Network.AWS.Data.Internal.Time
-- * Sensitive
, module Network.AWS.Data.Internal.Sensitive
-- * HTTP
-- ** Body
, module Network.AWS.Data.Internal.Body
-- ** Headers
, module Network.AWS.Data.Internal.Header
-- ** Path
, module Network.AWS.Data.Internal.Path
-- ** Query
, module Network.AWS.Data.Internal.Query
-- ** URI
, module Network.AWS.Data.Internal.URI
-- * XML
, module Network.AWS.Data.Internal.XML
-- * JSON
, module Network.AWS.Data.Internal.JSON
-- * Collections
, module Network.AWS.Data.Internal.List
, module Network.AWS.Data.Internal.Map
) where
import Network.AWS.Data.Internal.Base64
import Network.AWS.Data.Internal.Body
import Network.AWS.Data.Internal.ByteString
import Network.AWS.Data.Internal.Header
import Network.AWS.Data.Internal.JSON
import Network.AWS.Data.Internal.List
import Network.AWS.Data.Internal.Map
import Network.AWS.Data.Internal.Numeric
import Network.AWS.Data.Internal.Path
import Network.AWS.Data.Internal.Query
import Network.AWS.Data.Internal.Sensitive
import Network.AWS.Data.Internal.Text
import Network.AWS.Data.Internal.Time
import Network.AWS.Data.Internal.URI
import Network.AWS.Data.Internal.XML
| romanb/amazonka | core/src/Network/AWS/Data.hs | mpl-2.0 | 2,135 | 0 | 5 | 404 | 289 | 226 | 63 | 32 | 0 |
{----------------------------------------------------------------------------
Abstract syntax of JOOS, based on:
David A. Watt. JOOS action semantics. Version 1, available from
http://www.dcs.gla.ac.uk/~daw/publications/JOOS.ps, October 1997.
Modifications:
o StatFocus
o StringLiterals
----------------------------------------------------------------------------}
module Datatypes where
import TermRep
import Monad
-- | Assignment of an expression to a named variable.
data Assignment = Assignment Identifier Expression
  deriving (Eq, Show)

-- | @new C(args)@ instance creation.
data InstanceCreation = InstanceCreation Identifier Arguments
  deriving (Eq, Show)

-- | A method call: either on the result of an expression, or on
-- @super@.
data MethodInvocation = ExpressionInvocation Expression Identifier Arguments
                      | SuperInvocation Identifier Arguments
  deriving (Eq, Show)

-- | Actual argument list of a call.
data Arguments = Arguments [Expression]
  deriving (Eq, Show)
-- | JOOS expressions.
data Expression = Literal Literal
                | Identifier Identifier
                | This
                | PrefixExpr PrefixOperator Expression
                | InfixExpr Expression InfixOperator Expression
                | AndOrExpr Expression AndOr Expression
                | InstanceOf Expression Identifier
                | TypeCast Type Expression
                | BracketExpr Expression
                | AssignmentExpr Assignment
                | InstanceCreationExpr InstanceCreation
                | MethodInvocationExpr MethodInvocation
  deriving (Eq, Show)

-- | Short-circuiting boolean connectives.
data AndOr = AND | OR
  deriving (Eq, Show)

-- | Unary prefix operators.
-- NOTE(review): what 'Neg' and 'Fac' denote exactly is not evident
-- from this excerpt -- confirm against the JOOS grammar referenced in
-- the module header.
data PrefixOperator = Neg | Fac
  deriving (Eq, Show)

-- | Binary infix operators: comparisons and arithmetic.
data InfixOperator = Eq | NEQ | Lt | Gt | LEQ | GEQ
                   | PLUS | MINUS | MUL | DIV | MOD
  deriving (Eq, Show)
-- | Literal values.
data Literal = BooleanLit BooleanLiteral
             | IntegerLit IntegerLiteral
             | Null
             | StringLit StringLiteral
  deriving (Eq, Show)

-- | Boolean literals.
data BooleanLiteral = TRUE | FALSE
  deriving (Eq, Show)

type IntegerLiteral = Integer
type StringLiteral = String
type Identifier = String
-- | A block: local variable declarations followed by statements.
data BlockStatements = BlockStatements [VariableDeclaration] [Statement]
  deriving (Eq, Show)

-- | JOOS statements.
data Statement = Skip
               | Block BlockStatements
               | AssignmentStat Assignment
               | InstanceCreationStat InstanceCreation
               | MethodInvocationStat MethodInvocation
               | ReturnStat (Maybe Expression)
               | IfStat Expression Statement Statement
               | WhileStat Expression Statement
               --- Additions
               -- Marks a statement as the current focus of interest
               -- (an extension to Watt's grammar; see module header).
               | StatFocus Statement
  deriving (Eq, Show)
-- | Class declaration: optional @final@, class name, superclass name,
-- fields, a single constructor, and methods.
data ClassDeclaration = ClassDecl FinalOpt Identifier Identifier
                                  [FieldDeclaration]
                                  ConstructorDeclaration
                                  [MethodDeclaration]
  deriving (Eq, Show)

-- | Whether a class is declared @final@.
type FinalOpt = Bool

-- | Field declaration: type and name.
data FieldDeclaration = FieldDecl Type Identifier
  deriving (Eq, Show)

-- | Constructor declaration: name, formal parameters, arguments for
-- the superclass constructor call, and the body.
data ConstructorDeclaration
  = ConstructorDecl Identifier FormalParameters
                    Arguments BlockStatements
  deriving (Eq, Show)

-- | Method declaration: optional return type ('Nothing' = @void@),
-- name, formal parameters and body.
data MethodDeclaration = MethodDecl (Maybe Type) Identifier FormalParameters
                                    BlockStatements
  deriving (Eq, Show)

-- | Formal parameter list.
data FormalParameters = FormalParams [FormalParameter]
  deriving (Eq, Show)

-- | A single formal parameter: type and name.
data FormalParameter = FormalParam Type Identifier
  deriving (Eq, Show)

-- | Local variable declaration: type and name.
data VariableDeclaration
  = VariableDecl Type Identifier
  deriving (Eq, Show)

-- | Types: @int@, @boolean@, or a class type.
data Type = INT | BOOLEAN | Type Identifier
  deriving (Eq, Show)
| forste/haReFork | StrategyLib-4.0-beta/examples/joos-padl02/Datatypes.hs | bsd-3-clause | 3,590 | 50 | 8 | 1,129 | 722 | 410 | 312 | 78 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Stack.Options
(Command(..)
,benchOptsParser
,buildOptsParser
,configOptsParser
,dockerOptsParser
,dockerCleanupOptsParser
,dotOptsParser
,execOptsParser
,globalOptsParser
,initOptsParser
,newOptsParser
,logLevelOptsParser
,abstractResolverOptsParser
,solverOptsParser
,testOptsParser
) where
import Control.Monad.Logger (LogLevel(..))
import Data.Char (isSpace, toLower)
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Read (decimal)
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Options.Applicative.Simple
import Options.Applicative.Types (readerAsk)
import Stack.Build.Types
import Stack.Docker
import qualified Stack.Docker as Docker
import Stack.Dot
import Stack.Init
import Stack.New (NewOpts(..))
import Stack.Types
-- | Command sum type for conditional arguments.  Determines defaults
-- such as whether Haddocks are built.
data Command
  = Build
  | Test
  | Haddock
  | Bench
  deriving (Eq)
-- | Parser for bench arguments.
benchOptsParser :: Parser BenchmarkOpts
benchOptsParser =
    BenchmarkOpts <$> optional benchArgs
  where
    benchArgs =
      strOption
        (  long "benchmark-arguments"
        <> metavar "BENCH_ARGS"
        <> help ("Forward BENCH_ARGS to the benchmark suite. " <>
                 "Supports templates from `cabal bench`"))
-- | Parser for build arguments.
--
-- Fixes two help-text defects: the @--executable-profiling@ flag was
-- described as "library profiling" (copy-paste error), and the
-- @--pedantic@ help text was missing its closing parenthesis.
buildOptsParser :: Command
                -> Bool -- ^ default copy-bins value
                -> Parser BuildOpts
buildOptsParser cmd defCopyBins =
    BuildOpts <$> target <*> libProfiling <*> exeProfiling <*>
    optimize <*> haddock <*> haddockDeps <*> finalAction <*> dryRun <*> ghcOpts <*>
    flags <*> copyBins <*> preFetch <*> onlySnapshot <*>
    fileWatch' <*> keepGoing <*> forceDirty
  where
    optimize =
      maybeBoolFlags "optimizations" "optimizations for TARGETs and all its dependencies" idm

    -- Positional TARGET arguments; empty means all packages.
    target =
      fmap (map T.pack)
           (many (strArgument
                    (metavar "TARGET" <>
                     help "If none specified, use all packages")))

    libProfiling =
      boolFlags False
                "library-profiling"
                "library profiling for TARGETs and all its dependencies"
                idm

    exeProfiling =
      boolFlags False
                "executable-profiling"
                "executable profiling for TARGETs and all its dependencies"
                idm

    -- Haddocks are built by default only for the haddock command.
    haddock =
      boolFlags (cmd == Haddock)
                "haddock"
                "building Haddocks"
                idm

    haddockDeps =
      if cmd == Haddock
         then maybeBoolFlags
                "haddock-deps"
                "building Haddocks for dependencies"
                idm
         else pure Nothing

    finalAction = pure DoNothing

    copyBins =
      boolFlags defCopyBins
                "copy-bins"
                "copying binaries to the local-bin-path (see 'stack path')"
                idm

    dryRun = flag False True (long "dry-run" <>
                              help "Don't build anything, just prepare to")

    -- --pedantic prepends -Wall/-Werror to any --ghc-options.
    ghcOpts = (++)
      <$> flag [] ["-Wall", "-Werror"]
            ( long "pedantic"
           <> help "Turn on -Wall and -Werror (note: option name may change in the future)"
            )
      <*> many (fmap T.pack
                     (strOption (long "ghc-options" <>
                                 metavar "OPTION" <>
                                 help "Additional options passed to GHC")))

    -- Collect all --flag options into one nested map.
    flags =
      fmap (Map.unionsWith Map.union) $ many
        (option readFlag
           ( long "flag"
          <> metavar "PACKAGE:[-]FLAG"
          <> help "Override flags set in stack.yaml (applies to local packages and extra-deps)"
           ))

    preFetch = flag False True
      (long "prefetch" <>
       help "Fetch packages necessary for the build immediately, useful with --dry-run")

    onlySnapshot = flag False True
      (long "only-snapshot" <>
       help "Only build packages for the snapshot database, not the local database")

    fileWatch' = flag False True
      (long "file-watch" <>
       help "Watch for changes in local files and automatically rebuild")

    keepGoing = maybeBoolFlags
      "keep-going"
      "continue running after a step fails (default: false for build, true for test/bench)"
      idm

    forceDirty = flag False True
      (long "force-dirty" <>
       help "Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change)")
-- | Parser for package:[-]flag
--
-- Accepts @PACKAGE:FLAG@, @PACKAGE:-FLAG@ (leading @-@ disables the
-- flag), or @*:[-]FLAG@ where @*@ applies the flag to every package
-- (represented by the 'Nothing' key).
readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool))
readFlag = do
    s <- readerAsk
    case break (== ':') s of
      (pn, ':':mflag) -> do
        -- "*" maps to Nothing (all packages); anything else must be a
        -- valid package name.
        pn' <-
          case parsePackageNameFromString pn of
            Nothing
              | pn == "*" -> return Nothing
              | otherwise -> readerError $ "Invalid package name: " ++ pn
            Just x -> return $ Just x
        -- A leading '-' disables the flag.
        let (b, flagS) =
              case mflag of
                '-':x -> (False, x)
                _     -> (True, mflag)
        flagN <-
          case parseFlagNameFromString flagS of
            Nothing -> readerError $ "Invalid flag name: " ++ flagS
            Just x  -> return x
        return $ Map.singleton pn' $ Map.singleton flagN b
      _ -> readerError "Must have a colon"
-- | Command-line arguments parser for configuration.
--
-- NOTE: the lambda's argument order must stay in sync with the order of
-- the applicative chain below; each parser result is written into the
-- corresponding 'ConfigMonoid' field of an otherwise-'mempty' record.
configOptsParser :: Bool -> Parser ConfigMonoid
configOptsParser docker =
    (\opts systemGHC installGHC arch os jobs includes libs skipGHCCheck skipMsys localBin -> mempty
        { configMonoidDockerOpts = opts
        , configMonoidSystemGHC = systemGHC
        , configMonoidInstallGHC = installGHC
        , configMonoidSkipGHCCheck = skipGHCCheck
        , configMonoidArch = arch
        , configMonoidOS = os
        , configMonoidJobs = jobs
        , configMonoidExtraIncludeDirs = includes
        , configMonoidExtraLibDirs = libs
        , configMonoidSkipMsys = skipMsys
        , configMonoidLocalBinPath = localBin
        })
    <$> dockerOptsParser docker
    <*> maybeBoolFlags
            "system-ghc"
            "using the system installed GHC (on the PATH) if available and a matching version"
            idm
    <*> maybeBoolFlags
            "install-ghc"
            "downloading and installing GHC if necessary (can be done manually with stack setup)"
            idm
    <*> optional (strOption
            ( long "arch"
           <> metavar "ARCH"
           <> help "System architecture, e.g. i386, x86_64"
            ))
    <*> optional (strOption
            ( long "os"
           <> metavar "OS"
           <> help "Operating system, e.g. linux, windows"
            ))
    <*> optional (option auto
            ( long "jobs"
           <> short 'j'
           <> metavar "JOBS"
           <> help "Number of concurrent jobs to run"
            ))
    -- Repeatable directory options are collected into Sets of Text.
    <*> fmap (Set.fromList . map T.pack) (many $ strOption
            ( long "extra-include-dirs"
           <> metavar "DIR"
           <> help "Extra directories to check for C header files"
            ))
    <*> fmap (Set.fromList . map T.pack) (many $ strOption
            ( long "extra-lib-dirs"
           <> metavar "DIR"
           <> help "Extra directories to check for libraries"
            ))
    <*> maybeBoolFlags
            "skip-ghc-check"
            "skipping the GHC version and architecture check"
            idm
    <*> maybeBoolFlags
            "skip-msys"
            "skipping the local MSYS installation (Windows only)"
            idm
    <*> optional (strOption
            ( long "local-bin-path"
           <> metavar "DIR"
           <> help "Install binaries to DIR"
            ))
-- | Options parser configuration for Docker.
--
-- @showOptions@ controls whether the docker options are advertised in
-- @--help@ or kept internal/hidden (see 'hide' below).
dockerOptsParser :: Bool -> Parser DockerOptsMonoid
dockerOptsParser showOptions =
    DockerOptsMonoid
    <$> pure Nothing
    <*> maybeBoolFlags dockerCmdName
            "using a Docker container"
            hide
    -- --docker-repo and --docker-image are alternative ways to pick the
    -- image; if neither is given the field stays Nothing.
    <*> ((Just . DockerMonoidRepo) <$> option str (long (dockerOptName dockerRepoArgName) <>
                                                   hide <>
                                                   metavar "NAME" <>
                                                   help "Docker repository name") <|>
         (Just . DockerMonoidImage) <$> option str (long (dockerOptName dockerImageArgName) <>
                                                    hide <>
                                                    metavar "IMAGE" <>
                                                    help "Exact Docker image ID (overrides docker-repo)") <|>
         pure Nothing)
    <*> maybeBoolFlags (dockerOptName dockerRegistryLoginArgName)
            "registry requires login"
            hide
    <*> maybeStrOption (long (dockerOptName dockerRegistryUsernameArgName) <>
                        hide <>
                        metavar "USERNAME" <>
                        help "Docker registry username")
    <*> maybeStrOption (long (dockerOptName dockerRegistryPasswordArgName) <>
                        hide <>
                        metavar "PASSWORD" <>
                        help "Docker registry password")
    <*> maybeBoolFlags (dockerOptName dockerAutoPullArgName)
            "automatic pulling latest version of image"
            hide
    <*> maybeBoolFlags (dockerOptName dockerDetachArgName)
            "running a detached Docker container"
            hide
    <*> maybeBoolFlags (dockerOptName dockerPersistArgName)
            "not deleting container after it exits"
            hide
    <*> maybeStrOption (long (dockerOptName dockerContainerNameArgName) <>
                        hide <>
                        metavar "NAME" <>
                        help "Docker container name")
    <*> argsOption (long (dockerOptName dockerRunArgsArgName) <>
                    hide <>
                    value [] <>
                    metavar "'ARG1 [ARG2 ...]'" <>
                    help "Additional options to pass to 'docker run'")
    -- Typo fix: help text previously read "mutliple".
    <*> many (option auto (long (dockerOptName dockerMountArgName) <>
                           hide <>
                           metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <>
                           help ("Mount volumes from host in container " ++
                                 "(may specify multiple times)")))
    <*> maybeStrOption (long (dockerOptName dockerDatabasePathArgName) <>
                        hide <>
                        metavar "PATH" <>
                        help "Location of image usage tracking database")
  where
    -- Every docker option is namespaced under the docker command name.
    dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName
    maybeStrOption = optional . option str
    -- Hide the options from --help output unless explicitly requested.
    hide = if showOptions
              then idm
              else internal <> hidden
-- | Parser for docker cleanup arguments.
dockerCleanupOptsParser :: Parser Docker.CleanupOpts
dockerCleanupOptsParser =
    Docker.CleanupOpts <$>
    (flag' Docker.CleanupInteractive
           (short 'i' <>
            long "interactive" <>
            help "Show cleanup plan in editor and allow changes (default)") <|>
     flag' Docker.CleanupImmediate
           (short 'y' <>
            long "immediate" <>
            help "Immediately execute cleanup plan") <|>
     flag' Docker.CleanupDryRun
           (short 'n' <>
            long "dry-run" <>
            help "Display cleanup plan but do not execute") <|>
     pure Docker.CleanupInteractive) <*>
    opt (Just 14) "known-images" "LAST-USED" <*>
    opt Nothing "unknown-images" "CREATED" <*>
    opt (Just 0) "dangling-images" "CREATED" <*>
    opt Nothing "stopped-containers" "CREATED" <*>
    opt Nothing "running-containers" "CREATED"
  where -- Three-way age option: @--NAME N@ (remove items older than N
        -- days), @--no-NAME@ (never remove), or the given default.
        opt def' name mv =
            fmap Just
                 (option auto
                         (long name <>
                          metavar (mv ++ "-DAYS-AGO") <>
                          help ("Remove " ++
                                toDescr name ++
                                " " ++
                                map toLower (toDescr mv) ++
                                " N days ago" ++
                                case def' of
                                    Just n -> " (default " ++ show n ++ ")"
                                    Nothing -> ""))) <|>
            flag' Nothing
                  (long ("no-" ++ name) <>
                   help ("Do not remove " ++
                         toDescr name ++
                         case def' of
                             Just _ -> ""
                             Nothing -> " (default)")) <|>
            pure def'
        -- Turn an option name like "known-images" into prose "known images".
        toDescr = map (\c -> if c == '-' then ' ' else c)
-- | Parser for arguments to `stack dot`
dotOptsParser :: Parser DotOpts
dotOptsParser = DotOpts
    <$> includeExternal
    <*> includeBase
    <*> depthLimit
    -- No --prune given means an empty prune set.
    <*> fmap (maybe Set.empty Set.fromList . fmap splitNames) prunedPkgs
  where includeExternal = boolFlags False
            "external"
            "inclusion of external dependencies"
            idm
        includeBase = boolFlags True
            "include-base"
            "inclusion of dependencies on base"
            idm
        depthLimit =
            optional (option auto
                (long "depth" <>
                 metavar "DEPTH" <>
                 help ("Limit the depth of dependency resolution " <>
                       "(Default: No limit)")))
        prunedPkgs = optional (strOption
            (long "prune" <>
             metavar "PACKAGES" <>
             help ("Prune each package name " <>
                   "from the comma separated list " <>
                   "of package names PACKAGES")))
        -- Split on commas and trim leading whitespace; note each name is
        -- also cut at the first interior space.
        splitNames :: String -> [String]
        splitNames = map (takeWhile (not . isSpace) . dropWhile isSpace) . splitOn ","
-- | Parser for exec command
execOptsParser :: Maybe String -- ^ command
               -> Parser ExecOpts
execOptsParser mcmd =
    ExecOpts
        -- If a command was supplied by the caller, use it; otherwise
        -- parse it as the first positional argument.
        <$> maybe eoCmdParser pure mcmd
        <*> eoArgsParser
        <*> (eoPlainParser <|>
             ExecOptsEmbellished
                 <$> eoEnvSettingsParser
                 <*> eoPackagesParser)
  where
    eoCmdParser :: Parser String
    eoCmdParser = strArgument (metavar "CMD")
    eoArgsParser :: Parser [String]
    eoArgsParser = many (strArgument (metavar "-- ARGS (e.g. stack ghc -- X.hs -o x)"))
    eoEnvSettingsParser :: Parser EnvSettings
    eoEnvSettingsParser = EnvSettings
        <$> pure True
        <*> boolFlags True
                "ghc-package-path"
                "setting the GHC_PACKAGE_PATH variable for the subprocess"
                idm
        <*> boolFlags True
                "stack-exe"
                "setting the STACK_EXE environment variable to the path for the stack executable"
                idm
    eoPackagesParser :: Parser [String]
    eoPackagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
    eoPlainParser :: Parser ExecOptsExtra
    eoPlainParser = flag' ExecOptsPlain
                          (long "plain" <>
                           help "Use an unmodified environment (only useful with Docker)")
-- | Parser for global command-line options.
globalOptsParser :: Bool -> Parser GlobalOpts
globalOptsParser defaultTerminal =
    GlobalOpts <$> logLevelOptsParser <*>
    -- False here is passed through to 'dockerOptsParser' as its
    -- show-options argument.
    configOptsParser False <*>
    optional abstractResolverOptsParser <*>
    flag
        defaultTerminal
        False
        (long "no-terminal" <>
         help
             "Override terminal detection in the case of running in a false terminal") <*>
    (optional (strOption
        (long "stack-yaml" <>
         metavar "STACK-YAML" <>
         help "Override project stack.yaml file (overrides any STACK_YAML environment variable)")))
-- | Parser for @stack init@ options.
initOptsParser :: Parser InitOpts
initOptsParser =
    InitOpts <$> method <*> overwrite <*> fmap not ignoreSubDirs
  where
    ignoreSubDirs = flag False
                         True
                         (long "ignore-subdirs" <>
                          help "Do not search for .cabal files in sub directories")
    overwrite = flag False
                     True
                     (long "force" <>
                      help "Force overwriting of an existing stack.yaml if it exists")
    -- --solver or --resolver take precedence; otherwise fall back to a
    -- snapshot preference (defaulting to PrefNone).
    method = solver
         <|> (MethodResolver <$> resolver)
         <|> (MethodSnapshot <$> snapPref)
    solver =
        flag' MethodSolver
              (long "solver" <>
               help "Use a dependency solver to determine dependencies")
    snapPref =
        flag' PrefLTS
              (long "prefer-lts" <>
               help "Prefer LTS snapshots over Nightly snapshots") <|>
        flag' PrefNightly
              (long "prefer-nightly" <>
               help "Prefer Nightly snapshots over LTS snapshots") <|>
        pure PrefNone
    resolver = option readAbstractResolver
        (long "resolver" <>
         metavar "RESOLVER" <>
         help "Use the given resolver, even if not all dependencies are met")
-- | Parse for a logging level.
logLevelOptsParser :: Parser LogLevel
logLevelOptsParser =
    fmap parse
         (strOption (long "verbosity" <>
                     metavar "VERBOSITY" <>
                     help "Verbosity: silent, error, warn, info, debug")) <|>
    flag defaultLogLevel
         verboseLevel
         (short 'v' <> long "verbose" <>
          help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\""))
  where verboseLevel = LevelDebug
        showLevel l =
            case l of
                LevelDebug -> "debug"
                LevelInfo -> "info"
                LevelWarn -> "warn"
                LevelError -> "error"
                LevelOther x -> T.unpack x
        -- NOTE(review): the help text advertises "silent", but there is
        -- no explicit case for it below, so it becomes
        -- 'LevelOther "silent"' -- presumably interpreted downstream;
        -- confirm.
        parse s =
            case s of
                "debug" -> LevelDebug
                "info" -> LevelInfo
                "warn" -> LevelWarn
                "error" -> LevelError
                _ -> LevelOther (T.pack s)
-- | Parser for the resolver
abstractResolverOptsParser :: Parser AbstractResolver
abstractResolverOptsParser =
    option readAbstractResolver
        (long "resolver" <>
         metavar "RESOLVER" <>
         help "Override resolver in project file")

-- | Reader accepting @global@, @nightly@, @lts@, @lts-N@ (major version
-- only), or any concrete resolver string 'parseResolverText' understands.
readAbstractResolver :: ReadM AbstractResolver
readAbstractResolver = do
    s <- readerAsk
    case s of
        "global" -> return ARGlobal
        "nightly" -> return ARLatestNightly
        "lts" -> return ARLatestLTS
        -- "lts-N": the suffix must be entirely numeric (empty remainder).
        'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x ->
            return $ ARLatestLTSMajor x'
        _ ->
            case parseResolverText $ T.pack s of
                Left e -> readerError $ show e
                Right x -> return $ ARResolver x
-- | Parser for @solverCmd@
solverOptsParser :: Parser Bool
solverOptsParser = boolFlags False
    "modify-stack-yaml"
    "Automatically modify stack.yaml with the solver's recommendations"
    idm

-- | Parser for test arguments.
testOptsParser :: Parser TestOpts
testOptsParser = TestOpts
    <$> boolFlags True
        "rerun-tests"
        "running already successful tests"
        idm
    -- Omitted --test-arguments means no extra arguments.
    <*> fmap (fromMaybe [])
        (optional (argsOption(long "test-arguments" <>
                              metavar "TEST_ARGS" <>
                              help "Arguments passed in to the test suite program")))
    <*> flag False
             True
             (long "coverage" <>
              help "Generate a code coverage report")
    <*> flag False
             True
             (long "no-run-tests" <>
              help "Disable running of tests. (Tests will still be built.)")
-- | Parser for @stack new@ arguments.
newOptsParser :: Parser NewOpts
newOptsParser =
    NewOpts <$> templateRepositoryParser
            <*> optional templateParser
            <*> many templateArgParser
            <*> initOptsParser
  where
    templateRepositoryParser = strOption
        $ long "template-url-base"
       <> metavar "URL"
       <> value "raw.githubusercontent.com/commercialhaskell/stack-templates/master/"
    -- TODO(DanBurton): reject argument if it has a colon.
    templateParser = strArgument $ metavar "TEMPLATE"
    -- TODO(DanBurton): reject argument if it doesn't have a colon.
    templateArgParser = strArgument $ metavar "ARG:VAL"
| wskplho/stack | src/Stack/Options.hs | bsd-3-clause | 21,355 | 0 | 28 | 8,236 | 3,825 | 1,907 | 1,918 | 486 | 9 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE MultiParamTypeClasses, Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Groups.Examples
-- Copyright : Quentin Moser <moserq@gmail.com>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : orphaned
-- Stability : unstable
-- Portability : unportable
--
-- Example layouts for "XMonad.Layout.Groups".
--
-----------------------------------------------------------------------------
module XMonad.Layout.Groups.Examples ( -- * Usage
-- $usage
-- * Example: Row of columns
-- $example1
rowOfColumns
, zoomColumnIn
, zoomColumnOut
, zoomColumnReset
, toggleColumnFull
, zoomWindowIn
, zoomWindowOut
, zoomWindowReset
, toggleWindowFull
-- * Example: Tiled tab groups
-- $example2
, tallTabs
, mirrorTallTabs
, fullTabs
, TiledTabsConfig(..)
, defaultTiledTabsConfig
, increaseNMasterGroups
, decreaseNMasterGroups
, shrinkMasterGroups
, expandMasterGroups
, nextOuterLayout
-- * Useful re-exports and utils
, module XMonad.Layout.Groups.Helpers
, shrinkText
, defaultTheme
, GroupEQ(..)
, zoomRowG
) where
import XMonad hiding ((|||))
import qualified XMonad.Layout.Groups as G
import XMonad.Layout.Groups.Helpers
import XMonad.Layout.ZoomRow
import XMonad.Layout.Tabbed
import XMonad.Layout.Named
import XMonad.Layout.Renamed
import XMonad.Layout.LayoutCombinators
import XMonad.Layout.Decoration
import XMonad.Layout.Simplest
-- $usage
-- This module contains example 'G.Groups'-based layouts.
-- You can either import this module directly, or look at its source
-- for ideas of how "XMonad.Layout.Groups" may be used.
--
-- You can use the contents of this module by adding
--
-- > import XMonad.Layout.Groups.Examples
--
-- to the top of your @.\/.xmonad\/xmonad.hs@.
--
-- For more information on using any of the layouts, jump directly
-- to its \"Example\" section.
--
-- Whichever layout you choose to use, you will probably want to be
-- able to move focus and windows between groups in a consistent
-- manner. For this, you should take a look at the functions from
-- the "XMonad.Layout.Groups.Helpers" module, which are all
-- re-exported by this module.
--
-- For more information on how to extend your layour hook and key bindings, see
-- "XMonad.Doc.Extending".
-- * Helper: ZoomRow of Group elements
-- | Compare two 'Group's by comparing the ids of their layouts.
data GroupEQ a = GroupEQ
    deriving (Show, Read)

instance Eq a => EQF GroupEQ (G.Group l a) where
    eq _ (G.G l1 _) (G.G l2 _) = G.sameID l1 l2

-- | A 'ZoomRow' of groups, with equality decided by 'GroupEQ'
-- (i.e. by layout id rather than by group contents).
zoomRowG :: (Eq a, Show a, Read a, Show (l a), Read (l a))
            => ZoomRow GroupEQ (G.Group l a)
zoomRowG = zoomRowWith GroupEQ
-- * Example 1: Row of columns
-- $example1
-- A layout that arranges windows in a row of columns. It uses 'ZoomRow's for
-- both, allowing you to:
--
-- * Freely change the proportion of the screen width allocated to each column
--
-- * Freely change the proportion of a column's heigth allocated to each of its windows
--
-- * Set a column to occupy the whole screen space whenever it has focus
--
-- * Set a window to occupy its whole column whenever it has focus
--
-- to use this layout, add 'rowOfColumns' to your layout hook, for example:
--
-- > myLayout = rowOfColumns
--
-- To be able to change the sizes of columns and windows, you can create key bindings
-- for the relevant actions:
--
-- > ((modMask, xK_minus), zoomWindowOut)
--
-- and so on.
-- | A row of resizable columns; each column is itself a zoomable
-- (mirrored) row of windows.
rowOfColumns = G.group column zoomRowG
    where column = renamed [CutWordsLeft 2, PrependWords "ZoomColumn"] $ Mirror zoomRow
-- | Widen the focused column.
zoomColumnIn :: X ()
zoomColumnIn = sendMessage (G.ToEnclosing (SomeMessage zoomIn))

-- | Narrow the focused column.
zoomColumnOut :: X ()
zoomColumnOut = sendMessage (G.ToEnclosing (SomeMessage zoomOut))

-- | Restore the focused column to its default width.
zoomColumnReset :: X ()
zoomColumnReset = sendMessage (G.ToEnclosing (SomeMessage zoomReset))

-- | Toggle whether the focused column should claim the whole
-- screen whenever it has focus.
toggleColumnFull :: X ()
toggleColumnFull = sendMessage (G.ToEnclosing (SomeMessage ZoomFullToggle))

-- | Grow the focused window within its column.
zoomWindowIn :: X ()
zoomWindowIn = sendMessage zoomIn

-- | Shrink the focused window within its column.
zoomWindowOut :: X ()
zoomWindowOut = sendMessage zoomOut

-- | Restore the focused window to its default height.
zoomWindowReset :: X ()
zoomWindowReset = sendMessage zoomReset

-- | Toggle whether the focused window should fill its whole
-- column whenever it has focus.
toggleWindowFull :: X ()
toggleWindowFull = sendMessage ZoomFullToggle
-- * Example 2: Tabbed groups in a Tall/Full layout.
-- $example2
-- A layout which arranges windows into tabbed groups, and the groups
-- themselves according to XMonad's default algorithm
-- (@'Tall' ||| 'Mirror' 'Tall' ||| 'Full'@). As their names
-- indicate, 'tallTabs' starts as 'Tall', 'mirrorTallTabs' starts
-- as 'Mirror' 'Tall' and 'fullTabs' starts as 'Full', but in any
-- case you can freely switch between the three afterwards.
--
-- You can use any of these three layouts by including it in your layout hook.
-- You will need to provide it with a 'TiledTabsConfig' containing the size
-- parameters for 'Tall' and 'Mirror' 'Tall', and the shrinker and decoration theme
-- for the tabs. If you're happy with defaults, you can use 'defaultTiledTabsConfig':
--
-- > myLayout = tallTabs defaultTiledTabsConfig
--
-- To be able to increase\/decrease the number of master groups and shrink\/expand
-- the master area, you can create key bindings for the relevant actions:
--
-- > ((modMask, xK_h), shrinkMasterGroups)
--
-- and so on.
-- | Configuration data for the "tiled tab groups" layout
data TiledTabsConfig s = TTC { vNMaster :: Int        -- ^ masters for the vertical ('Tall') layout
                             , vRatio :: Rational     -- ^ master area fraction (vertical)
                             , vIncrement :: Rational -- ^ resize step (vertical)
                             , hNMaster :: Int        -- ^ masters for the horizontal ('Mirror' 'Tall') layout
                             , hRatio :: Rational     -- ^ master area fraction (horizontal)
                             , hIncrement :: Rational -- ^ resize step (horizontal)
                             , tabsShrinker :: s      -- ^ text shrinker for tab labels
                             , tabsTheme :: Theme }   -- ^ decoration theme for tabs

defaultTiledTabsConfig :: TiledTabsConfig DefaultShrinker
defaultTiledTabsConfig = TTC 1 0.5 (3/100) 1 0.5 (3/100) shrinkText defaultTheme

-- The three entry points differ only in which outer layout comes first.
fullTabs c = _tab c $ G.group _tabs $ Full ||| _vert c ||| _horiz c
tallTabs c = _tab c $ G.group _tabs $ _vert c ||| _horiz c ||| Full
mirrorTallTabs c = _tab c $ G.group _tabs $ _horiz c ||| Full ||| _vert c

_tabs = named "Tabs" Simplest

-- Add tab decorations using the shrinker and theme from the config.
_tab c l = renamed [CutWordsLeft 1] $ addTabs (tabsShrinker c) (tabsTheme c) l

_vert c = named "Vertical" $ Tall (vNMaster c) (vIncrement c) (vRatio c)
_horiz c = named "Horizontal" $ Mirror $ Tall (hNMaster c) (hIncrement c) (hRatio c)
-- | Add one group to the master area.
increaseNMasterGroups :: X ()
increaseNMasterGroups = sendMessage (G.ToEnclosing (SomeMessage (IncMasterN 1)))

-- | Remove one group from the master area.
decreaseNMasterGroups :: X ()
decreaseNMasterGroups = sendMessage (G.ToEnclosing (SomeMessage (IncMasterN (-1))))

-- | Make the master area smaller.
shrinkMasterGroups :: X ()
shrinkMasterGroups = sendMessage (G.ToEnclosing (SomeMessage Shrink))

-- | Make the master area larger.
expandMasterGroups :: X ()
expandMasterGroups = sendMessage (G.ToEnclosing (SomeMessage Expand))

-- | Cycle to the next outer layout algorithm.
nextOuterLayout :: X ()
nextOuterLayout = sendMessage (G.ToEnclosing (SomeMessage NextLayout))
| MasseR/xmonadcontrib | XMonad/Layout/Groups/Examples.hs | bsd-3-clause | 8,792 | 0 | 10 | 2,622 | 1,166 | 673 | 493 | 89 | 1 |
-- Example.hs -- Examples from HUnit user's guide
--
-- For more examples, check out the tests directory. It contains unit tests
-- for HUnit.
module Main where
import Test.HUnit
-- | Example function under test: pairs the constant 1 with its argument.
foo :: Int -> (Int, Int)
foo n = (1, n)

-- | First stage of a two-stage example computation.
partA :: Int -> IO (Int, Int)
partA n = return (n + 2, n + 3)

-- | Second stage: checks the intermediate value against a threshold.
partB :: Int -> IO Bool
partB n = return (n > 5)
test1 :: Test
-- NOTE(review): with 'foo' as defined above, @foo 3 == (1,3)@, so this
-- assertion fails when run -- it demonstrates HUnit's failure reporting.
test1 = TestCase (assertEqual "for (foo 3)," (1,2) (foo 3))

test2 :: Test
test2 = TestCase (do (x,y) <- partA 3
                     assertEqual "for the first result of partA," 5 x
                     b <- partB y
                     assertBool ("(partB " ++ show y ++ ") failed") b)

-- | The two labelled cases run by 'main'.
tests :: Test
tests = TestList [TestLabel "test1" test1, TestLabel "test2" test2]

-- | The same cases rewritten with HUnit's shorthand operators.
tests' :: Test
tests' = test [ "test1" ~: "(foo 3)" ~: (1,2) ~=? (foo 3),
                "test2" ~: do (x, y) <- partA 3
                              assertEqual "for the first result of partA," 5 x
                              partB y @? "(partB " ++ show y ++ ") failed" ]

-- | Run both test groups, reporting counts to the terminal.
main :: IO Counts
main = do runTestTT tests
          runTestTT tests'
| Numberartificial/workflow | snipets/.stack-work/install/x86_64-osx/lts-8.12/8.0.2/share/x86_64-osx-ghc-8.0.2/HUnit-1.2.5.2/examples/Example.hs | mit | 1,073 | 0 | 13 | 346 | 372 | 192 | 180 | 25 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>OAST Support Add-on</title>
<maps>
<homeID>oast</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/oast/src/main/javahelp/org/zaproxy/addon/oast/resources/help_id_ID/helpset_id_ID.hs | apache-2.0 | 965 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.WorkspaceCursors
-- Copyright : (c) 2009 Adam Vogt <vogt.adam@gmail.com>
-- License : BSD
--
-- Maintainer : Adam Vogt
-- Stability : unstable
-- Portability : unportable
--
-- Like "XMonad.Actions.Plane" for an arbitrary number of dimensions.
-----------------------------------------------------------------------------
module XMonad.Actions.WorkspaceCursors
(
-- * Usage
-- $usage
focusDepth
,makeCursors
,toList
,workspaceCursors
,WorkspaceCursors
,getFocus
-- * Modifying the focus
,modifyLayer
,modifyLayer'
,shiftModifyLayer,shiftLayer
-- * Functions to pass to 'modifyLayer'
,focusNth'
,noWrapUp,noWrapDown,
-- * Todo
-- $todo
-- * Types
Cursors,
) where
import qualified XMonad.StackSet as W
import XMonad.Actions.FocusNth(focusNth')
import XMonad.Layout.LayoutModifier(ModifiedLayout(..),
LayoutModifier(handleMess, redoLayout))
import XMonad(Typeable, Message, WorkspaceId, X, XState(windowset),
fromMessage, sendMessage, windows, gets)
import Control.Monad((<=<), guard, liftM, liftM2, when)
import Control.Applicative((<$>))
import Data.Foldable(Foldable(foldMap), toList)
import Data.Maybe(fromJust, listToMaybe)
import Data.Monoid(Monoid(mappend, mconcat))
import Data.Traversable(sequenceA)
-- $usage
--
-- Here is an example config:
--
-- > import XMonad
-- > import XMonad.Actions.WorkspaceCursors
-- > import XMonad.Hooks.DynamicLog
-- > import XMonad.Util.EZConfig
-- > import qualified XMonad.StackSet as W
-- >
-- > main = do
-- > x <- xmobar conf
-- > xmonad x
-- >
-- > conf = additionalKeysP defaultConfig
-- > { layoutHook = workspaceCursors myCursors $ layoutHook defaultConfig
-- > , workspaces = toList myCursors } $
-- > [("M-"++shift++control++[k], f direction depth)
-- > | (f,shift) <- zip [modifyLayer,shiftModifyLayer] ["","S-"]
-- > , (direction,control) <- zip [W.focusUp',W.focusDown'] ["C-",""]
-- > , (depth,k) <- zip (reverse [1..focusDepth myCursors]) "asdf"]
-- > ++ moreKeybindings
-- >
-- > moreKeybindings = []
-- >
-- > myCursors = makeCursors $ map (map (\x -> [x])) [ "1234", "abc", "xyz"]
-- > -- myCursors = makeCursors [["wsA","wsB","wsC"],["-alpha-","-beta-","-gamma-"],["x","y"]]
-- $todo
--
-- * Find and document how to raise the allowable length of arguments:
-- restoring xmonad's state results in: @xmonad: executeFile: resource
-- exhausted (Argument list too long)@ when you specify more than about 50
-- workspaces. Or change it such that workspaces are created when you try to
-- view it.
--
-- * Function for pretty printing for DynamicLog that groups workspaces by
-- common prefixes
--
-- * Examples of adding workspaces to the cursors, having them appear multiple
-- times for being able to show jumping to some n'th multiple workspace
-- | makeCursors requires a nonempty string, and each sublist must be nonempty
makeCursors :: [[String]] -> Cursors String
makeCursors [] = error "Workspace Cursors cannot be empty"
makeCursors a = concat . reverse <$> foldl addDim x xs
    -- 'head'/'tail' are safe: the [] case is matched above.
    where x = end $ map return $ head a
          xs = map (map return) $ tail a
    -- this could probably be simplified, but this true:
    -- toList . makeCursors == map (concat . reverse) . sequence . reverse . map (map (:[]))
    -- the strange order is used because it makes the regular M-1..9
    -- bindings change the prefixes first

-- | Add one dimension: prefix every name in the existing cursors with
-- each of the given prefixes, producing one sub-cursor per prefix.
addDim :: (Monoid a) => Cursors a -> [a] -> Cursors a
addDim prev prefixes = Cons . fromJust . W.differentiate
    $ map ((<$> prev) . mappend) prefixes

-- | Wrap a nonempty list of names as the innermost (leaf) layer.
end :: [a] -> Cursors a
end = Cons . fromJust . W.differentiate . map End
-- | A tree-shaped zipper: each 'Cons' layer is a 'W.Stack' of
-- sub-cursors (one of which is focused), and each 'End' leaf carries a
-- workspace tag.
data Cursors a
    = Cons (W.Stack (Cursors a))
    | End a deriving (Eq,Show,Read,Typeable)

instance Foldable Cursors where
    foldMap f (End x) = f x
    -- Fold the focus first, then the up-list (reversed back into
    -- left-to-right order) followed by the down-list.
    foldMap f (Cons (W.Stack x y z)) = foldMap f x `mappend` mconcat (map (foldMap f) $ reverse y ++ z)

instance Functor Cursors where
    fmap f (End a) = End $ f a
    fmap f (Cons (W.Stack x y z)) = Cons $ W.Stack (fmap f x) (fmap (fmap f) y) (fmap (fmap f) z)
-- | Enumerate every refocusing of the cursor tree whose focused leaf
-- satisfies the predicate (list monad: one result per valid choice at
-- each layer).
changeFocus :: (Cursors t -> Bool) -> Cursors t -> [Cursors t]
changeFocus p (Cons x) = do
    choose <- chFocus p x
    foc <- changeFocus p $ W.focus choose
    return . Cons $ choose { W.focus = foc }
changeFocus p x = guard (p x) >> return x

-- | All rotations of the stack whose new focus satisfies the predicate.
chFocus :: (a -> Bool) -> W.Stack a -> [W.Stack a]
chFocus p st = filter (p . W.focus) $ zipWith const (iterate W.focusDown' st) (W.integrate st)

-- | The workspace tag at the focused leaf.
getFocus :: Cursors b -> b
getFocus (Cons x) = getFocus $ W.focus x
getFocus (End x) = x

-- This could be made more efficient, if the fact that the suffixes are grouped
-- | Refocus the cursors so the given tag is focused, if it exists.
focusTo :: (Eq t) => t -> Cursors t -> Maybe (Cursors t)
focusTo x = listToMaybe . filter ((x==) . getFocus) . changeFocus (const True)
-- | non-wrapping version of 'W.focusUp''
noWrapUp :: W.Stack t -> W.Stack t
noWrapUp (W.Stack t (l:ls) rs) = W.Stack l ls (t:rs)
noWrapUp x@(W.Stack _ [] _ ) = x

-- | non-wrapping version of 'W.focusDown''
noWrapDown :: W.Stack t -> W.Stack t
noWrapDown = reverseStack . noWrapUp . reverseStack
    where reverseStack (W.Stack t ls rs) = W.Stack t rs ls

-- | The number of 'Cons' layers above the leaves.
focusDepth :: Cursors t -> Int
focusDepth (Cons x) = 1 + focusDepth (W.focus x)
focusDepth (End _) = 0

-- | Apply a monadic stack transformation at the given depth (1 is the
-- current layer); deeper layers are reached through the focused child.
-- Leaves and out-of-range depths are returned unchanged.
descend :: Monad m =>(W.Stack (Cursors a) -> m (W.Stack (Cursors a)))-> Int-> Cursors a-> m (Cursors a)
descend f 1 (Cons x) = Cons `liftM` f x
descend f n (Cons x) | n > 1 = liftM Cons $ descend f (pred n) `onFocus` x
descend _ _ x = return x

-- | Apply a monadic action to the focus of a stack, keeping the rest.
onFocus :: (Monad m) => (a1 -> m a1) -> W.Stack a1 -> m (W.Stack a1)
onFocus f st = (\x -> st { W.focus = x}) `liftM` f (W.focus st)
-- | @modifyLayer@ is used to change the focus at a given depth
modifyLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors String)) -> Int -> X ()
modifyLayer f depth = modifyCursors (descend (return . f) depth)

-- | @shiftModifyLayer@ is the same as 'modifyLayer', but also shifts the
-- currently focused window to the new workspace
shiftModifyLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors WorkspaceId))-> Int-> X ()
shiftModifyLayer f = modifyLayer' $ \st -> do
    let st' = f st
    windows $ W.shift $ getFocus (Cons st')
    return st'

-- | @shiftLayer@ is the same as 'shiftModifyLayer', but the focus remains on
-- the current workspace.
shiftLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors WorkspaceId))-> Int-> X ()
shiftLayer f = modifyLayer' $ \st -> do
    -- Shift the window to the would-be destination, but return the
    -- original stack so focus does not move.
    windows $ W.shift $ getFocus $ Cons $ f st
    return st

-- | example usages are 'shiftLayer' and 'shiftModifyLayer'
modifyLayer' :: (W.Stack (Cursors String) -> X (W.Stack (Cursors String))) -> Int -> X ()
modifyLayer' f depth = modifyCursors (descend f depth)

-- | Send the cursor transformation to the 'WorkspaceCursors' layout
-- modifier, first switching the visible workspace via 'updateXMD'.
modifyCursors :: (Cursors String -> X (Cursors String)) -> X ()
modifyCursors = sendMessage . ChangeCursors . (liftM2 (>>) updateXMD return <=<)
-- | Layout-modifier wrapper holding the current cursor tree.
data WorkspaceCursors a = WorkspaceCursors (Cursors String)
    deriving (Typeable,Read,Show)

-- | The state is stored in the 'WorkspaceCursors' layout modifier. Put this as
-- your outermost modifier, unless you want different cursors at different
-- times (using "XMonad.Layout.MultiToggle")
workspaceCursors :: Cursors String -> l a -> ModifiedLayout WorkspaceCursors l a
workspaceCursors = ModifiedLayout . WorkspaceCursors

-- | Message carrying a cursor transformation to apply.
data ChangeCursors = ChangeCursors { unWrap :: Cursors String -> X (Cursors String) }
    deriving (Typeable)

instance Message ChangeCursors

-- | Greedily view the focused workspace when it is not already current.
updateXMD :: Cursors WorkspaceId -> X ()
updateXMD cs = do
    changed <- gets $ (getFocus cs /=) . W.currentTag . windowset
    when changed $ windows $ W.greedyView $ getFocus cs

instance LayoutModifier WorkspaceCursors a where
    -- Re-sync the stored cursors with the workspace that is actually
    -- visible (e.g. after the user switched workspaces by other means).
    redoLayout (WorkspaceCursors cs) _ _ arrs = do
        cws <- gets $ W.currentTag . windowset
        return (arrs,WorkspaceCursors <$> focusTo cws cs)
    handleMess (WorkspaceCursors cs) m =
        sequenceA $ fmap WorkspaceCursors . ($ cs) . unWrap <$> fromMessage m
| adinapoli/xmonad-contrib | XMonad/Actions/WorkspaceCursors.hs | bsd-3-clause | 8,231 | 0 | 13 | 1,701 | 2,250 | 1,192 | 1,058 | 106 | 1 |
module Grin.Lint(
lintCheckGrin,
typecheckGrin,
transformGrin,
dumpGrin
) where
import Control.Exception
import Control.Monad.Reader
import Data.Monoid
import System.IO
import qualified Data.Set as Set
import Doc.DocLike
import Grin.Grin
import Grin.Show
import Options
import Support.CanType
import Support.Compat
import Support.FreeVars
import Support.Transform
import Text.Printf
import Util.Gen
import Util.SetLike
import qualified FlagDump as FD
import qualified Stats
-- | Typecheck the grin program, but only when linting ('flint') is on.
lintCheckGrin grin = when flint $ typecheckGrin grin

-- | Like 'lintCheckGrin', but runs the supplied action (e.g. dumping
-- state) before reporting any type errors.
lintCheckGrin' onerr grin | flint = do
    let env = TcEnv { envTyEnv = grinTypeEnv grin, envInScope = fromList (fsts $ grinCafs grin) }
    -- One pretty-printed report per function that fails to typecheck.
    let errs = [ (err ++ "\n" ++ render (prettyFun a) ) | (a,Left err) <- [ (a,runTc env (tcLam Nothing c)) | a@(_,c) <- grinFuncs grin ]]
    if null errs then return () else do
        onerr
        putErrLn ">>> Type Errors"
        mapM_ putErrLn errs
        unless (null errs || optKeepGoing options) $ fail "There were type errors!"
lintCheckGrin' _ _ = return ()
-- | Typecheck every function of the grin program, printing each error
-- and aborting unless --keep-going was requested.
typecheckGrin grin = do
    let env = TcEnv { envTyEnv = grinTypeEnv grin, envInScope = fromList (fsts $ grinCafs grin) }
    let errs = [ (err ++ "\n" ++ render (prettyFun a) ) | (a,Left err) <- [ (a,runTc env (tcLam Nothing c)) | a@(_,c) <- grinFuncs grin ]]
    mapM_ putErrLn errs
    unless (null errs || optKeepGoing options) $ fail "There were type errors!"
{-# NOINLINE dumpGrin #-}
-- | Dump the grin program under names derived from @pname@: always a
-- @.grin@ pretty-print, optionally a @.datalog@ fact dump and a dump to
-- stderr, depending on the active dump flags.
dumpGrin pname grin = do
    (argstring,sversion) <- getArgString
    let fn ext action = do
            let oname = outputName ++ "_" ++ pname ++ "." ++ ext
            putErrLn $ "Writing: " ++ oname
            -- withFile closes the handle even if the writer throws;
            -- the previous openFile/hClose pair leaked it on exception.
            withFile oname WriteMode action
    fn "grin" $ \h -> do
        hPutStrLn h $ unlines [ "-- " ++ argstring,"-- " ++ sversion,""]
        hPrintGrin h grin
    wdump FD.GrinDatalog $ fn "datalog" $ \h -> do
        hPutStrLn h $ unlines [ "% " ++ argstring,"% " ++ sversion,""]
        hPrintGrinDL h grin
    wdump FD.Grin $ do
        putErrLn $ "v-- " ++ pname ++ " Grin"
        printGrin grin
        putErrLn $ "^-- " ++ pname ++ " Grin"
-- | Render a value as a single-quoted datalog atom: the text is wrapped
-- in @'...'@ and embedded single quotes are doubled.
class DShow a where
    dshow :: a -> String

-- | The core quoting/escaping rule; all other instances funnel their
-- 'show' output through this one.
instance DShow String where
    dshow s = '\'':f s where
        f ('\'':rs) = "''" ++ f rs
        f (x:xs) = x:f xs
        f [] = "'"

-- Quote via 'show', reusing the String instance's escaping (the
-- original Tag instance duplicated that escaping logic inline).
instance DShow Tag where
    dshow = dshow . show

instance DShow Var where
    dshow v = dshow (show v)

instance DShow Ty where
    dshow v = dshow $ show v

instance (DShow a,DShow b) => DShow (Either a b) where
    dshow (Left x) = dshow x
    dshow (Right x) = dshow x
-- | Datalog atom naming the i'th argument of function @n@.
funArg n i = concat [show n, "@arg@", show i]

-- | Datalog atom naming the i'th return value of function @n@.
funRet n i = concat [show n, "@ret@", show i]
-- | Emit datalog facts describing one function: its arity, the binding
-- of each formal parameter to a @name\@arg\@i@ atom, and the typing of
-- each return value as a @name\@ret\@i@ atom; then recurse into the
-- body via 'printDL'.
printFunc h n (l :-> e) = do
    hPrintf h "func(%s,%i).\n" (dshow n) (length l)
    forM_ (zip naturals l) $ \ (i,Var v t) -> do
        hPrintf h "perform(assign,%s,%s).\n" (dshow v) (dshow $ funArg n i)
        hPrintf h "what(%s,funarg).\n" (dshow $ funArg n i)
        hPrintf h "typeof(%s,%s).\n" (dshow $ funArg n i) (dshow t)
        hPrintf h "typeof(%s,%s).\n" (dshow v) (dshow t)
    let rts = getType e
        -- parallel list comprehension: pairs each return type with its index
        lts = [ (t,funRet n i) | t <- rts | i <- naturals ]
    mapM_ (hPrintf h "what(%s,funret).\n" . dshow) (snds lts)
    mapM_ (\ (t,n) -> hPrintf h "typeof(%s,%s).\n" (dshow n) (dshow t)) lts
    printDL h n (map (Left . snd) lts) e
-- | Dump the whole grin program as datalog facts on handle @h@: first
-- a fact per CAF, then the facts for every function ('printFunc').
hPrintGrinDL :: Handle -> Grin -> IO ()
hPrintGrinDL h grin = do
    let cafs = grinCafs grin
    -- 'unless (null ...)' replaces 'when (not $ null ...)'; the unused
    -- second component of each CAF pair is now explicitly ignored.
    unless (null cafs) $ do
        hPutStrLn h "% cafs"
        mapM_ (\ (x,_) -> hPrintf h "what(%s,'caf').\ntypeof(%s,inode).\n" (dshow x) (dshow x)) cafs
    hPutStrLn h "% functions"
    forM_ (grinFuncs grin) $ \ (n,l :-> e) -> printFunc h n (l :-> e)
-- | Mark every interesting (node-typed) free variable of @l@ as having an
-- unknown value, tagged with reason string @r@.
bindUnknown h l r = do
    mapM_ (\ (x,t) -> when (tyInteresting t) $ setUnknown h x r) (Set.toList $ freeVars l :: [(Var,Ty)])
-- | Emit an @unknown@ fact for @x@ with reason @r@.
setUnknown :: DShow a => Handle -> a -> String -> IO ()
setUnknown h x r = do hPrintf h "unknown(%s,%s).\n" (dshow x) (dshow r)
-- | Emit datalog facts for a grin expression.  @fs@ names the locations
-- (Left: generated atoms, Right: grin values) the expression's results are
-- bound to; facts are written to @h@ for the function named @n@.
printDL h n fs e = f fs e where
    f fs (x :>>= l :-> y) = do
        f (map Right l) x
        f fs y
    f bs (Return vs) = do zipWithM_ (assign "assign") bs vs
--    f [Left b] (Store (NodeC n vs)) = hPrintf h "store(%s,%s,%s).\n" (dshow b) (dshow n) (if tagIsWHNF n then "true" else "false")
--    f [Right (Var b _)] (Store (NodeC n vs)) = hPrintf h "store(%s,%s,%s).\n" (dshow b) (dshow n) (if tagIsWHNF n then "true" else "false") >> app n vs
--    f [b] (Store x@Var {}) = do assign "demote" b x
    f [b] (BaseOp Eval [x]) = do assign "eval" b x
    -- calls: wire arguments into funarg atoms and results out of funret atoms
    f b (App fn as ty) = do
        forM_ (zip naturals as) $ \ (i,a) -> do
            assign "assign" (Left $ funArg fn i) a
        forM_ (zip naturals b) $ \ (i,a) -> do
            genAssign "assign" a (Left $ funRet fn i)
    -- a case behaves like binding the scrutinee in each alternative
    f b (Case v ls) = mapM_ (\l -> f b (Return [v] :>>= l)) ls
    f b Let { expDefs = defs, expBody = body } = do
        forM_ defs $ \d -> printFunc h (funcDefName d) (funcDefBody d)
        forM_ defs $ \d -> hPrintf h "subfunc(%s,%s).\n" (dshow $ funcDefName d) (dshow n)
        f b body
    f b Error {} = return ()
    f b Call { expValue = Item fn _, expArgs = as, expType = ty} = do
        forM_ (zip naturals as) $ \ (i,a) -> do
            assign "assign" (Left $ funArg fn i) a
        forM_ (zip naturals b) $ \ (i,a) -> do
            genAssign "assign" a (Left $ funRet fn i)
    -- fallback: everything else produces unknown values of the right types
    f bs e = do zipWithM_ (assign "assign") bs (map ValUnknown (getType e))
    --app n as | Just (0,fn) <- tagUnfunction n = do
    --    hPrintf h "lazyfunc(%s).\n" (dshow fn)
    --    forM_ (zip naturals as) $ \ (i,a) -> do
    --        assign "assign" (Left $ funArg fn i) a
    --app _ _ = return ()
    assign op b v = genAssign op b (Right v)
    -- emit a perform fact when both sides are nameable, otherwise record
    -- the destination as unknown
    genAssign :: String -> Either String Val -> Either String Val -> IO ()
    genAssign op (Left b) (Left l) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) (dshow l)
    genAssign op (Right (Var v1 _)) (Left l) = hPrintf h "perform(%s,%s,%s).\n" op (dshow v1) (dshow l)
    genAssign op (Left b) (Right (Var v _)) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) (dshow v)
    genAssign op (Left b) (Right (Const {})) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) "const"
    genAssign op (Right (Var v1 _)) (Right (Var v2 _)) = hPrintf h "perform(%s,%s,%s).\n" op (dshow v1) (dshow v2)
    genAssign op (Left b) (Right v) = when (tyInteresting $ getType v) $ setUnknown h b (show (op,v))
    genAssign op (Right b) rv = bindUnknown h b (take 20 $ show (op,rv))
-- | Only node-typed locations are tracked by the datalog analysis.
tyInteresting ty = ty `elem` [TyNode, tyINode]
-- | Run a grin-to-grin transformation pass with progress output, lint
-- checking of the result, and optional iteration until the pass reports no
-- more changes (per its stats).
transformGrin :: TransformParms Grin -> Grin -> IO Grin
transformGrin TransformParms { transformIterate = IterateMax n } prog | n <= 0 = return prog
transformGrin TransformParms { transformIterate = IterateExactly n } prog | n <= 0 = return prog
transformGrin tp prog = do
    let dodump = transformDumpProgress tp
        name = transformCategory tp ++ pname (transformPass tp) ++ pname (transformName tp)
        _scname = transformCategory tp ++ pname (transformPass tp)
        pname "" = ""
        pname xs = '-':xs
        iterate = transformIterate tp
    when dodump $ putErrLn $ "-- " ++ name
    -- on exception: dump the input program, report, and (unless keep-going
    -- is set) die; otherwise fall back to the untransformed program
    let ferr e = do
            putErrLn $ "\n>>> Exception thrown"
            putErrLn $ "\n>>> Before " ++ name
            dumpGrin ("lint-before-" ++ name) prog
            putErrLn $ "\n>>>"
            putErrLn (show (e::SomeException'))
            maybeDie
            return prog
    let istat = grinStats prog
    prog' <- Control.Exception.catch (transformOperation tp prog { grinStats = mempty } >>= Control.Exception.evaluate ) ferr
    let estat = grinStats prog'
    -- on a lint failure: dump before/after versions of the program
    let onerr grin' = do
            putErrLn $ "\n>>> Before " ++ name
            dumpGrin ("lint-before-" ++ name) prog
            Stats.printStat name estat
            putErrLn $ "\n>>> After " ++ name
            dumpGrin ("lint-after-" ++ name) grin'
    if transformSkipNoStats tp && Stats.null estat then do
        when dodump $ putErrLn "program not changed"
        return prog
     else do
        when (dodump && not (Stats.null estat)) $ Stats.printStat name estat
        lintCheckGrin' (onerr prog') prog'
        let tstat = istat `mappend` estat
        if doIterate iterate (not $ Stats.null estat) then transformGrin tp { transformIterate = iterateStep iterate } prog' { grinStats = tstat } else return prog' { grinStats = tstat }
    -- if doIterate iterate (estat /= mempty) then transformGrin tp { transformIterate = iterateStep iterate } prog' { progStats = istat `mappend` estat } else
    --     return prog' { progStats = istat `mappend` estat, progPasses = name:progPasses prog' }
-- | Abort with an internal error unless the keep-going option was given.
maybeDie = if optKeepGoing options
    then return ()
    else putErrDie "Internal Error"
-- | Environment threaded through the grin type checker.
data TcEnv = TcEnv {
    envTyEnv :: TyEnv,          -- ^ tag/function type environment
    envInScope :: Set.Set Var   -- ^ variables currently in scope
    }
-- | Type-checking monad: a reader over 'TcEnv' whose failures are string
-- messages in 'Either'.
newtype Tc a = Tc (ReaderT TcEnv (Either String) a)
    deriving(Monad,MonadReader TcEnv)
-- | Fail type checking with the given message.
tcErr :: String -> Tc a
tcErr = Tc . lift . Left
-- | Run a type-checking computation in the given environment.
runTc :: TcEnv -> Tc a -> Either String a
runTc env (Tc r) = runReaderT r env
-- | Succeed with the common type when both agree, otherwise fail with a
-- message including the context string @msg@.
same _ t1 t2 | t1 == t2 = return t1
same msg t1 t2 = tcErr $ "Types not the same:" <+> parens msg <+> parens (tshow t1) <+> parens (tshow t2)
-- | Type check a lambda.  When an expected argument type list is supplied
-- it must match the binders' types.  Returns the body's type, checked with
-- the lambda's binders added to scope.
tcLam :: Maybe [Ty] -> Lam -> Tc [Ty]
tcLam mty (v :-> e) = f mty where
    f Nothing = ans (mapM tcVal v)
    f (Just ty) = ans $ do
        t <- mapM tcVal v
        same (":->" <+> show mty <+> show (v :-> e)) ty t
    ans r = local (\e -> e { envInScope = freeVars v `mappend` envInScope e }) $ r >> tcExp e
-- | Compute the result types of a grin expression, type checking as we go.
tcExp :: Exp -> Tc [Ty]
tcExp e = f e where
    f (e :>>= lam) = do
        t1 <- f e
        tcLam (Just t1) lam
    f n@(Prim p as t') = do
        mapM_ tcVal as
        return t'
    -- Apply takes a node and yields the declared result types
    f ap@(BaseOp (Apply t) vs) = do
        (v':_) <- mapM tcVal vs
        if v' == TyNode then return t
            else tcErr $ "App apply arg doesn't match: " ++ show ap
    -- Eval turns an inode into a node
    f ap@(BaseOp Eval [v]) = do
        v' <- tcVal v
        if v' == tyINode then return [TyNode]
            else tcErr $ "App eval arg doesn't match: " ++ show ap
    -- function calls are checked against the type environment
    f a@(App fn as t) = do
        te <- asks envTyEnv
        (as',t') <- findArgsType te fn
        as'' <- mapM tcVal as
        if t' == t then
            if as'' == as' then return t' else
                tcErr $ "App: arguments do not match: " ++ show (a,as',t')
            else tcErr $ "App: results do not match: " ++ show (a,t,(as',t'))
    f e@(BaseOp (StoreNode _) vs) = do
        [NodeC {}] <- return vs
        mapM_ tcVal vs
        return (getType e)
    f Alloc { expValue = v, expCount = c, expRegion = r } = do
        t <- tcVal v
        tcVal c
        tcVal r
        return [TyPtr t]
    f (Return v) = mapM tcVal v
    f (BaseOp Promote [v]) = do
        TyINode <- tcVal v
        return [TyNode]
    f (BaseOp Demote [v]) = do
        TyNode <- tcVal v
        return [TyINode]
    f (Error _ t) = return t
    f e@(BaseOp Overwrite [w,v]) = do
        NodeC {} <- return v
        tcVal w
        tcVal v
        return []
    f e@(BaseOp PokeVal [w,v]) = do
        TyPtr t <- tcVal w
        tv <- tcVal v
        when (t /= tv) $
            tcErr "PokeVal: types don't match"
        return []
    f e@(BaseOp PeekVal [w]) = do
        TyPtr t <- tcVal w
        return [t]
    f (Case _ []) = tcErr "empty case"
    -- every alternative must agree on its result type
    f (Case v as) = do
        tv <- tcVal v
        es <- mapM (tcLam (Just [tv])) as
        foldl1M (same $ "case exp: " ++ show (map head $ sortGroupUnder fst (zip es as)) ) es
    -- local definitions extend the type environment for the body
    f (Let { expDefs = defs, expBody = body }) = do
        local (\e -> e { envTyEnv = extendTyEnv defs (envTyEnv e) }) $ do
            mapM_ (tcLam Nothing) [ b | FuncDef { funcDefBody = b } <- defs ]
            f body
    f _ = error "Grin.Lint: unknown value passed to f"
-- | Compute the type of a grin value, checking that variables are in scope
-- and that node arguments match their tag's declared argument types.
tcVal :: Val -> Tc Ty
tcVal v = f v where
    f e@(Var v t) = do
        s <- asks envInScope
        case v `member` s of
            True -> return t
            False -> tcErr $ "variable not in scope: " ++ show e
    f (Lit _ t) = return t
    f Unit = return TyUnit
    -- a constant node is an inode; any other constant is a pointer
    f (Const t) = do
        v <- f t
        case v of
            TyNode -> return TyINode
            v -> return (TyPtr v)
    f (Index v offset) = do
        t <- f v
        TyPrim _ <- f offset
        return t
    f (ValUnknown ty) = return ty
    f (ValPrim _ vs ty) = do mapM_ f vs >> return ty
    f n@(NodeC tg as) = do
        te <- asks envTyEnv
        (as',_) <- findArgsType te tg
        as'' <- mapM f as
        if as'' == as' then return TyNode else
            tcErr $ "NodeC: arguments do not match " ++ show n ++ show (as'',as')
    f (Item _ t) = return t
| m-alvarez/jhc | src/Grin/Lint.hs | mit | 12,583 | 1 | 18 | 3,741 | 5,269 | 2,568 | 2,701 | -1 | -1 |
{-# LANGUAGE CPP #-}
--------------------------------------------------------------------------------
-- | Deal with Cmm registers
--
module LlvmCodeGen.Regs (
lmGlobalRegArg, lmGlobalRegVar, alwaysLive,
stgTBAA, baseN, stackN, heapN, rxN, otherN, tbaa, getTBAA
) where
#include "HsVersions.h"
import Llvm
import CmmExpr
import DynFlags
import FastString
import Outputable ( panic )
import Unique
-- | Get the LlvmVar function variable storing the real register
-- (the \"_Var\"-suffixed name).
lmGlobalRegVar :: DynFlags -> GlobalReg -> LlvmVar
lmGlobalRegVar dflags = pVarLift . lmGlobalReg dflags "_Var"
-- | Get the LlvmVar function argument storing the real register
-- (the \"_Arg\"-suffixed name).
lmGlobalRegArg :: DynFlags -> GlobalReg -> LlvmVar
lmGlobalRegArg dflags = lmGlobalReg dflags "_Arg"
{- Need to make sure the names here can't conflict with the unique generated
names. Uniques generated names containing only base62 chars. So using say
the '_' char guarantees this.
-}
-- | Build the LLVM variable for an STG global register with the given name
-- suffix.  The numbered register families (R1..R8, F1..F6, D1..D6 and the
-- XMM/YMM/ZMM 1..6 vector registers) are handled uniformly by guarded
-- alternatives instead of 36 duplicated case arms; an out-of-range index
-- falls through to the panic, exactly as the enumerated version did.
lmGlobalReg :: DynFlags -> String -> GlobalReg -> LlvmVar
lmGlobalReg dflags suf reg
  = case reg of
        BaseReg        -> ptrGlobal $ "Base" ++ suf
        Sp             -> ptrGlobal $ "Sp" ++ suf
        Hp             -> ptrGlobal $ "Hp" ++ suf
        VanillaReg n _ | n >= 1 && n <= 8
                       -> wordGlobal $ "R" ++ show n ++ suf
        SpLim          -> wordGlobal $ "SpLim" ++ suf
        FloatReg n     | n >= 1 && n <= 6
                       -> floatGlobal $ "F" ++ show n ++ suf
        DoubleReg n    | n >= 1 && n <= 6
                       -> doubleGlobal $ "D" ++ show n ++ suf
        XmmReg n       | n >= 1 && n <= 6
                       -> xmmGlobal $ "XMM" ++ show n ++ suf
        YmmReg n       | n >= 1 && n <= 6
                       -> ymmGlobal $ "YMM" ++ show n ++ suf
        ZmmReg n       | n >= 1 && n <= 6
                       -> zmmGlobal $ "ZMM" ++ show n ++ suf
        MachSp         -> wordGlobal $ "MachSp" ++ suf
        _other         -> panic $ "LlvmCodeGen.Reg: GlobalReg (" ++ (show reg)
                                ++ ") not supported!"
        -- LongReg, HpLim, CCCS, CurrentTSO, CurrentNursery, HpAlloc
        -- EagerBlackholeInfo, GCEnter1, GCFun, BaseReg, PicBaseReg
    where
        wordGlobal   name = LMNLocalVar (fsLit name) (llvmWord dflags)
        ptrGlobal    name = LMNLocalVar (fsLit name) (llvmWordPtr dflags)
        floatGlobal  name = LMNLocalVar (fsLit name) LMFloat
        doubleGlobal name = LMNLocalVar (fsLit name) LMDouble
        xmmGlobal    name = LMNLocalVar (fsLit name) (LMVector 4 (LMInt 32))
        ymmGlobal    name = LMNLocalVar (fsLit name) (LMVector 8 (LMInt 32))
        zmmGlobal    name = LMNLocalVar (fsLit name) (LMVector 16 (LMInt 32))
-- | A list of STG Registers that should always be considered alive
-- NOTE(review): 'node' comes from CmmExpr; presumably the node register
-- (R1) -- confirm against CmmExpr's definition.
alwaysLive :: [GlobalReg]
alwaysLive = [BaseReg, Sp, Hp, SpLim, HpLim, node]
-- | STG Type Based Alias Analysis hierarchy
-- Each entry is (metadata id, node name, parent id); 'Nothing' marks the
-- root of the hierarchy.
stgTBAA :: [(Unique, LMString, Maybe Unique)]
stgTBAA
  = [ (topN,   fsLit "top",   Nothing)
    , (stackN, fsLit "stack", Just topN)
    , (heapN,  fsLit "heap",  Just topN)
    , (rxN,    fsLit "rx",    Just heapN)
    , (baseN,  fsLit "base",  Just topN)
    -- FIX: Not 100% sure about 'others' place. Might need to be under 'heap'.
    -- OR I think the big thing is Sp is never aliased, so might want
    -- to change the hieracy to have Sp on its own branch that is never
    -- aliased (e.g never use top as a TBAA node).
    , (otherN, fsLit "other", Just topN)
    ]
-- | Id values
-- Uniques for the TBAA hierarchy nodes, derived from fixed strings so they
-- are stable across invocations.
topN, stackN, heapN, rxN, baseN, otherN :: Unique
topN   = getUnique (fsLit "LlvmCodeGen.Regs.topN")
stackN = getUnique (fsLit "LlvmCodeGen.Regs.stackN")
heapN  = getUnique (fsLit "LlvmCodeGen.Regs.heapN")
rxN    = getUnique (fsLit "LlvmCodeGen.Regs.rxN")
baseN  = getUnique (fsLit "LlvmCodeGen.Regs.baseN")
otherN = getUnique (fsLit "LlvmCodeGen.Regs.otherN")
-- | The TBAA metadata identifier
tbaa :: LMString
tbaa = fsLit "tbaa"
-- | Get the correct TBAA metadata information for this register type
getTBAA :: GlobalReg -> Unique
getTBAA reg = case reg of
    BaseReg        -> baseN
    Sp             -> stackN
    Hp             -> heapN
    VanillaReg _ _ -> rxN
    _              -> topN
| tjakway/ghcjvm | compiler/llvmGen/LlvmCodeGen/Regs.hs | bsd-3-clause | 5,550 | 0 | 11 | 1,633 | 1,447 | 741 | 706 | 94 | 44 |
{-# OPTIONS_GHC -w #-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
module ShouldCompile where
import Control.Monad.Reader
-- Deliberately partial instance over all function types; exercises the
-- FlexibleInstances/UndecidableInstances pragmas above.
instance Eq (a -> b) where
  _ == _ = error "whoops"
-- Every function shows as the fixed string "<fun>".
instance Show (a -> b) where
  show = const "<fun>"
-- This is the example from Trac #179
-- (uses the Show instance for functions declared in this module)
foo x = show (\_ -> True)
-- This is the example from Trac #963
-- (the instance lets literal 1 be used at a function type below)
instance (Num a, Monad m, Eq (m a), Show (m a)) => Num (m a) where
  test = 1 True
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/tc217.hs | bsd-3-clause | 436 | 0 | 8 | 96 | 146 | 78 | 68 | -1 | -1 |
{-# LANGUAGE Trustworthy, Unsafe #-}
-- | Basic test to see if Safe flags compiles
module SafeFlags06 where
-- Trivial binding; the point of this module is its LANGUAGE pragmas.
f :: Int
f = 1
| urbanslug/ghc | testsuite/tests/safeHaskell/flags/SafeFlags06.hs | bsd-3-clause | 126 | 0 | 4 | 27 | 16 | 11 | 5 | 4 | 1 |
{-# OPTIONS_GHC -fwarn-warnings-deprecations #-}
-- Test deprecation of constructors and class ops
module ShouldCompile where
import Rn050_A
-- Implementations of the (deprecated) class methods from Rn050_A.
instance Foo T where
  op x = x
  bop y = y
-- Use of a deprecated method and constructor; should trigger warnings.
foo = op C
| urbanslug/ghc | testsuite/tests/rename/should_compile/rn050.hs | bsd-3-clause | 202 | 0 | 6 | 43 | 41 | 23 | 18 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Monad (mplus)
import Data.Aeson
import qualified Data.ByteString.Lazy as BSL
import Data.Dynamic
import Data.Map as M
import System.Directory
import System.Environment (getEnv)
import System.IO
import Text.Blaze.Html.Renderer.Text
import Text.Blaze.Html5 as H hiding (main)
import qualified Text.Blaze.Html5.Attributes as A
import qualified Web.Scotty as S
-- | Serve the niceties page.  Falls back to port 3000 when $PORT is unset;
-- 'mplus' on IO catches the IOError thrown by 'getEnv'.
main :: IO ()
main = do
  msg <- nicetyFromKey "test"
  port <- getEnv "PORT" `mplus` return "3000"
  S.scotty (read port :: Int) $ do
    S.get "/" $ do
      S.html . renderHtml $ page msg
    -- S.get "/:key" $ do
    --   key <- param "key"
    --   msg2 <- nicetyFromKey key
    --   S.html . renderHtml $ page msg2
-- | Document skeleton: a head carrying the page title followed by a body
-- with a greeting heading.
root :: Html
root = docHead >> docBody
  where
    docHead = H.head $ title "Niceties"
    docBody = body $ h1 "Hi there!"
-- | Render a message as a single-item ordered list.
someList :: String -> Html
someList = ol . li . toMarkup
-- | Full page markup: the skeleton followed by the message list.
page :: String -> Html
page msg = root >> someList msg
-- | Niceties data: a flat string-to-string JSON object.
type Hash = Map String String
-- | Lookup key into the niceties hash.
type Key = String
-- n1, n2, n3 exist mainly to document the steps
-- they can be combined but leaving 'em as is for
-- documentation purposes
-- | Look up a nicety by key from the JSON data file, returning the decode
-- error or a "not found" message as the result string on failure.
nicetyFromKey :: Key -> IO String
nicetyFromKey key = do
  n1 <- decodeNiceties "data/niceties.json"
  let n2 = M.lookup key <$> n1 -- lookup "test" <$> Right Hash => Right (lookup "test" Hash)
                               -- which has type Either String (Maybe String)
  let n3 = case n2 of Left err -> err
                      Right (Just s) -> s -- m :: String
                      Right Nothing -> "Key " ++ key ++ " not found!" -- :: String
  return n3
{-
The whole do block above in nicetyFromKey is equivalent to:
decodeNiceties "data/niceties.json" >>= (\n1 -> let x = case Data.Map.lookup "test" <$> n1 of { Left err -> err; Right (Just s) -> s; Right Nothing -> "boop" } in return x)
-}
-- | Decode the niceties JSON file into a string map; Left on parse failure.
decodeNiceties :: FilePath -> IO (Either String Hash)
decodeNiceties p = do
  rawData <- BSL.readFile p -- path relative to where process cwd
                            -- process cwd seems to be root of project
  return (eitherDecode rawData :: Either String Hash)
-- | Debug helper: print the process's current working directory.
printCwd :: IO ()
printCwd = do
  cwd <- getCurrentDirectory
  Prelude.putStrLn cwd
| zeusdeux/heart.mudit.xyz | src/main.hs | isc | 2,378 | 0 | 15 | 720 | 511 | 271 | 240 | 49 | 3 |
{- |
This module works with recursive data structure with cycles.
-}
module Data.Generics.Rec (
universeRec
)
where
import Data.Generics
import Control.Monad.State
import Data.Set
import System.Mem.StableName
import System.IO.Unsafe
-- | Traversal state: stable-name hashes of nodes already visited, in IO so
-- stable names can be created.
type RecState a = StateT (Set Int) IO a
-- | Walk a possibly-cyclic structure, collecting every castable node once.
-- Cycles are broken by remembering the stable-name hash of each visited
-- node.  NOTE(review): 'hashStableName' is not injective, so a hash
-- collision between distinct nodes would silently skip a subtree --
-- confirm whether that risk is acceptable here.
goRec :: (Data v, Data r) => v -> RecState [r]
goRec !v = do
    hash <- hashStableName `fmap` liftIO (makeStableName v)
    p <- gets (member hash)
    if p
        then return []
        else do
            modify $ insert hash
            case cast v of
                Just v0 -> (v0:) `fmap` continue
                Nothing -> continue
    where
        continue = concat `fmap` sequence (gmapQ goRec v)
-- | Get all distinct children of a node, including itself and all children.
--
-- > dataRose = Rose { roseId :: Int, roseBranches :: [Rose] }
-- > deriving (Data,Typeable)
--
-- > roses =
-- > let a = Rose 1 [b,c]
-- > b = Rose 2 [a,c]
-- > c = Rose 3 [a,b]
-- > in a
--
-- > [ i | Rose i _ <- universeRec roses ]
-- > [1,2,3]
universeRec :: (Data v, Data r) => v -> [r]
-- NOTE(review): relies on unsafePerformIO plus StableName identity, so the
-- result depends on runtime sharing of the input structure.
universeRec v = unsafePerformIO $ evalStateT (goRec v) empty
| lomeo/unirec | src/Data/Generics/Rec.hs | mit | 1,167 | 0 | 14 | 342 | 286 | 158 | 128 | 22 | 3 |
{- |
module: Main
description: Querying the contents of OpenTheory packages
license: MIT
maintainer: Joe Leslie-Hurd <joe@gilith.com>
stability: provisional
portability: portable
-}
module Main
( main )
where
import System.FilePath (isValid,takeDirectory,takeExtension)
import qualified System.Environment as Environment
import HOL.OpenTheory (readArticle,readPackages)
import qualified HOL.OpenTheory.Interpret as Interpret
import HOL.OpenTheory.Package (Name,NameVersion)
import qualified HOL.OpenTheory.Package as Package
import HOL.Parse
import HOL.Print
import HOL.Theory (Theory)
import qualified HOL.Theory as Theory
-------------------------------------------------------------------------------
-- An article file
-------------------------------------------------------------------------------
-- | Accept a single valid filename with a .art extension.
articleArg :: [String] -> Maybe FilePath
articleArg args = case args of
    [path] | isValid path, takeExtension path == ".art" -> Just path
    _ -> Nothing
-- | Read an article file in the standard theory context and wrap its
-- theorems as a theory.
articleThy :: FilePath -> IO Theory
articleThy f = do
    ths <- readArticle Theory.standard Interpret.empty f
    return $ Theory.fromThmSet ths
-------------------------------------------------------------------------------
-- A collection of packages
-------------------------------------------------------------------------------
-- | Interpret every argument as a package name; Nothing if any fails to
-- parse.
packagesArg :: [String] -> Maybe [Name]
packagesArg = mapM fromString
-- | Read the latest installed versions of the named packages and union
-- their theories.
packagesThy :: [Name] -> IO Theory
packagesThy = fmap Theory.unionList . readPackages
-------------------------------------------------------------------------------
-- A package file
-------------------------------------------------------------------------------
-- | Accept a single valid filename with a .thy extension.
packageFileArg :: [String] -> Maybe FilePath
packageFileArg args = case args of
    [path] | isValid path, takeExtension path == ".thy" -> Just path
    _ -> Nothing
-- | Read a .thy package file: load its required packages first, then read
-- the package itself relative to the file's directory.
packageFileThy :: FilePath -> IO Theory
packageFileThy f = do
    pkg <- fromTextFile f
    req <- packagesThy (Package.requires pkg)
    let thy = Theory.union Theory.standard req
    let int = Interpret.empty
    let dir = takeDirectory f
    Package.readPackage thy int dir pkg
-------------------------------------------------------------------------------
-- A specific version of a package
-------------------------------------------------------------------------------
-- | Parse a single NAME-VERSION argument.
packageVersionArg :: [String] -> Maybe NameVersion
packageVersionArg [s] = fromString s
packageVersionArg _ = Nothing
-- | Load a specific installed version of a package from its install
-- directory.
packageVersionThy :: NameVersion -> IO Theory
packageVersionThy nv = do
    dir <- Package.directoryVersion nv
    packageFileThy (Package.packageFile dir (Package.name nv))
-------------------------------------------------------------------------------
-- Top-level
-------------------------------------------------------------------------------
-- | Abort with an error message followed by the usage text.
usage :: String -> a
usage err =
    error $ err ++ "\n" ++ info
  where
    info =
      "Usage: hol-pkg INPUT\n" ++
      "where INPUT is one of the following forms:\n" ++
      "  FILE.art : a proof article file\n" ++
      "  FILE.thy : a theory package file\n" ++
      "  NAME-VERSION : a specific version of an installed theory package\n" ++
      "  NAME ... : the latest installed version of a list of packages\n" ++
      "hol-pkg reads the INPUT to generate a set of theorems, which are\n" ++
      "pretty-printed to standard output together with the symbols they contain."
-- | Dispatch on the argument form, trying in order: article file, package
-- file, specific package version, then a list of package names.
main :: IO ()
main = do
    args <- Environment.getArgs
    if null args then usage "no arguments" else return ()
    thy <- case articleArg args of
             Just f -> articleThy f
             Nothing ->
               case packageFileArg args of
                 Just f -> packageFileThy f
                 Nothing ->
                   case packageVersionArg args of
                     Just nv -> packageVersionThy nv
                     Nothing ->
                       case packagesArg args of
                         Just ns -> packagesThy ns
                         Nothing -> usage $ "bad arguments: " ++ show args
    putStrLn $ toString thy
    return ()
| gilith/hol | src/Main.hs | mit | 3,999 | 0 | 21 | 820 | 783 | 398 | 385 | 71 | 6 |
-- | A 2D vector with componentwise 'Num' arithmetic.  'abs' stores the
-- magnitude in the x slot, 'signum' yields the unit vector, and
-- 'fromInteger' embeds integers along the x axis.
data Vector = Vector { x :: Double, y :: Double } deriving (Eq, Ord, Show)

instance Num Vector where
    Vector ax ay + Vector bx by = Vector (ax + bx) (ay + by)
    a - b = a + negate b
    Vector ax ay * Vector bx by = Vector (ax * bx) (ay * by)
    abs (Vector vx vy) = Vector (sqrt (vx * vx + vy * vy)) 0
    negate (Vector vx vy) = Vector (negate vx) (negate vy)
    fromInteger n = Vector (fromInteger n) 0
    signum (Vector vx vy) =
        let mag = sqrt (vx * vx + vy * vy)
        in Vector (vx / mag) (vy / mag)
-- | Scale a vector by a scalar (vector on the left).
v .* k = Vector (x v * k) (y v * k)
-- | Scale a vector by a scalar (scalar on the left).
k *. v = v .* k
| 0xd34df00d/hencil | src/Vector.hs | mit | 507 | 0 | 14 | 159 | 351 | 174 | 177 | 11 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Hogldev.Pipeline (
Pipeline(..)
, getTrans
, PersProj(..)
, Camera(..)
) where
import Graphics.Rendering.OpenGL
import Hogldev.Math3D
import Hogldev.Utils
import Hogldev.Camera
-- | Transformation pipeline variants: world only (W), world+projection
-- (WP), world+view+projection (WVP), and view+projection (VP).
data Pipeline
    = WPipeline
    { scaleInfo  :: Vector3 GLfloat
    , worldInfo  :: Vector3 GLfloat
    , rotateInfo :: Vector3 GLfloat
    }
    | WPPipeline
    { scaleInfo  :: Vector3 GLfloat
    , worldInfo  :: Vector3 GLfloat
    , rotateInfo :: Vector3 GLfloat
    , persProj   :: PersProj
    }
    | WVPPipeline
    { scaleInfo  :: Vector3 GLfloat
    , worldInfo  :: Vector3 GLfloat
    , rotateInfo :: Vector3 GLfloat
    , persProj   :: PersProj
    , pipeCamera :: Camera
    }
    | VPPipeline
    { persProj   :: PersProj
    , pipeCamera :: Camera
    }
    deriving Show
-- | Collapse a pipeline description into a single 4x4 transform matrix.
getTrans :: Pipeline -> Matrix4
getTrans WPipeline{..} = worldTrans scaleInfo worldInfo rotateInfo
getTrans WPPipeline{..} = projTrans scaleInfo worldInfo rotateInfo persProj
getTrans VPPipeline{..} = vpTrans persProj pipeCamera
getTrans WVPPipeline{..} =
    projViewTrans scaleInfo worldInfo rotateInfo persProj pipeCamera
-- | View-projection matrix (no model transform).
vpTrans :: PersProj -> Camera -> Matrix4
vpTrans persProj camera = perspProjTrans persProj !*! cameraTrans camera
-- | World (model) matrix: scale, then rotate, then translate (matrices
-- compose right to left).
worldTrans :: Vector3 GLfloat
           -> Vector3 GLfloat
           -> Vector3 GLfloat
           -> Matrix4
worldTrans scaleV posV rotV =
    translateMatrix posV !*! initRotateTransform rotV !*! scaleMatrix scaleV
-- | Perspective projection applied to the world transform.
projTrans :: Vector3 GLfloat
          -> Vector3 GLfloat
          -> Vector3 GLfloat
          -> PersProj
          -> Matrix4
projTrans scaleV posV rotV proj =
    perspProjTrans proj !*! worldTrans scaleV posV rotV
-- | Full model-view-projection matrix: projection * view * world.
projViewTrans :: Vector3 GLfloat
              -> Vector3 GLfloat
              -> Vector3 GLfloat
              -> PersProj
              -> Camera
              -> Matrix4
projViewTrans scaleV posV rotV proj cam =
    perspProjTrans proj !*! cameraTrans cam !*! worldTrans scaleV posV rotV
-- | View matrix: rotate into camera space, then translate by the negated
-- camera position.
cameraTrans :: Camera -> Matrix4
cameraTrans c@Camera{..} =
    cameraRotationTrans c !*! translateMatrix (fmap (*(-1) ) cameraPos)
-- | Rotation matrix from Euler angles in degrees, applied X then Y then Z
-- (matrices compose right to left).
initRotateTransform :: Vector3 GLfloat -> Matrix4
initRotateTransform (Vector3 ax ay az) =
    rotateZMatrix (toRadian az)
        !*! rotateYMatrix (toRadian ay)
        !*! rotateXMatrix (toRadian ax)
| triplepointfive/hogldev | common/Hogldev/Pipeline.hs | mit | 2,783 | 0 | 11 | 772 | 676 | 358 | 318 | 74 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE JavaScriptFFI #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE EmptyDataDecls #-}
module JavaScript.Web.Blob.Internal where
import Data.Typeable
import GHCJS.Types
data BlobType = BlobTypeBlob
| BlobTypeFile
newtype SomeBlob (a :: BlobType) = SomeBlob JSRef deriving Typeable
type File = SomeBlob BlobTypeFile
type Blob = SomeBlob BlobTypeBlob
-- | Size of the blob in bytes.
size :: SomeBlob a -> Int
size b = js_size b
{-# INLINE size #-}
-- | The blob's MIME type string.
contentType :: SomeBlob a -> JSString
contentType b = js_type b
{-# INLINE contentType #-}
-- is the type correct, does slicing a File give another File?
-- | Slice [start, end) out of a blob with the given MIME type for the
-- result.  (Parameter renamed from contentType to avoid shadowing the
-- top-level accessor of the same name.)
slice :: Int -> Int -> JSString -> SomeBlob a -> SomeBlob a
slice start end mimeType blob = js_slice start end mimeType blob
{-# INLINE slice #-}
-- | Whether the blob has been closed.
isClosed :: SomeBlob a -> IO Bool
isClosed b = js_isClosed b
{-# INLINE isClosed #-}
-- | Close the blob, releasing its underlying resources.
close :: SomeBlob a -> IO ()
close b = js_close b
{-# INLINE close #-}
-- -----------------------------------------------------------------------------
-- Raw JavaScript FFI bindings for the Blob API.
foreign import javascript unsafe "$1.size" js_size :: SomeBlob a -> Int
foreign import javascript unsafe "$1.type" js_type :: SomeBlob a -> JSString
-- fixme figure out if we need to support older browsers with obsolete slice
foreign import javascript unsafe "$4.slice($1,$2,$3)"
  js_slice :: Int -> Int -> JSString -> SomeBlob a -> SomeBlob a
foreign import javascript unsafe "$1.isClosed"
  js_isClosed :: SomeBlob a -> IO Bool
foreign import javascript unsafe "$1.close();"
  js_close :: SomeBlob a -> IO ()
| tavisrudd/ghcjs-base | JavaScript/Web/Blob/Internal.hs | mit | 1,607 | 15 | 10 | 276 | 358 | 190 | 168 | 37 | 1 |
module Main where
import Data.Bifunctor (first)
import Data.Bits (testBit, unsafeShiftL)
import Data.Bool (bool)
import Data.Char (digitToInt)
import Data.Foldable (foldl', traverse_)
import Data.Function (on)
import Data.List.NonEmpty (NonEmpty (..), fromList)
-- | Expand a hex string into its bits, most significant bit first.
readBits :: String -> [Bool]
readBits = concatMap hexDigitBits
  where hexDigitBits c = let n = digitToInt c in map (testBit n) [3, 2, 1, 0]
-- | Interpret a big-endian bit list as a non-negative Int.
bToI :: [Bool] -> Int
bToI = foldl' step 0
  where step acc bit = 2 * acc + fromEnum bit
-- | A parser consumes bits and yields a value plus the unconsumed rest.
type Parser a = [Bool] -> (a, [Bool])

-- | Read an n-bit big-endian unsigned integer.
int :: Int -> Parser Int
int n bits = (foldl' (\acc b -> 2 * acc + fromEnum b) 0 taken, rest)
  where (taken, rest) = splitAt n bits
-- | Fold applied by an n-ary operator packet.
data Operation = OSum | OMul | OMin | OMax deriving Enum
-- | Comparison applied by a two-child comparison packet.
data Comparator = CGT | CLT | CEQ deriving Enum
-- | A decoded BITS packet; the first Int of each constructor is the
-- packet version.
data Packet = Literal !Int !Int
            | Operation !Int !Operation !(NonEmpty Packet)
            | Comparison !Int !Comparator !Packet !Packet
-- | Parse a literal packet body: 5-bit groups whose leading bit marks
-- continuation; @v@ is the already-parsed version.
literal :: Int -> Parser Packet
literal v = first (Literal v . bToI) . go
  where
    go [] = error "Empty list passed to `literal`"
    go bs =
        let (x : ys, bs') = splitAt 5 bs
        in if not x then (ys, bs') else first (ys ++) $ go bs'
-- | Parse exactly n sub-packets.  Note: the accumulator yields them in
-- reverse order; callers compensate (see 'comparison').
subN :: Int -> Parser [Packet]
subN n = go []
  where
    go ps bs =
        if length ps >= n then (ps, bs) else let (p, bs') = parse bs in go (p : ps) bs'
-- | Parse sub-packets until exactly n bits have been consumed (also yields
-- them in reverse order).
subT :: Int -> Parser [Packet]
subT n = go 0 []
  where
    go i ps bs = if i >= n
        then (ps, bs)
        else
            -- d is how many bits this packet consumed
            let (p, bs') = parse bs
                d = ((-) `on` length) bs bs'
            in go (i + d) (p : ps) bs'
-- | Parse an operator packet's children: the length-type bit selects an
-- 11-bit packet count ('subN') or a 15-bit bit count ('subT').
dispatch :: Parser [Packet]
dispatch [] = error "Empty list passed to `dispatch`"
dispatch (b : bs) = if b
    then let (n, bs') = (first bToI $ splitAt 11 bs) in subN n bs'
    else let (n, bs') = (first bToI $ splitAt 15 bs) in subT n bs'
-- | Build an n-ary operation packet.  Children come reversed from
-- 'dispatch', which is harmless for the commutative fold operations.
operation :: Int -> Int -> Parser Packet
operation v o = first (Operation v (toEnum o) . fromList) . dispatch
-- | Build a comparison packet.  'dispatch' yields children reversed, so
-- reverse again to restore x-before-y order.
comparison :: Int -> Int -> Parser Packet
comparison v c bits = (Comparison v (toEnum c) x y, bits')
  where
    (ps, bits') = dispatch bits
    [x , y ] = reverse $ take 2 ps
-- | Parse one packet: 3-bit version, 3-bit type id, then the payload keyed
-- on the type id (4 = literal, 0-3 = operation, 5-7 = comparison).
parse :: Parser Packet
parse bs = p
  where
    (v, bs' ) = int 3 bs
    (t, bs'') = int 3 bs'
    p = case t of
        n | n `elem` [0 .. 3] -> operation v n bs''
        4 -> literal v bs''
        n | n `elem` [5 .. 7] -> comparison v (n - 5) bs''
        n -> error $ "Unexpected integer in `parse`: " ++ show n
-- | Sum the version numbers of a packet and all its descendants (part 1).
sumVersions :: Packet -> Int
sumVersions pkt = case pkt of
    Literal v _        -> v
    Operation v _ ps   -> v + sum (fmap sumVersions ps)
    Comparison v _ a b -> v + sumVersions a + sumVersions b
-- | Evaluate a packet to its value per the BITS spec (part 2).
-- The original selected the operator with a list index
-- (@[...] !! fromEnum o@), a partial operation that silently depends on
-- the Enum ordering; a total case dispatch is used instead.
eval :: Packet -> Int
eval (Literal _ v) = v
eval (Operation _ o ps) = reduce (eval <$> ps)
  where
    reduce = case o of
        OSum -> sum
        OMul -> product
        OMin -> minimum
        OMax -> maximum
eval (Comparison _ c a b) = if holds (eval a) (eval b) then 1 else 0
  where
    holds = case c of
        CGT -> (>)
        CLT -> (<)
        CEQ -> (==)
-- | Decode the hex input and apply an extraction function to the
-- outermost packet.
solve :: (Packet -> Int) -> String -> Int
solve extract input = extract (fst (parse (readBits input)))
-- | Part 1: sum of all packet version numbers.
part1 :: String -> Int
part1 = solve sumVersions
-- | Part 2: value of the outer expression.
part2 :: String -> Int
part2 = solve eval
-- | Read the puzzle input and print both answers.
main :: IO ()
main = do
  puzzle <- readFile "input.txt"
  mapM_ (\solver -> print (solver puzzle)) [part1, part2]
| genos/online_problems | advent_of_code_2021/day16/Main.hs | mit | 3,088 | 0 | 14 | 858 | 1,460 | 771 | 689 | 96 | 4 |
{-# LANGUAGE FlexibleContexts, Rank2Types, NoMonomorphismRestriction #-}
module Game.World.Lens
( objectBoundary
, Get
, Set
, Component(..)
, writeProp
, compUnit
, getWires
, addWire
, addObject
, moveObject
, isCollidable
, getCollisionFilters
, setCollisionEvent
, getAnimations
, setAnimation
, setAnimations
, deleteObject
, objectPosition
, rotateObject
, getPositions
, collisionEvent
, setIgnoreCollision
, setBoundary
, getItems
, getObjects
, setOrientation
, setStaticCollidable
)
where
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Control.Lens
import Game.World.Objects
import Control.Monad.Writer
--import Game.World.Types
import Data.Maybe
import Game.World.Common
import Control.Arrow
--objectExists :: ObjectId -> World -> Bool
--objectExists oId w = Map.member oId (w^.wObjects)
--wObjectExists :: ObjectId -> Getter World Bool
--wObjectExists oId = to (objectExists oId)
-- | Read-only access to a property of the current world.
type Get a = Getter World a
-- | Write access into the world delta accumulated during a step.
type Set a = Setter' WorldDelta a
-- | A component pairs the getter into the current 'World' with the setter
-- into the 'WorldDelta'; @a@ is the read type, @da@ the delta type.
data Component a da = Component
    { _compGet :: Getter World a
    , _compSet :: Setter' WorldDelta da
    }
--makeLenses ''Component (ObjectProp Position) (ObjectProp Position)
--compGet = to _compGet
--compSet = to _compSet
-- | Component whose delta type equals its read type.
type Component' a = Component a a
-- | Objects; the delta uses Maybe so Nothing can delete an object.
compObject :: Component (ObjectProp Object) (ObjectProp (Maybe Object))
compObject = Component
    { _compGet = wObjects
    , _compSet = wdObjects
    }
-- | Units managed by the unit manager.
compUnit :: Component (ObjectProp Unit) (ModifyContainer UnitId Unit)
compUnit = Component
    { _compGet = wUnitManager.umUnits
    , _compSet = wdUnitManager.umdUnits
    }
-- | Items managed by the unit manager.
compItem :: Component (ObjectProp Item) (ModifyContainer ItemId Item)
compItem = Component
    { _compGet = wUnitManager.umItems
    , _compSet = wdUnitManager.umdItems
    }
-- | Object positions.
compPosition :: Component' (ObjectProp Position)
compPosition = Component
    { _compGet = wCommon.wcPositions
    , _compSet = wdCommon.wcDelta.wcPositions
    }
-- | Object rotations.
compRotation :: Component' (ObjectProp Rotation)
compRotation = Component
    { _compGet = wCommon.wcRotations
    , _compSet = wdCommon.wcDelta.wcRotations
    }
-- | Wires (behaviors) attached to objects.
compWires :: Component' (ObjectProp [ObjectWire ObjectId ()])
compWires = Component
    { _compGet = wCommon.wcWires
    , _compSet = wdCommon.wcDelta.wcWires
    }
-- | Object animations.
compAnimations :: Component' (ObjectProp Animation)
compAnimations = Component
    { _compGet = wCommon.wcAnimations
    , _compSet = wdCommon.wcDelta.wcAnimations
    }
-- | Object collision boundaries.
compBoundaries :: Component' (ObjectProp Boundary)
compBoundaries = Component
    { _compGet = wCommon.wcBoundaries
    , _compSet = wdCommon.wcDelta.wcBoundaries
    }
-- | Object orientations.
compOrientation :: Component' (ObjectProp Orientation)
compOrientation = Component
    { _compGet = wCommon.wcOrientation
    , _compSet = wdCommon.wcDelta.wcOrientation
    }
-- | Collision events recorded against each object this step.
compCollisionEvent :: Component' (ObjectProp [ObjectId])
compCollisionEvent = Component
    { _compGet = wCommon.wcCollisionEvents
    , _compSet = wdCommon.wcDelta.wcCollisionEvents
    }
-- NOTE(review): "Ingored" is a typo for "Ignored"; renaming would touch
-- callers, so it is left as is.
type IngoredObjects = Set.Set ObjectId
type ObjectIdTo a = ObjectProp a
type ListOfChanges = Map.Map ObjectId (Maybe ObjectId)
type CollisionFilter = Component
--type ObjectChangeSet = Map.Map ObjectId (Maybe ObjectId)
-- | Per-object sets of collision partners to ignore; the delta records
-- additions/removals per object.
compCollisionFilter :: CollisionFilter (ObjectIdTo IngoredObjects) (ObjectIdTo ObjectChangeSet)
compCollisionFilter = Component
    { _compGet = wCollisionFilter
    , _compSet = wdCollisionFilter
    }
getWires :: Get (ObjectProp [ObjectWire ObjectId ()])
getWires = _compGet compWires
setWires :: Set (ObjectProp [ObjectWire ObjectId ()])
setWires = _compSet compWires
-- | Record a new value for one object's property in the 'WorldDelta'
-- writer; always writes 'Just', i.e. an insert/overwrite.
writeProp :: (MonadWriter WorldDelta m)
	=> Set (ObjectProp a)
	-> ObjectId
	-> a
	-> m ()
writeProp mapSetter oId a = scribe (mapSetter . at oId) (Just a)
-- | Attach a wire to an object (written as a singleton list delta).
addWire :: (MonadWriter WorldDelta m) => ObjectId -> ObjectWire ObjectId () -> m ()
addWire oId w = writeProp setWires oId [w]
setAnimations :: Set (ObjectProp Animation)
setAnimations = _compSet compAnimations
getAnimations :: Get (ObjectProp Animation)
getAnimations = _compGet compAnimations
setAnimation :: (MonadWriter WorldDelta m) => ObjectId -> Animation -> m ()
setAnimation = writeProp setAnimations
setPositions :: Setter' WorldDelta (ObjectProp Position)
setPositions = _compSet compPosition
getPositions :: Getter World (ObjectProp Position)
getPositions = _compGet compPosition
-- | Look up an object's position; 'Nothing' when the object has none.
objectPosition :: ObjectId -> Getter World (Maybe Position)
objectPosition oId = getPositions . at oId
-- | Rotation component
setRotations :: Setter' WorldDelta (ObjectProp Rotation)
setRotations = _compSet compRotation
getRotations :: Getter World (ObjectProp Rotation)
getRotations = _compGet compRotation
rotateObject :: (MonadWriter WorldDelta m)
	=> ObjectId -> Rotation -> m ()
rotateObject = writeProp setRotations
objectRotation :: ObjectId -> Get (Maybe Rotation)
objectRotation oId = getRotations . at oId
-- NOTE(review): the pair is written straight into the position map, so it
-- presumably is an absolute position, not an offset -- confirm at call sites.
moveObject :: (MonadWriter WorldDelta m) => ObjectId -> (Float, Float) -> m ()
moveObject = writeProp setPositions
setObjects :: Setter' WorldDelta (ObjectProp (Maybe Object))
setObjects = _compSet compObject
getObjects :: Getter World (ObjectProp Object)
getObjects = _compGet compObject
-- | Create (or overwrite) an object in the delta.
addObject :: (MonadWriter WorldDelta m) => ObjectId -> Object -> m ()
addObject oId obj = writeProp setObjects oId (Just obj)
-- | Mark an object as deleted by writing 'Nothing' into the delta map.
deleteObject :: (MonadWriter WorldDelta m) => ObjectId -> m ()
deleteObject oId = writeProp setObjects oId Nothing
-- | Ids of objects whose delta entry is 'Nothing' (i.e. deletions).
deletedObjects :: Getter WorldDelta [ObjectId]
deletedObjects = to getDeleted
	where
		getDeleted wd = getDeletedObjects (wd^.wdObjects)
		-- new objects are inserted into the map with Just
		getDeletedObjects objectMap = map fst $
			filter (\(objId, mobj) -> case mobj of Nothing -> True; _ -> False) $
				Map.toList objectMap
-- | Objects whose delta entry is 'Just' (i.e. newly added/overwritten).
newObjects :: Getter WorldDelta [Object]
newObjects = to getNew
	where
		getNew wd = getNewObjects (wd^.wdObjects)
		-- new objects are inserted into the map with Just
		getNewObjects objectMap = map fromJust .
			filter (\mobj -> case mobj of Just _ -> True; _ -> False) $
				map snd $ Map.toList objectMap
-- | Find the first object with the given name (via the One/unOne monoid).
findObject :: String -> Getter World (Maybe Object)
findObject name = to (\w ->
		unOne $ ifoldMap (\_ obj -> One $
			if (obj^.objName) == name then Just obj else Nothing
		) (w^.wObjects)
	)
-- | boundary + position
-- NOTE(review): uses 'fromJust' on the position lookup -- crashes if the
-- object has no position; callers must guarantee it exists.
tileBoundary :: ObjectId -> Get ((Float, Float), (Float, Float))
tileBoundary oId = to boundary
	where
		boundary w = (pos w, w^.wTileBoundary)
		pos w = fromJust $ w^.getPositions . at oId
-- | boundary + position
-- NOTE(review): same 'fromJust' caveat as 'tileBoundary', for both the
-- position and the boundary lookup.
objectBoundary :: ObjectId -> Get Boundary
objectBoundary oId = to boundary
	where
		boundary w = let (px, py) = pos w in -- collision boundary = object boundary + position
			map ((+) px *** (+) py) $ fromJust $ w^.getBoundaries . at oId
		pos w = fromJust $ w^.getPositions . at oId
setBoundary :: (MonadWriter WorldDelta m) => ObjectId -> Boundary -> m ()
setBoundary = writeProp setBoundaries
setBoundaries :: Set (ObjectProp Boundary)
setBoundaries = _compSet compBoundaries
getBoundaries :: Get (ObjectProp Boundary)
getBoundaries = _compGet compBoundaries
-- | Flag an object as static (immovable) for collision purposes.
setStaticCollidable :: (MonadWriter WorldDelta m) => ObjectId -> m ()
setStaticCollidable oId = scribe (wdCommon.wcDelta.wcStaticCollidable) (Set.insert oId Set.empty)
-- | An object is collidable iff it has both a position and a boundary.
isCollidable :: ObjectId -> Get Bool
isCollidable oId = to collidable
	where
		collidable w = Set.member oId $ objPosAndBoundary w
		objectsWithPos w = Set.fromList $ w^..wCommon.wcPositions.itraversed.asIndex
		objectsWithBoundary w = Set.fromList $ w^..wCommon.wcBoundaries.itraversed.asIndex
		objPosAndBoundary w = Set.intersection (objectsWithPos w) (objectsWithBoundary w)
setOrientations :: Set (ObjectProp Orientation)
setOrientations = _compSet compOrientation
getOrientations :: Get (ObjectProp Orientation)
getOrientations = _compGet compOrientation
setOrientation :: (MonadWriter WorldDelta m) => ObjectId -> Orientation -> m ()
setOrientation = writeProp setOrientations
getCollisionFilters :: Get (ObjectProp (Set.Set ObjectId))
getCollisionFilters = _compGet compCollisionFilter
setCollisionFilters :: Set (ObjectProp (Map.Map ObjectId (Maybe ObjectId)))
setCollisionFilters = _compSet compCollisionFilter
-- | Ignore future collisions between the two objects ('Just' = add).
setIgnoreCollision :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
setIgnoreCollision oId otherId = writeProp setCollisionFilters oId (Map.fromList [(otherId, Just otherId)])
-- | Stop ignoring collisions between the two objects ('Nothing' = remove).
unsetIgnoreCollision :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
unsetIgnoreCollision oId otherId = writeProp setCollisionFilters oId (Map.fromList [(otherId, Nothing)])
setCollisionEvents :: Set (ObjectProp [ObjectId])
setCollisionEvents = _compSet compCollisionEvent
getCollisionEvents :: Get (ObjectProp [ObjectId])
getCollisionEvents = _compGet compCollisionEvent
-- | Record that @oId@ collided with @otherId@ this step.
setCollisionEvent :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
setCollisionEvent oId otherId = writeProp setCollisionEvents oId [otherId]
-- | All objects @oId@ collided with; empty list when none recorded.
collisionEvent :: ObjectId -> Get [ObjectId]
collisionEvent oId = to (\w -> fromMaybe [] $ w^.getCollisionEvents . at oId)
--collided :: ObjectId -> ObjectId -> Get Bool
--collided oId otherId = to (\w -> otherId `elem` (w^.collisionEvent oId))
setItems :: Set (ModifyContainer ItemId Item)
setItems = _compSet compItem
getItems :: Get (ObjectProp Item)
getItems = _compGet compItem
--unitsInDistance :: ObjectId -> Float -> Get Set.Set ObjectId
--unitsInDistance oId distance = to get
--	where
--		get world = let
--			Just oPos = world^.wcPositions.at oId
| mfpi/q-inqu | Game/World/Lens.hs | mit | 9,726 | 0 | 16 | 1,799 | 2,647 | 1,406 | 1,241 | -1 | -1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module deals with Exception logging.
module System.Wlog.Exception
( logException
, catchLog
) where
import Universum
import System.Wlog.CanLog (WithLogger, WithLoggerIO, logError)
-- | Log an exception's description at the
-- 'System.Wlog.Severity.Error' severity level.
logException :: forall e m . (WithLogger m, Exception e) => e -> m ()
logException ex = logError (show ex)
{- | Runs the action; if an exception of type @e@ is raised,
'logException' is executed and the exception is swallowed.

==== __Example__

Here is a very simple example of using 'catchLog' on IO functions:

@
main :: IO ()
main = do
    buildAndSetupYamlLogging productionB "log-config.yaml"
    usingLoggerName "new-logger" runWithExceptionLog

runWithExceptionLog :: (WithLoggerIO m, MonadCatch m) => m ()
runWithExceptionLog = catchLog @IOException (liftIO simpleIOfun)

simpleIOfun :: IO ()
simpleIOfun = getLine >>= readFile >>= putStrLn
@

and when run you will get:

>>> run-main-from-this-example
> not-existing-filename.txt
[new-logger:ERROR] [2017-12-01 13:07:33.21 UTC] asd: openFile: does not exist (No such file or directory)

-}
catchLog :: forall e m . (WithLoggerIO m, MonadCatch m, Exception e) => m () -> m ()
catchLog action = action `catch` handler
  where
    -- Log-and-discard handler; the type annotation pins down which
    -- exception type @e@ is caught (via ScopedTypeVariables).
    handler :: e -> m ()
    handler = logException
| serokell/log-warper | src/System/Wlog/Exception.hs | mit | 1,398 | 0 | 9 | 248 | 166 | 96 | 70 | 14 | 1 |
{-# LANGUAGE DatatypeContexts #-}
module Ch13.Num
where
import qualified Data.List as L
--------------------------------------------------
-- Symbolic/units manipulation
--------------------------------------------------
-- | The binary operators the symbolic representation supports.
data Op
  = Plus
  | Minus
  | Mul
  | Div
  | Pow
  deriving (Eq, Show, Ord)
-- | An unevaluated symbolic expression tree over numbers of type @a@:
-- literals, free symbols, and unary/binary applications.
data SymbolicManip a
  = Number a
  | Symbol String
  | BinaryArith Op (SymbolicManip a) (SymbolicManip a)
  | UnaryArith String (SymbolicManip a)
  deriving Eq
-- | Arithmetic on symbolic expressions builds bigger expression trees;
-- nothing is evaluated here.
instance Num a => Num (SymbolicManip a) where
  a + b =
    BinaryArith Plus a b
  a - b =
    BinaryArith Minus a b
  a * b =
    BinaryArith Mul a b
  -- Negation is encoded as multiplication by the literal -1.
  negate a =
    BinaryArith Mul a (-1)
  abs a =
    UnaryArith "abs" a
  -- Was 'undefined' (a crash whenever forced); represent it symbolically
  -- like 'abs' so every Num method is total.
  signum a =
    UnaryArith "signum" a
  fromInteger i =
    Number (fromInteger i)
-- | Division is likewise purely symbolic; 'recip' becomes @1 / x@.
instance (Fractional a) => Fractional (SymbolicManip a) where
  x / y = BinaryArith Div x y
  recip x = BinaryArith Div (Number 1) x
  fromRational = Number . fromRational
-- | Transcendental functions are recorded as named unary nodes;
-- exponentiation becomes a 'Pow' node and 'pi' a free symbol.
instance (Floating a) => Floating (SymbolicManip a) where
  pi    = Symbol "pi"
  exp   = UnaryArith "exp"
  log   = UnaryArith "log"
  sqrt  = UnaryArith "sqrt"
  (**)  = BinaryArith Pow
  sin   = UnaryArith "sin"
  cos   = UnaryArith "cos"
  tan   = UnaryArith "tan"
  asin  = UnaryArith "asin"
  acos  = UnaryArith "acos"
  atan  = UnaryArith "atan"
  sinh  = UnaryArith "sinh"
  cosh  = UnaryArith "cosh"
  tanh  = UnaryArith "tanh"
  asinh = UnaryArith "asinh"
  acosh = UnaryArith "acosh"
  atanh = UnaryArith "atanh"
-- | Render a symbolic expression in conventional infix notation,
-- parenthesising sub-expressions only where precedence requires it
-- (see 'simpleParen').
prettyShow :: (Show a, Num a) => SymbolicManip a -> String
prettyShow expr =
  case expr of
    Number n ->
      show n
    Symbol s ->
      s
    BinaryArith op lhs rhs ->
      simpleParen lhs op ++ op2str op ++ simpleParen rhs op
    UnaryArith fn arg ->
      fn ++ "(" ++ prettyShow arg ++ ")"
-- | Pretty-print a sub-expression, wrapping it in parentheses when the
-- surrounding operator binds more tightly than the inner one, or binds
-- equally tightly but is a different operator.
simpleParen :: (Num a, Show a) => SymbolicManip a -> Op -> [Char]
simpleParen sub@(BinaryArith inner _ _) outer
  | needsParens = "(" ++ rendered ++ ")"
  | otherwise   = rendered
  where
    rendered    = prettyShow sub
    needsParens =
      prio outer > prio inner
        || (prio outer == prio inner && inner /= outer)
simpleParen sub _ =
  prettyShow sub
-- | Relative binding strength of each operator (higher binds tighter).
prio :: Op -> Int
prio op =
  case op of
    Plus  -> 0
    Minus -> 0
    Mul   -> 1
    Div   -> 1
    Pow   -> 1
-- | Concrete-syntax spelling of each operator.
op2str :: Op -> String
op2str op =
  case op of
    Plus  -> "+"
    Minus -> "-"
    Mul   -> "*"
    Div   -> "/"
    Pow   -> "**"
-- | 'show' uses the infix pretty-printer instead of a derived form.
instance (Show a, Num a) => Show (SymbolicManip a) where
  show =
    prettyShow
-- | Render an expression in reverse Polish (postfix) notation, with
-- tokens separated by single spaces.
rpnShow :: (Show a, Num a) => SymbolicManip a -> String
rpnShow =
  unwords . tokens
  where
    -- Post-order walk: operands first, then the operator token.
    tokens :: (Show a, Num a) => SymbolicManip a -> [String]
    tokens (Number n) =
      [show n]
    tokens (Symbol s) =
      [s]
    tokens (BinaryArith op lhs rhs) =
      tokens lhs ++ tokens rhs ++ [op2str op]
    tokens (UnaryArith name arg) =
      tokens arg ++ [name]
-- | Bottom-up algebraic simplification: recursively simplify the
-- children, then apply identity/annihilator rules for 1 and 0.
simplify :: (Eq a, Num a) => SymbolicManip a -> SymbolicManip a
simplify (BinaryArith op ia ib) =
  let sa = simplify ia
      sb = simplify ib
  in
    case (op, sa, sb) of
      (Mul, Number 1, b) ->
        b
      (Mul, a, Number 1) ->
        a
      (Mul, Number 0, _) ->
        Number 0
      (Mul, _, Number 0) ->
        Number 0
      (Div, a, Number 1) ->
        a
      (Plus, a, Number 0) ->
        a
      (Plus, Number 0, b) ->
        b
      (Minus, a, Number 0) ->
        a
      -- No rule applies: keep the node with simplified children.
      _ ->
        BinaryArith op sa sb
simplify (UnaryArith op a) =
  UnaryArith op (simplify a)
simplify x =
  x
-- | A numeric magnitude paired with a symbolic unit expression.
-- (Relies on the deprecated DatatypeContexts extension for @Num a@.)
data Num a => Units a =
  Units a (SymbolicManip a)
  deriving Eq
-- | Unit-aware arithmetic: addition requires identical units;
-- multiplication combines units symbolically.
instance (Eq a, Num a) => Num (Units a) where
  (Units xa ua) + (Units xb ub)
    | ua == ub =
      Units (xa + xb) ua
    | otherwise =
      error "Mis-matched units in add or subtract"
  -- Subtraction is addition of the negated operand, so it inherits
  -- the same unit check.
  (Units xa ua) - (Units xb ub) =
    (Units xa ua) + (Units (xb * (-1)) ub)
  (Units xa ua) * (Units xb ub) =
    Units (xa * xb) (ua * ub)
  negate (Units xa ua) =
    Units (negate xa) ua
  abs (Units xa ua) =
    Units (abs xa) ua
  -- signum is dimensionless.
  signum (Units xa _) =
    Units (signum xa) (Number 1)
  fromInteger i =
    Units (fromInteger i) (Number 1)
-- | Division divides both the magnitude and the unit expression;
-- literals are dimensionless.
instance (Eq a, Fractional a) => Fractional (Units a) where
  (Units va ua) / (Units vb ub) = Units (va / vb) (ua / ub)
  recip u = 1 / u
  fromRational r = Units (fromRational r) (Number 1)
-- | Partial unit-aware Floating instance: trig functions accept only
-- "rad" or "deg" arguments (degrees are converted first) and return
-- dimensionless results; inverse trig returns degrees; several
-- hyperbolic/exponential functions are not implemented and 'error'.
instance (Eq a, Floating a) => Floating (Units a) where
  pi =
    (Units pi (Number 1))
  exp _ =
    error "exp not yet implemented in Units"
  log _ =
    error "log not yet implemented in Units"
  -- Exponent must be dimensionless; the unit is raised symbolically.
  (Units xa ua) ** (Units xb ub)
    | ub == Number 1 =
      Units (xa ** xb) (ua ** Number xb)
    | otherwise =
      error "units for RHS of ** not supported"
  sqrt (Units xa ua) =
    Units (sqrt xa) (sqrt ua)
  sin (Units xa ua)
    | ua == Symbol "rad" =
      Units (sin xa) (Number 1)
    | ua == Symbol "deg" =
      Units (sin (deg2rad xa)) (Number 1)
    | otherwise =
      error "Units for sin must be deg or rad"
  cos (Units xa ua)
    | ua == Symbol "rad" =
      Units (cos xa) (Number 1)
    | ua == Symbol "deg" =
      Units (cos (deg2rad xa)) (Number 1)
    | otherwise =
      error "Units for cos must be deg or rad"
  tan (Units xa ua)
    | ua == Symbol "rad" =
      Units (tan xa) (Number 1)
    | ua == Symbol "deg" =
      Units (tan (deg2rad xa)) (Number 1)
    | otherwise =
      error "Units for tan must be deg or rad"
  asin (Units xa ua)
    | ua == Number 1 =
      Units (rad2deg $ asin xa) (Symbol "deg")
    | otherwise =
      error "Units for asin must be empty"
  acos (Units xa ua)
    | ua == Number 1 =
      Units (rad2deg $ acos xa) (Symbol "deg")
    | otherwise =
      error "Units for acos must be empty"
  atan (Units xa ua)
    | ua == Number 1 =
      Units (rad2deg $ atan xa) (Symbol "deg")
    | otherwise =
      error "Units for atan must be empty"
  sinh =
    error "sinh not yet implemented in Units"
  cosh =
    error "cosh not yet implemented in Units"
  tanh =
    error "tanh not yet implemented in Units"
  asinh =
    error "asinh not yet implemented in Units"
  acosh =
    error "acosh not yet implemented in Units"
  atanh =
    error "atanh not yet implemented in Units"
-- | Attach a named unit to a bare number, e.g. @units 5 "m"@.
units :: (Num z) => z -> String -> Units z
units value unitName =
  Units value (Symbol unitName)
-- | Discard the unit, keeping only the magnitude.
dropUnits :: (Num z) => Units z -> z
dropUnits (Units value _) =
  value
-- | Convert an angle in degrees to radians.
deg2rad :: Floating a => a -> a
deg2rad degrees =
  2 * pi * degrees / 360
-- | Convert an angle in radians to degrees.
rad2deg :: Floating a => a -> a
rad2deg radians =
  360 * radians / (2 * pi)
-- | Render as @magnitude_unit@, simplifying the unit expression first.
instance (Show a, Num a, Eq a) => Show (Units a) where
  show (Units xa ua) =
    show xa ++ "_" ++ prettyShow (simplify ua)
| futtetennista/IntroductionToFunctionalProgramming | RWH/src/ch13/Num.hs | mit | 6,719 | 0 | 12 | 2,145 | 2,818 | 1,382 | 1,436 | 264 | 9 |
-- Copyright (C) 2013 Jorge Aparicio
-- | Project Euler 6: print the difference between the square of the sum
-- and the sum of the squares of the first one hundred natural numbers.
main :: IO()
main =
  print (squareOfSum - sumOfSquares)
  where ns           = [1..100] :: [Int]
        total        = sum ns
        squareOfSum  = total * total
        sumOfSquares = sum (map (\n -> n * n) ns)
| japaric/eulermark | problems/0/0/6/006.hs | mit | 366 | 0 | 7 | 90 | 82 | 45 | 37 | 9 | 1 |
{-# LANGUAGE RecursiveDo #-} -- needed for Earley
module AST (Type (..), Expression (..), BindingType (..), Statement (..), Block (..), Argument (..), Function (..), AST, Error (..), parse, RenderName (..)) where
import MyPrelude
import qualified Text.Earley as E
import qualified Pretty as P
import qualified Token as T
import Pretty (Render, render)
----------------------------------------------------------------------------- types
-- | Surface syntax of types: a bare name, or a function type built from
-- parameter types and a return type.  All AST nodes below are
-- parameterized by per-node metadata (e.g. source locations) and by the
-- name representation.
data Type metadata name
    = NamedType
        name
    | FunctionType
        [NodeWith Type metadata name]
        (NodeWith Type metadata name)
    deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
-- | Expressions: names, calls, literals, and operator applications.
data Expression metadata name
    = Named
        name
    | Call
        (NodeWith Expression metadata name)
        [NodeWith Expression metadata name]
    | NumberLiteral
        Integer
    | TextLiteral
        Text
    | UnaryOperator
        UnaryOperator
        (NodeWith Expression metadata name)
    | BinaryOperator
        (NodeWith Expression metadata name)
        BinaryOperator
        (NodeWith Expression metadata name)
    deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
-- | Whether a binding is immutable ('Let') or mutable ('Var').
data BindingType
    = Let
    | Var
    deriving (Generic, Eq, Show)
-- | Statements; control-flow bodies are nested 'Block's.
data Statement metadata name
    = Expression
        (NodeWith Expression metadata name)
    | Binding
        BindingType
        name
        (NodeWith Expression metadata name)
    | Assign
        name
        (NodeWith Expression metadata name)
    | IfThen
        (NodeWith Expression metadata name)
        (NodeWith Block metadata name)
    | IfThenElse
        (NodeWith Expression metadata name)
        (NodeWith Block metadata name)
        (NodeWith Block metadata name)
    | Forever
        (NodeWith Block metadata name)
    | While
        (NodeWith Expression metadata name)
        (NodeWith Block metadata name)
    | Return
        name -- return and break refer to the `exitTarget` in `Block`; these are "phantom names", not present in the source code
        (Maybe (NodeWith Expression metadata name))
    | Break
        name -- see above
    deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
-- | A statement list plus the optional phantom name that 'Return'/'Break'
-- statements target.
data Block metadata name = Block {
    exitTarget :: Maybe name, -- "phantom", see above
    statements :: [NodeWith Statement metadata name]
} deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
-- | One formal parameter: its name and declared type.
data Argument metadata name = Argument {
    argumentName :: name,
    argumentType :: NodeWith Type metadata name
} deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
-- | A function definition; 'returns' is absent for procedures.
data Function metadata name = Function {
    functionName :: name,
    arguments :: [NodeWith Argument metadata name],
    returns :: Maybe (NodeWith Type metadata name),
    body :: NodeWith Block metadata name
} deriving (Generic, Eq, Show, Functor, Foldable, Traversable)
----------------------------------------------------------------------------- parsing
-- | Human-readable description of what the parser expected (for errors).
type Expected = Text
-- | An Earley production over located tokens, producing a located result.
type Prod r = Compose (E.Prod r Expected (With Loc T.Token)) (With Loc)
type Grammar r node = E.Grammar r (Prod r (node Loc Text))
-- | Match one exact token, discarding its value.
token :: T.Token -> Prod r ()
token = unused . Compose . E.token . pure
keyword :: T.Keyword -> Prod r ()
keyword = token . T.Keyword
-- | Match a token via a partial projection, keeping its location.
terminal :: (T.Token -> Maybe a) -> Prod r a
terminal f = Compose (E.terminal (\(With loc a) -> fmap (With loc) (f a)))
-- | Match a token built with the given constructor (by constructor name),
-- yielding its payload.
tokenConstructor :: forall name inner r. AsConstructor' name T.Token inner => Prod r inner
tokenConstructor = terminal (match @name)
-- | Wrap a production in matching open/close brackets of the given kind.
bracketed :: T.BracketKind -> Prod r output -> Prod r output
bracketed kind inner = do
    token (T.Bracket' (T.Bracket kind T.Open))
    output <- inner
    token (T.Bracket' (T.Bracket kind T.Close))
    return output
-- | Zero or more elements separated (not terminated) by the given token.
separatedBy :: T.Token -> Prod r output -> Prod r [output]
separatedBy t element = oneOf [pure [], liftA2 prepend element (zeroOrMore (token t `followedBy` element))]
followedBy :: Prod r a -> Prod r b -> Prod r b
followedBy = (*>)
-- This may seem somewhat surprising -- why do we need to /duplicate/ the location info? Doesn't the Applicative instance handle this for us?
-- The explanation is that Applicative only handles combining the sublocations into the location of the final result --
-- but we don't just want the location of the whole tree, we also want the locations of all the sub-nodes!
-- So this takes a snapshot of the location for the subnode, and also lets `Applicative` go on combining it into the location of the parent node.
located :: Prod r (node Loc Text) -> Prod r (NodeWith node Loc Text)
located = Compose . fmap dupLocated . getCompose where
    dupLocated node = With (getMetadata node) (NodeWith node)
-- | Turn a production into a (memoized) Earley grammar rule.
nodeRule :: Prod r (node Loc Text) -> Grammar r node
nodeRule = fmap Compose . E.rule . getCompose
-- | 'nodeRule' plus location capture for the produced node.
locatedNode :: Prod r (node Loc Text) -> Grammar r (NodeWith node)
locatedNode = nodeRule . located
-- from tightest to loosest; operators within a group have equal precedence
-- The 'assert' checks (at startup) that every operator appears exactly once
-- and no group is empty.
precedenceGroups :: [[BinaryOperator]]
precedenceGroups = assert (justIf isWellFormed listOfGroups) where
    isWellFormed = all exactly1 (enumerate :: [BinaryOperator]) && not (any null listOfGroups)
    exactly1 op = length (filter (== op) (concat listOfGroups)) == 1
    listOfGroups =
        [map ArithmeticOperator [Mul, Div, Mod],
         map ArithmeticOperator [Add, Sub],
         map ComparisonOperator [Less, LessEqual, Greater, GreaterEqual],
         map ComparisonOperator [Equal, NotEqual],
         map LogicalOperator [And],
         map LogicalOperator [Or]]
-- | A flat, right-nested chain of operands and operators, as produced by
-- the grammar before precedence is applied.
data BinaryOperationList metadata name
    = SingleExpression (NodeWith Expression metadata name)
    | BinaryOperation (NodeWith Expression metadata name) BinaryOperator (BinaryOperationList metadata name)
    deriving Show
-- | Fold a flat operator chain into a properly-nested expression tree by
-- resolving one precedence group at a time (tightest first); operators in
-- the same group associate left.
resolvePrecedences :: BinaryOperationList Loc Text -> NodeWith Expression Loc Text
resolvePrecedences binaryOperationList = finalResult where
    finalResult = case allPrecedencesResolved of
        SingleExpression expr -> expr
        list -> bug ("Unresolved binary operator precedence: " ++ prettyShow list)
    allPrecedencesResolved = foldl' resolveOnePrecedenceLevel binaryOperationList precedenceGroups
    resolveOnePrecedenceLevel binOpList precedenceGroup = case binOpList of
        BinaryOperation expr1 op1 (BinaryOperation expr2 op2 rest)
            | elem op1 precedenceGroup -> resolveOnePrecedenceLevel (BinaryOperation (locatedBinaryOperator expr1 op1 expr2) op2 rest) precedenceGroup
            | otherwise -> BinaryOperation expr1 op1 (resolveOnePrecedenceLevel (BinaryOperation expr2 op2 rest) precedenceGroup)
        BinaryOperation expr1 op (SingleExpression expr2)
            | elem op precedenceGroup -> SingleExpression (locatedBinaryOperator expr1 op expr2)
        other -> other
    -- The combined node spans both operands' source locations.
    locatedBinaryOperator expr1 op expr2 = NodeWith (With combinedLoc (BinaryOperator expr1 op expr2)) where
        combinedLoc = mconcat (map nodeMetadata [expr1, expr2])
-- | Expression grammar, layered from tightest to loosest binding:
-- atoms, calls, unary operators, then flat binary-operator chains whose
-- precedence is fixed up afterwards by 'resolvePrecedences'.
-- (mdo: the rules are mutually recursive.)
expressionGrammar :: Grammar r (NodeWith Expression)
expressionGrammar = mdo
    atom <- (locatedNode . oneOf)
        [
            liftA1 Named (tokenConstructor @"Name"),
            liftA1 NumberLiteral (tokenConstructor @"Number"),
            liftA1 TextLiteral (tokenConstructor @"Text"),
            liftA1 nodeWithout (bracketed T.Round expression)
        ]
    call <- (locatedNode . oneOf)
        [
            liftA2 Call call (bracketed T.Round (separatedBy T.Comma expression)),
            liftA1 nodeWithout atom
        ]
    unary <- (locatedNode . oneOf)
        [
            liftA2 UnaryOperator (tokenConstructor @"UnaryOperator") unary,
            liftA1 nodeWithout call
        ]
    binaries <- (nodeRule . oneOf)
        [
            liftA3 BinaryOperation unary (tokenConstructor @"BinaryOperator") binaries,
            liftA1 SingleExpression unary
        ]
    let expression = liftA1 resolvePrecedences binaries
    return expression
-- | Statement/block grammar.  Loop bodies get their 'exitTarget' set to
-- the phantom name "break" so 'Break' statements can refer to them.
blockGrammar :: Grammar r (NodeWith Block)
blockGrammar = mdo
    expression <- expressionGrammar
    -----------------------------------------------------------
    binding <- locatedNode do
        letvar <- terminal (\case T.Keyword T.K_let -> Just Let; T.Keyword T.K_var -> Just Var; _ -> Nothing) -- TODO prism?
        name <- tokenConstructor @"Name"
        token T.EqualsSign
        rhs <- expression
        token T.Semicolon
        return (Binding letvar name rhs)
    assign <- locatedNode do
        lhs <- tokenConstructor @"Name"
        token T.EqualsSign
        rhs <- expression
        token T.Semicolon
        return (Assign lhs rhs)
    ifthen <- locatedNode do
        keyword T.K_if
        cond <- expression
        body <- block
        return (IfThen cond body)
    ifthenelse <- locatedNode do
        keyword T.K_if
        cond <- expression
        body1 <- block
        keyword T.K_else
        body2 <- block
        return (IfThenElse cond body1 body2)
    forever <- locatedNode do
        keyword T.K_forever
        body <- block
        return (Forever (mapNode (set (field @"exitTarget") (Just "break")) body))
    while <- locatedNode do
        keyword T.K_while
        cond <- expression
        body <- block
        return (While cond (mapNode (set (field @"exitTarget") (Just "break")) body))
    ret <- locatedNode do
        keyword T.K_return
        arg <- liftA1 head (zeroOrOne expression)
        token T.Semicolon
        return (Return "return" arg)
    break <- locatedNode do
        keyword T.K_break
        token T.Semicolon
        return (Break "break")
    exprStatement <- locatedNode do
        expr <- expression
        token T.Semicolon
        return (Expression expr)
    -----------------------------------------------------
    statement <- nodeRule (oneOf [binding, assign, ifthen, ifthenelse, forever, while, ret, break, exprStatement])
    block <- locatedNode do
        statements <- bracketed T.Curly (oneOrMore statement)
        return Block { exitTarget = Nothing, statements }
    return block
-- | Type grammar: a bare name, or @function (T, ...) returns T@.
typeGrammar :: Grammar r (NodeWith Type)
typeGrammar = mdo
    functionType <- nodeRule do
        keyword T.K_function
        parameters <- bracketed T.Round (separatedBy T.Comma type')
        keyword T.K_returns
        returns <- type'
        return (FunctionType parameters returns)
    type' <- locatedNode (oneOf [liftA1 NamedType (tokenConstructor @"Name"), functionType])
    return type'
-- | Whole-function grammar.  The body's 'exitTarget' is set to the
-- phantom name "return" so 'Return' statements can refer to it.
functionGrammar :: Grammar r (NodeWith Function)
functionGrammar = do
    block <- blockGrammar
    type' <- typeGrammar
    argument <- locatedNode do
        argumentName <- tokenConstructor @"Name"
        token T.Colon
        argumentType <- type'
        return Argument { argumentName, argumentType }
    locatedNode do
        keyword T.K_function
        functionName <- tokenConstructor @"Name"
        arguments <- bracketed T.Round (separatedBy T.Comma argument)
        returns <- liftA1 head (zeroOrOne (keyword T.K_returns `followedBy` type'))
        body <- block
        return Function { functionName, arguments, returns, body = mapNode (set (field @"exitTarget") (Just "return")) body }
-- | A program is a list of function definitions.
type AST metadata name = [NodeWith Function metadata name]
-- | Parse failure ('Invalid': position, expectations, remaining input)
-- or grammar ambiguity ('Ambiguous': all full parses found).
data Error
    = Invalid Int [Expected] [With Loc T.Token]
    | Ambiguous [AST Loc Text]
    deriving (Generic, Show)
-- | Run the Earley parser over a token stream; succeeds only when there
-- is exactly one full parse.
parse :: [With Loc T.Token] -> Either Error (AST Loc Text)
parse = checkResult . E.fullParses parser where
    parser = E.parser (liftM oneOrMore (fmap (fmap unWith . getCompose) functionGrammar))
    checkResult = \case
        ([], E.Report a b c) ->
            Left (Invalid a b c)
        ([one], _) ->
            Right one
        (more, _) ->
            Left (Ambiguous more)
----------------------------------------------------------------------------- pretty-printing
-- | Render a block's statements inside braces, indented by 4.
renderBlock :: RenderName name => NodeWith Block metadata name -> P.Document
renderBlock block =
    P.braces (P.nest 4 (P.hardline ++ render block) ++ P.hardline)
-- | How to render a name, distinguishing definition sites from uses.
class RenderName name where
    renderName :: P.DefinitionOrUse -> name -> P.Document
instance RenderName Text where
    renderName defOrUse name =
        P.note (P.Identifier (P.IdentInfo name defOrUse P.Unknown False)) (P.pretty name)
-- | Pretty-printers for each AST node type; they emit the same concrete
-- syntax the parser above accepts.
instance RenderName name => Render (Type metadata name) where
    listSeparator = ", "
    render = \case
        NamedType name ->
            renderName P.Use name
        FunctionType parameters returns ->
            P.keyword "function" ++ P.parens (render parameters) ++ " " ++ P.keyword "returns" ++ " " ++ render returns
instance RenderName name => Render (Expression metadata name) where
    listSeparator = ", "
    render = \case
        Named name ->
            renderName P.Use name
        Call fn args ->
            render fn ++ P.parens (render args)
        NumberLiteral number->
            P.number number
        TextLiteral text ->
            P.string text
        UnaryOperator op expr->
            P.unaryOperator op ++ render expr
        BinaryOperator expr1 op expr2 ->
            render expr1 ++ " " ++ P.binaryOperator op ++ " " ++ render expr2
instance Render BindingType where
    render = P.keyword . \case
        Let -> "let"
        Var -> "var"
instance RenderName name => Render (Statement metadata name) where
    render = \case
        Binding btype name expr ->
            render btype ++ " " ++ renderName P.Definition name ++ " " ++ P.defineEquals ++ " " ++ render expr ++ P.semicolon
        Assign name expr ->
            renderName P.Use name ++ " " ++ P.assignEquals ++ " " ++ render expr ++ P.semicolon
        IfThen expr block ->
            P.keyword "if" ++ " " ++ render expr ++ " " ++ renderBlock block
        IfThenElse expr block1 block2 ->
            render (IfThen expr block1) ++ " " ++ P.keyword "else" ++ " " ++ renderBlock block2
        Forever block ->
            P.keyword "forever" ++ " " ++ renderBlock block
        While expr block ->
            P.keyword "while" ++ " " ++ render expr ++ " " ++ renderBlock block
        Return _ maybeExpr ->
            P.keyword "return" ++ (maybe "" (\expr -> " " ++ render expr) maybeExpr) ++ P.semicolon
        Break _ ->
            P.keyword "break" ++ P.semicolon
        Expression expr ->
            render expr ++ P.semicolon
instance RenderName name => Render (Block metadata name) where
    render Block { statements } = render statements
instance RenderName name => Render (Argument metadata name) where
    listSeparator = ", "
    render Argument { argumentName, argumentType } =
        renderName P.Definition argumentName ++ P.colon ++ " " ++ render argumentType
instance RenderName name => Render (Function metadata name) where
    render Function { functionName, arguments, returns, body } =
        renderedHead ++ renderedArguments ++ renderedReturns ++ renderedBody where
            renderedHead = P.keyword "function" ++ " " ++ renderName P.Definition functionName
            renderedArguments = P.parens (render arguments)
            renderedReturns = maybe "" (\returnType -> " " ++ P.keyword "returns" ++ " " ++ render returnType) returns
            renderedBody = P.hardline ++ renderBlock body
| glaebhoerl/stageless | src/AST.hs | mit | 15,229 | 0 | 20 | 3,916 | 4,583 | 2,278 | 2,305 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
module DBTypes where
import Data.Aeson
import Data.Aeson.TH
import Data.Text
import Data.Time.Clock
import GHC.Generics
import Control.Lens
import Types
import Price
import Models
import Database.Persist
import Database.Persist.Sql
import Data.Serialize
import GHC.TypeLits
-- Readable aliases for the persistent entity types and their keys.
type Tenant = DBTenant
type TenantOutput = DBTenant
type TenantId = Key DBTenant
type UserId = Key DBUser
type ProductId = Key DBProduct
type UserActivationId = Key DBUserActivation
type TenantActivationId = Key DBTenantActivation
-- | A product together with all of its variant rows.
data Product = Product { getProduct :: Entity DBProduct
                       , getVariants :: [Entity DBVariant]}
-- Dummy instance
-- NOTE(review): 'toJSON' is 'undefined' and will crash if ever forced;
-- confirm nothing serializes 'Product' before relying on it.
instance ToJSON Product where
    toJSON = undefined
-- | Payload for creating a product, including its variants.
data ProductInput =
    ProductI { piName :: Text
             , piDescription :: Text
             , piCurrency :: Text
             , piType :: ProductType
             , piVariants :: [VariantInput]
             , piProperties :: AppJSON
             , piCostPrice :: Maybe Price
             , piComparisonPrice :: Maybe Price
             , piAdvertisedPrice :: Maybe Price
             , piURLSlug :: Maybe Text
             }
-- | Payload for creating one product variant.
data VariantInput =
    VariantI { viName :: Text
             , viSKU :: Text
             , viWeightInGrams :: Maybe Double
             , viWeightDisplayUnit :: Maybe Text
             , viPrice :: Price
             }
-- | Serialize a user key via its underlying SQL backend key.
instance Serialize UserId where
    get = DBUserKey . SqlBackendKey <$> Data.Serialize.get
    put x = put (unSqlBackendKey $ unDBUserKey x)
-- | Serialize timestamps via their Show/Read round-trip.
-- NOTE(review): 'read' is partial -- malformed input makes 'get' crash
-- rather than fail gracefully; consider 'readMaybe'.
instance Serialize UTCTime where
    get = read <$> Data.Serialize.get
    put x = put (show x)
-- | A logged-in session: who, and since when.
data Session = Session { sessionUserID :: UserId
                       , startTime :: UTCTime
                       } deriving (Show, Generic, Serialize, FromJSON , ToJSON)
-- | Failures that database operations can report.
data DBError = TenantNotFound TenantId
             | UserNotFound UserId
             | ProductNotFound ProductId
             | RoleNotFound (Either RoleId Text)
             | ViolatesTenantUniqueness (Unique Tenant)
             | UserAlreadyActive UserId
             deriving (Eq, Show)
-- | Failures specific to creating a user.
data UserCreationError = UserExists Text
                       | TenantDoesn'tExist Text
                       deriving (Eq, Show)
-- | Opaque activation failure (no further detail carried).
data ActivationError = ActivationError
-- | Payload for creating a tenant.
data TenantInput =
    TenantI { _name :: Text
            , _backofficeDomain :: Text
            } deriving (Generic)
-- JSON field names drop the leading underscore of the record fields.
instance FromJSON TenantInput where
    parseJSON = genericParseJSON (defaultOptions { fieldLabelModifier = Prelude.drop 1})
instance ToJSON TenantInput where
    toEncoding = genericToEncoding (defaultOptions { fieldLabelModifier = Prelude.drop 1})
    toJSON = genericToJSON (defaultOptions { fieldLabelModifier = Prelude.drop 1})
instance HasName TenantInput where
    name = lens _name (\ti n -> ti { _name = n } )
instance HasBackofficeDomain TenantInput where
    backofficeDomain = lens _backofficeDomain (\ti bd -> ti { _backofficeDomain = bd } )
-- | Phase tag for 'UserBase': 'Input' is what clients submit,
-- 'Regular' is a fully-populated stored user.
data UserType = Input
              | Regular
-- | Selects, per phase, which 'UserBase' fields are present (@a@) and
-- which collapse to @()@: inputs carry only the password; regular users
-- carry status, role and id but no password.
type family Omittable (state :: UserType) (s :: Symbol) a where
    Omittable Input "password" a = a
    Omittable Input _ a = ()
    Omittable Regular "password" a = ()
    Omittable Regular _ a = a
-- | Lens-style access to a record's tenant id.
class HasTenantID s where
    tenantID :: Lens' s TenantId
-- | Lens-style access to a record's user id.
class HasUserID s where
    userID :: Lens' s UserId
instance HasTenantID DBUser where
    tenantID = dBUserTenantID
-- | User record, indexed by phase; see 'Omittable' for which fields
-- exist in each phase.
data UserBase (userType :: UserType)=
    UserB { _userFirstName :: Text
          , _userLastName :: Text
          , _userEmail :: Text
          , _userPhone :: Text
          , _userUsername :: Text
          , _userTenantID :: TenantId
          , _userPassword :: Omittable userType "password" Text
          , _userStatus :: Omittable userType "status" UserStatus
          , _userRole :: Omittable userType "role" Role
          , _userUserID :: Omittable userType "userID" UserId
          } deriving (Generic)
makeLenses ''UserBase
instance HasHumanName (UserBase a) where
    firstName = userFirstName
    lastName = userLastName
instance HasContactDetails (UserBase a) where
    email = userEmail
    phone = userPhone
instance HasUsername (UserBase a) where
    username = userUsername
-- Password is only accessible in the Input phase (it is () otherwise).
instance HasPassword (UserBase Input) where
    password = userPassword
instance HasTenantID (UserBase a) where
    tenantID = userTenantID
-- The user id only exists in the Regular phase.
instance HasUserID (UserBase Regular) where
    userID = userUserID
-- Standalone deriving: Show is available whenever all phase-dependent
-- fields are themselves showable.
deriving instance (Show (Omittable a "password" Text),
                   Show (Omittable a "status" UserStatus),
                   Show (Omittable a "role" Role),
                   Show (Omittable a "userID" UserId))
                   => Show (UserBase a)
type UserInput = UserBase Input
type User = UserBase Regular
| wz1000/haskell-webapps | ServantPersistent/src/DBTypes.hs | mit | 4,951 | 0 | 11 | 1,354 | 1,185 | 666 | 519 | -1 | -1 |
{-|
Module : TTN.Model.Translation
Description : Model code for Translation.
Author : Sam van Herwaarden <samvherwaarden@protonmail.com>
-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module TTN.Model.Translation where
import TTN.Model.Article
import TTN.Model.Language
import TTN.Model.User
import Data.Text ( Text )
import Data.Time.Clock ( UTCTime )
import Database.Persist
import Database.Persist.Postgresql
import Database.Persist.TH
import qualified Data.Text as T
-- * Translation
-- | Persistent schema for the @translations@ table.  Note the legacy
-- @body@ column kept around as 'bodyOld' next to the newer @body_new@.
share [mkPersist sqlSettings, mkMigrate "migrateTranslation"] [persistLowerCase|
Translation sql=translations
    artId ArticleId sql=article_id
    contrId UserId sql=contributor_id
    lang Language sql=trans_lang
    title Text sql=title
    summary Text Maybe sql=summary
    body ArticleBody default='[]' sql=body_new
    created UTCTime default=CURRENT_TIMESTAMP sql=created
    bodyOld String sql=body
    Id sql=id
    deriving Read Show
|]
-- | Drop the database key of a persistent 'Entity', keeping the 'Translation'.
entTranslation :: Entity Translation -> Translation
entTranslation entity =
    case entity of
        Entity _ translation -> translation
-- | Articles that have at least one translation into the given language.
-- The @??@ placeholder is expanded by 'rawSql' to the selected entity's
-- columns; the single @?@ receives the language parameter.
getArticlesTranslatedToLang :: Language -> SqlPersistM [Entity Article]
getArticlesTranslatedToLang lang = rawSql query [toPersistValue lang]
    where query = T.unwords [ "SELECT DISTINCT ?? FROM translations"
                            , "INNER JOIN articles"
                            , "ON translations.article_id = articles.id"
                            , "WHERE translations.trans_lang = ?" ]
-- | The most recent translation per article, restricted to the given
-- languages and ordered by article creation time (newest first).  One @?@
-- hole is generated per requested language.
-- NOTE(review): an empty @langs@ list yields @IN ()@, which is not valid
-- SQL — confirm callers never pass an empty list.
getTranslationsInLangs :: [Language] -> SqlPersistM [Entity Translation]
getTranslationsInLangs langs = rawSql query $ map toPersistValue langs
    where holes = T.intercalate "," . take (length langs) $ repeat "?"
          query = T.unwords [ "SELECT ??"
                            , "FROM ( SELECT DISTINCT ON (a.id)"
                            , "a.created AS a_created, t.* "
                            , "FROM translations AS t"
                            , "INNER JOIN articles AS a"
                            , "ON t.article_id = a.id"
                            , "WHERE t.trans_lang IN ("
                            , holes
                            , ")"
                            , "ORDER BY a.id, t.created DESC) translations"
                            , "ORDER BY translations.a_created DESC" ]
| samvher/translatethenews | app/TTN/Model/Translation.hs | mit | 2,816 | 0 | 11 | 1,056 | 309 | 177 | 132 | 40 | 1 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2018.M04.D04.Solution where
{--
So you have the data from the last two day's exercises, let's start storing
those data into a PostgreSQL database. Today's exercise is to store just
the authors.
But there's a catch: you have to consider you're doing this as a daily upload.
So: are there authors already stored? If so we don't store them, if not, we
DO store them and get back the unique ID associated with those authors (for
eventual storage in an article_author join table.
First, fetch all the authors already stored (with, of course their unique ids)
--}
import Control.Monad.State
import Data.Aeson
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Tuple (swap)
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
-- we will use previous techniques for memoizing tables:
-- below imports available via 1HaskellADay git repository
import Data.LookupTable
import Data.MemoizingTable (MemoizingS, MemoizingTable(MT))
import qualified Data.MemoizingTable as MT
import Store.SQL.Util.Indexed
import Store.SQL.Util.LookupTable
import Y2018.M04.D02.Solution hiding (idx)
import Y2018.M04.D03.Solution
import Y2018.M04.D05.Solution -- looking forward in time, m'kay
-- 1. fetch the authors into a LookupTable then convert that into a memoizing
-- table state
{--
>>> json <- readJSON arts
>>> (Success arties) = (fromJSON json) :: Result [Article]
>>> ci <- connectInfo "WPJ"
>>> conn <- connect ci
--}
-- | Name of the PostgreSQL lookup table storing author names.
authorTableName :: String
authorTableName = "author"
-- | Load the @author@ lookup table (name -> id) from the database.
lookupAuthors :: Connection -> IO LookupTable
lookupAuthors conn = lookupTable conn authorTableName
{--
>>> auths <- lookupAuthors conn
fromList []
--}
-- | State monad pairing a memoizing table of authors (keyed by DB id)
-- with a scratch map.
type MemoizedAuthors m = MemoizingS m Integer Author ()
-- | Seed the memoizing state from a DB lookup table.  Note the 'swap':
-- the lookup table maps name -> id, the memoizing table wants id -> name.
lk2MS :: Monad m => LookupTable -> MemoizedAuthors m
lk2MS table = put (MT.start (map swap $ Map.toList table), Map.empty)
-- 2. from yesterday's exercise, triage the authors into the memoizing table
-- | Classify authors as already-known or new against the memoizing table.
addNewAuthors :: Monad m => [Author] -> MemoizedAuthors m
addNewAuthors = MT.triageM 5 -- because '5' is a nice number
-- 3. store the new memoizing table values into the author table
-- | INSERT statement for one author; @returning id@ hands back the
-- generated primary key for the join table.
authorStmt :: Query
authorStmt = [sql|INSERT INTO author (author) VALUES (?) returning id|]
-- | Wrapper giving 'Author' a 'ToRow' instance (a single-column row).
data AuthorVal = AV Author
instance ToRow AuthorVal where
  toRow (AV x) = [toField x]
-- | Persist any authors the memoizing table marks as new, then fold the
-- returned database ids back into the table.  A no-op when nothing is new,
-- avoiding an empty multi-row INSERT.  ('unless' comes from Control.Monad,
-- re-exported by Control.Monad.State.)
insertAuthors :: Connection -> MemoizedAuthors IO
insertAuthors conn = do
   (mt@(MT _ _ news), _) <- get
   let authors = Set.toList news
   unless (null authors) $ do
      idxen <- lift (returning conn authorStmt (map AV authors) :: IO [Index])
      put (MT.update (zip (map idx idxen) authors) mt, Map.empty)
{--
>>> execStateT (addNewAuthors (Map.elems $ authors arties) >>
insertAuthors conn) (MT.start (map swap $ Map.toList auths), Map.empty)
(MT {fromTable = fromList [(1,"Ahmed H. Adam"),(2,"Jonathan Cristol")],
readIndex = fromList [("Ahmed H. Adam",1),("Jonathan Cristol",2)],
newValues = fromList []},fromList [])
>>> close conn
$ select * from author;
id author
--------------
1 Ahmed H. Adam
2 Jonathan Cristol
--}
-- and there we go for today! Have at it!
| geophf/1HaskellADay | exercises/HAD/Y2018/M04/D04/Solution.hs | mit | 3,329 | 0 | 18 | 566 | 513 | 299 | 214 | 41 | 2 |
module Rewriting.Abstract.ToDoc where
import Rewriting.Abstract.Data
import Autolib.ToDoc
-- | Pretty-print propositions.  Connectives are rendered infix with no
-- precedence handling beyond explicit 'PropParens'.
instance ToDoc Prop where
    toDoc p = case p of
        Or ps -> hsep $ punctuate (text " ||") $ map toDoc ps
        And ps -> hsep $ punctuate (text " &&")$ map toDoc ps
        Not p -> text "not" <+> toDoc p
        PropParens p -> parens $ toDoc p
        Prop1 p1 x -> text (show p1) <+> parens ( toDoc x )
        Prop2 p2 x y -> text (show p2) <+> toDoc (x,y)
-- | Pretty-print expressions.  The unary closure operators print postfix
-- (@^-@, @^+@, @^*@); only @complement@ prints prefix.
instance ToDoc Exp where
    -- this is a bit risky (it prints no parens on its own,
    -- so all parens must be present via ExpParens)
    toDoc x = case x of
        ExpParens x -> parens $ toDoc x
        Ref i -> toDoc i
        Op1 Complement x -> text "complement" <+> toDoc x
        Op1 o x -> toDoc x <+> case o of
            Inverse -> text "^-"
            Transitive_Closure -> text "^+"
            Transitive_Reflexive_Closure -> text "^*"
        Op2 o x y -> hsep
            [ toDoc x
            , text $ case o of
                  Product -> "."
                  Intersection -> "&"
                  Union -> "+"
                  Difference -> "-"
            , toDoc y ]
| marcellussiegburg/autotool | collection/src/Rewriting/Abstract/ToDoc.hs | gpl-2.0 | 1,164 | 0 | 14 | 442 | 386 | 183 | 203 | 28 | 0 |
module Scope where
import Utils
import Stack as Stack
import qualified Data.Map as HashTable
-- | One frame's bindings: variable name -> slot index.
type HashTable = HashTable.Map String Int
-- | A stack of frames (bindings, bindings-in-frame count, whether lookup
-- may fall through to enclosing frames) plus the next free slot index.
type Scope = (Stack (HashTable, Int, Bool), Int)
-- | The empty scope: no frames, first slot is 0.
empty :: Scope
empty = (Stack.empty, 0)
-- | Open a new, empty frame; @recursive@ records whether name lookup may
-- continue into the enclosing frames.
enscope :: Scope -> Bool -> Scope
enscope (frames, nextSlot) recursive =
    (Stack.push (HashTable.empty, 0, recursive) frames, nextSlot)
-- | Discard the innermost frame, releasing the slots it allocated.
descope :: Scope -> Scope
descope (frames, nextSlot) =
    let released = snd3 (Stack.top frames)
    in (Stack.pop frames, nextSlot - released)
-- | Bind @var@ in the innermost frame, assigning it the next free slot.
insert :: String -> Scope -> Scope
insert var (frames, nextSlot) =
    let (table, count, recursive) = Stack.top frames
        frame' = (HashTable.insert var nextSlot table, count + 1, recursive)
    in (Stack.changeTop frame' frames, nextSlot + 1)
-- | Resolve a variable to its slot.  The innermost frame is consulted
-- first; when that frame was opened as recursive (third component), the
-- search falls through to the enclosing frames.
-- (Replaces the non-idiomatic @ans == Nothing@ test with a pattern match
-- and computes 'Stack.top' only once.)
lookup :: String -> Scope -> (Maybe Int)
lookup var (st, _)
    | Stack.isEmpty st = Nothing
    | otherwise =
        case HashTable.lookup var (fst3 top) of
            Nothing | last3 top -> Scope.lookup var (Stack.pop st, 0)
            ans -> ans
    where top = Stack.top st
-- | Total number of slots allocated across the whole scope.
size :: Scope -> Int
size = snd
-- | Number of bindings in the innermost frame only.
topSize :: Scope -> Int
topSize = snd3 . Stack.top . fst
{- main = do
let sc = Main.insert "b" (Main.insert "a" (Main.enscope Main.empty))
putStrLn (show (Main.lookup "a" sc))
putStrLn (show (Main.lookup "b" sc))
putStrLn (show (Main.lookup "c" sc))
putStrLn ""
let sc1 = Main.insert "c" (Main.enscope sc)
putStrLn (show (Main.lookup "a" sc1))
putStrLn (show (Main.lookup "b" sc1))
putStrLn (show (Main.lookup "c" sc1))
putStrLn ""
let sc2 = Main.descope sc1
putStrLn (show (Main.lookup "a" sc2))
putStrLn (show (Main.lookup "b" sc2))
putStrLn (show (Main.lookup "c" sc2)) -}
| gangsterveggies/julia-pinheiro-compiler | Scope.hs | gpl-2.0 | 1,573 | 0 | 16 | 353 | 474 | 259 | 215 | 26 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module InnerEar.Exercises.Intervals1 (intervals1Exercise) where
import Reflex
import Reflex.Dom
import Sound.MusicW
import Data.Map
import Text.JSON
import Text.JSON.Generic
import InnerEar.Exercises.MultipleChoice
import InnerEar.Types.ExerciseId
import InnerEar.Types.Exercise
import InnerEar.Types.Score
import InnerEar.Types.MultipleChoiceStore
import InnerEar.Types.Data hiding (Time)
import InnerEar.Types.Utility
import InnerEar.Widgets.SpecEval
import InnerEar.Widgets.Config
import InnerEar.Widgets.AnswerButton
type Config = ()
data Answer = P1 | M2 | M3 | P4 | P5
deriving (Eq,Ord,Data,Typeable)
-- | Width of the interval in semitones above the base tone.
answerToSemitones :: Answer -> Double
answerToSemitones answer =
    case answer of
        P1 -> 0.0
        M2 -> 2.0
        M3 -> 4.0
        P4 -> 5.0
        P5 -> 7.0
-- | Human-readable interval names shown on the answer buttons.
instance Show Answer where
  show P1 = "Perfect Unison"
  show M2 = "Major 2nd"
  show M3 = "Major 3rd"
  show P4 = "Perfect 4th"
  show P5 = "Perfect 5th"
instance Buttonable Answer where
  makeButton = showAnswerButton
-- | All selectable answers, in display order.
answers = [P1,M2,M3,P4,P5]
-- | Reference pitch for the first note (MIDI 60, middle C).
baseTone :: Frequency
baseTone = Midi 60
-- *** note: random pitches requires renderAnswer to return IO Sound instead of Sound
-- ^ then the pitch generation belongs in the Config where the config widget can perform the IO
-- | Build the two-note playback: the base tone, then the base tone raised
-- by the chosen interval, each enveloped for ~1.2 s with a 0.5 s rest in
-- between.  'Nothing' renders silence.
renderAnswer :: Map String AudioBuffer -> Config -> (SourceNodeSpec, Maybe Time) -> Maybe Answer -> Synth ()
renderAnswer _ _ _ Nothing = buildSynth_ $ silent >> destination
renderAnswer _ _ _ (Just interval) = buildSynth_ $ do
  osc <- oscillator Triangle baseTone
  let amp = Db $ fromIntegral (-20)
  g <- rectEnv (Millis 100) (Sec 1) amp
  let firstDur = (Millis $ 2 * 100) + (Sec 1)
  let rest = Sec 0.5
  -- Change the frequency of the oscillator after the first playback.
  setParamValue "frequency" (inHz $ Midi $ answerToSemitones interval + inMidi baseTone) firstDur osc
  -- Reset for second note and have another rectEnv at firstDur.
  setParamValue "gain" 0 (firstDur + rest) g
  linearRampToParamValue "gain" (inAmp amp) (firstDur + rest + Millis 100) g
  setParamValue "gain" (inAmp amp) (firstDur + rest + Millis 100 + Sec 1) g
  linearRampToParamValue "gain" 0 (firstDur + rest + Millis (2 * 100) + Sec 1) g
  destination
  setDeletionTime (firstDur*2+rest +(Sec 0.5))
-- | Static instruction text shown above the exercise.
instructions :: MonadWidget t m => m ()
instructions = el "div" $ do
  elClass "div" "instructionsText" $ text "Instructions placeholder"
-- | Session-performance graph over the five answer buttons.
displayEval :: MonadWidget t m => Dynamic t (Map Answer Score) -> Dynamic t (MultipleChoiceStore Config Answer) -> m ()
displayEval e _ = displayMultipleChoiceEvaluationGraph ("scoreBarWrapperFiveBars","svgBarContainerFiveBars","svgFaintedLineFiveBars", "xLabelFiveBars") "Session performance" "Hz" answers e
-- | Pick a random question; config and answer history are unused here.
generateQ :: Config -> [ExerciseDatum] -> IO ([Answer],Answer)
generateQ _ _ = randomMultipleChoiceQuestion answers
-- | Wire everything into the generic multiple-choice exercise scaffold.
intervals1Exercise :: MonadWidget t m => Exercise t m Config [Answer] Answer (Map Answer Score) (MultipleChoiceStore Config Answer)
intervals1Exercise = multipleChoiceExercise
  3
  answers
  instructions
  (\_ _-> return (constDyn (), constDyn (Just (Silent, Nothing)), never, never))
  renderAnswer
  Intervals1
  ()
  (\_ _ -> return ())
  generateQ
  (const (0,2))
| d0kt0r0/InnerEar | src/InnerEar/Exercises/Intervals1.hs | gpl-3.0 | 3,230 | 0 | 14 | 524 | 1,005 | 523 | 482 | 72 | 1 |
module Graph where
import Data.List (find)
import Control.Monad (liftM)
import Relation
import Set
-- | Opaque state identifier; 'Enum' lets 'new' build the infinite supply.
newtype Id = Id Int
    deriving (Enum, Show, Eq, Ord)
-- | A state: its id plus the payload it carries.
data State a = State Id a
    deriving (Show)
type StartingStateId = Id
type AcceptingStateId = Id
-- | A labelled transition graph: states, edge relation, start state,
-- accepting states, and the supply of unused ids.
data Graph a b = Graph [State a] (Relation Id Id b) StartingStateId [AcceptingStateId] [Id]
-- The (availableId:_) pattern is safe because the supply built by 'new'
-- is infinite; it would be partial for a hand-built empty supply.
instance (Show a, Show b) => Show (Graph a b) where
    show (Graph states transitions startingId acceptingIds (availableId:_)) =
        "States:\n" ++ show states ++
        "\nTransitions:\n" ++ show transitions ++
        "\nStarting id:\n" ++ show startingId ++
        "\nAccepting ids:\n" ++ show acceptingIds ++
        "\nFirst available id:\n" ++ show availableId ++
        "\n"
-- | A one-state graph; that state (id 1) is the start state, and ids 2..
-- form the infinite supply of unused ids.
new :: a -> (Graph a b, Id)
new x = (Graph [State (Id 1) x] Relation.empty (Id 1) [] [(Id 2) ..], Id 1)
-- | Payload carried by a state.
getStateData :: State a -> a
getStateData (State _ x) = x
-- | Identifier of a state.
getStateId :: State a -> Id
getStateId (State id _) = id
-- | All states in the graph.
getStates :: Graph a b -> [State a]
getStates (Graph states _ _ _ _) = states
-- | The labelled edge relation.
getTransitions :: Graph a b -> Relation Id Id b
getTransitions (Graph _ transitions _ _ _) = transitions
-- | Id of the start state.
getStartingState :: Graph a b -> StartingStateId
getStartingState (Graph _ _ startingId _ _) = startingId
-- | Ids of all accepting states.
getAcceptingStates :: Graph a b -> [AcceptingStateId]
getAcceptingStates (Graph _ _ _ acceptingIds _) = acceptingIds
-- | Id of the first state whose payload equals @x@, if any.
-- (Replaces the @>>= return . f@ anti-pattern with 'fmap'.)
getId :: (Eq a) => a -> Graph a b -> Maybe Id
getId x graph = fmap getStateId (find ((x ==) . getStateData) (getStates graph))
-- | Add a state carrying @x@, consuming the next free id; when @accepting@
-- holds, the new id is also recorded as an accepting state.
insertState :: a -> Bool -> Graph a b -> (Graph a b, Id)
insertState x accepting (Graph states transitions startingId acceptingIds (id:ids)) =
    (Graph (State id x : states) transitions startingId acceptingIds' ids, id)
  where
    acceptingIds'
        | accepting = id : acceptingIds
        | otherwise = acceptingIds
-- | Add a labelled edge from @src@ to @dst@.
insertTransition :: b -> (Id, Id) -> Graph a b -> Graph a b
insertTransition x (src, dst) (Graph states transitions startingId acceptingIds ids) =
    let transitions' = Relation.insert src dst x transitions
    in Graph states transitions' startingId acceptingIds ids
-- | Whether the given id is one of the accepting states.
isAccepting :: Id -> Graph a b -> Bool
isAccepting id graph = id `Prelude.elem` getAcceptingStates graph
-- | Outgoing edges of a state.
transitionsFrom :: Id -> Graph a b -> Set (RelPair Id b)
transitionsFrom id = Relation.lookupFst id . getTransitions
-- | Incoming edges of a state.
transitionsTo :: Id -> Graph a b -> Set (RelPair Id b)
transitionsTo id = Relation.lookupSnd id . getTransitions
-- | Map a function over every state payload, leaving structure intact.
mapStates :: (a -> c) -> Graph a b -> Graph c b
mapStates f (Graph states transitions startingId acceptingIds ids) =
    Graph states' transitions startingId acceptingIds ids
  where
    states' = [State id (f x) | State id x <- states]
-- | Apply @f@ to the payload of state @id@.  Reports whether a state was
-- actually modified; @f@ returning 'Nothing' (or the id being absent)
-- leaves the graph untouched and reports 'False'.
modifyState :: Id -> (a -> Maybe a) -> Graph a b -> (Bool, Graph a b)
modifyState id f (Graph states transitions startingId acceptingIds ids) =
    (modified, Graph states' transitions startingId acceptingIds ids)
    where
      -- Rebuild the state list up to the first id match; 'Nothing'
      -- signals that no (successful) modification happened.
      modifyOccurence [] = Nothing
      modifyOccurence (state@(State id' x):states)
        | id == id' =
            case f x of
              Nothing -> Nothing
              Just x' -> Just $ (State id' x') : states
        | otherwise = liftM (state :) (modifyOccurence states)
      (modified, states') =
        case modifyOccurence states of
          Nothing -> (False, states)
          Just states' -> (True, states')
dataFromId :: Id -> Graph a b -> Maybe a
dataFromId id graph = liftM getStateData $ find ((id ==) . getStateId) (getStates graph) | mkm/sokoban | src/Graph.hs | gpl-3.0 | 3,457 | 0 | 16 | 782 | 1,375 | 704 | 671 | 68 | 4 |
{-# LANGUAGE TemplateHaskell #-}
module Lamdu.Calc.Term.Utils
( Composite(..), tags, rest
, case_
, recExtend
) where
import qualified Control.Lens as Lens
import Hyper.Syntax.Row (RowExtend(..))
import Lamdu.Calc.Term (Val)
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Type as T
import Lamdu.Prelude
-- | A record/case row split into its explicitly tagged fields plus an
-- optional "rest of the row" remainder.
data Composite a = Composite
    { _tags :: Map T.Tag a
    , _rest :: Maybe a
    } deriving (Functor, Foldable, Traversable)
Lens.makeLenses ''Composite
-- | Flatten a chain of case alternatives into a 'Composite': 'LAbsurd'
-- terminates the chain with no remainder; any other term is kept as the
-- remainder.
case_ :: RowExtend T.Tag V.Term V.Term # Annotated pl -> Composite (Val pl)
case_ (RowExtend tag handler r) =
    caseVal r
    & tags . Lens.at tag ?~ handler
    where
        caseVal v@(Ann _ body) =
            case body of
            V.BLeaf V.LAbsurd -> Composite mempty Nothing
            V.BCase x -> case_ x
            _ -> Composite mempty (Just v)
-- | Flatten a chain of record extensions into a 'Composite': 'LRecEmpty'
-- terminates the chain with no remainder; any other term is kept as the
-- remainder.
recExtend :: RowExtend T.Tag V.Term V.Term # Annotated pl -> Composite (Val pl)
recExtend (RowExtend tag field r) =
    recExtendVal r
    & tags . Lens.at tag ?~ field
    where
        recExtendVal v@(Ann _ body) =
            case body of
            V.BLeaf V.LRecEmpty -> Composite mempty Nothing
            V.BRecExtend x -> recExtend x
            _ -> Composite mempty (Just v)
| lamdu/lamdu | src/Lamdu/Calc/Term/Utils.hs | gpl-3.0 | 1,278 | 0 | 12 | 370 | 444 | 233 | 211 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Pipes.Csv (HasHeader(..), decode, decodeByName)
import Pipes.ByteString (stdin, ByteString)
import Data.Csv ((.:), FromNamedRecord(..), Record)
import Pipes
import Control.Applicative
import System.Environment
(getArgs)
-- | A CSV row with a @name@ and an @age@ column.
data Person = Person String Int
              deriving (Show)
instance FromNamedRecord Person where
    parseNamedRecord p =
        Person <$> p .: "name"
               <*> p .: "age"
-- | Stream of 'Person' records decoded from header-carrying CSV.
persons :: Monad m
        => Producer ByteString m ()
        -> Producer (Either String Person) m ()
persons = decodeByName
-- | Stream of raw rows (lists of fields) decoded without a header.
collect :: Monad m => Producer ByteString m () -> Producer (Either String [String]) m ()
collect = decode NoHeader
-- | Usage string printed on bad arguments.
usage :: String
usage = "usage: ./stdinToStdout <index>"
-- | Dispatch on the single command-line argument: @1@ streams named
-- records ('persons'), @2@ streams raw rows ('collect'); anything else
-- prints the usage string.
main :: IO ()
main = do
  arguments <- getArgs
  case arguments of
    ["1"] -> runEffect $ for (persons stdin) (lift . print)
    ["2"] -> runEffect $ for (collect stdin) (lift . print)
    _     -> putStrLn usage
| adarqui/ToyBox | haskell/gonzalez/pipes/Pipes.CSV/src/stdinToStdout.hs | gpl-3.0 | 1,077 | 0 | 13 | 224 | 338 | 181 | 157 | 28 | 3 |
module Model where
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Data.Typeable (Typeable)
import Data.Time (UTCTime)
import Prelude
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
-- | Generate the persistent entity types and the @migrateAll@ migration
-- from the external schema file @config/models@.
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
    $(persistFileWith lowerCaseSettings "config/models")
| DasAsozialeNetzwerk/r0z | Model.hs | gpl-3.0 | 469 | 0 | 8 | 62 | 79 | 46 | 33 | -1 | -1 |
{-|
Module : Lipid.Parsers.UnknownSn.GlycerolipidSpec
Description :
Copyright : Michael Thomas
License : GPL-3
Maintainer : Michael Thomas <Michaelt293@gmail.com>
Stability : Experimental
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeApplications #-}
module Lipid.Parsers.UnknownSn.GlycerolipidSpec where
import Lipid.Blocks
import Test.Hspec
import Lipid.UnknownSn.Glycerolipid
import Lipid.Parsers.UnknownSn.Glycerolipid
spec :: Spec
spec = do
describe "Test for quasiquoters and Shorthand instances" $ do
it "QuasiQuoter for TG 16:0_18:1_22:6" $
shorthand @ (TG (Maybe DeltaPosition)) [tgMaybeDelta|TG 16:0_18:1_22:6|] `shouldBe` "TG 16:0_18:1_22:6"
it "QuasiQuoter for DG 16:0_22:6" $
shorthand @ (DG (Maybe DeltaPosition)) [dgMaybeDelta|DG 16:0_22:6|] `shouldBe` "DG 16:0_22:6"
it "QuasiQuoter for TG 16:0_18:1(9Z)_22:6" $
shorthand [tgMaybeDelta|TG 16:0_18:1(9Z)_22:6|] `shouldBe` "TG 16:0_18:1(9Z)_22:6"
it "QuasiQuoter for TG 16:0_18:1(9Z)_22:6(4,7,10,?,16,19)" $
shorthand [tgMaybeDelta|TG 16:0_18:1(9Z)_22:6(4,7,10,?,16,19)|] `shouldBe` "TG 16:0_18:1(9Z)_22:6(4,7,10,?,16,19)"
it "QuasiQuoter for TG 16:0_18:1(9Z)_22:6(4Z,7Z,10Z,13Z,16Z,19Z)" $
shorthand [tgDelta|TG 16:0_18:1(9Z)_22:6(4Z,7Z,10Z,13Z,16Z,19Z)|] `shouldBe` "TG 16:0_18:1(9Z)_22:6(4Z,7Z,10Z,13Z,16Z,19Z)"
it "QuasiQuoter for DG 14:0_15:0" $
shorthand @ (DG DeltaPosition) [dgDelta|DG 14:0_15:0|] `shouldBe` "DG 14:0_15:0"
it "QuasiQuoter for MG 22:6(4Z,7Z,10Z,13Z,16Z,19Z)" $
shorthand [mgMaybeDelta|MG 22:6(4Z,7Z,10Z,13Z,16Z,19Z)|] `shouldBe` "MG 22:6(4Z,7Z,10Z,13Z,16Z,19Z)"
describe "Test for quasiquoters and NNomenclature instances" $ do
it "QuasiQuoter for TG 16:0_18:1_22:6" $
nNomenclature @ (TG (Maybe OmegaPosition)) [tgMaybeOmega|TG 16:0_18:1_22:6|] `shouldBe` "TG 16:0_18:1_22:6"
it "QuasiQuoter for DG 16:0_22:6" $
nNomenclature @ (DG (Maybe OmegaPosition)) [dgMaybeOmega|DG 16:0_22:6|] `shouldBe` "DG 16:0_22:6"
it "QuasiQuoter for TG 16:0_18:1(n-9)_22:6" $
nNomenclature [tgMaybeOmega|TG 16:0_18:1(n-9)_22:6|] `shouldBe` "TG 16:0_18:1(n-9)_22:6"
it "QuasiQuoter for TG 16:0_18:1(n-9)_18:2(n-9,?)" $
nNomenclature [tgMaybeOmega|TG 16:0_18:1(n-9)_18:2(n-9,?)|] `shouldBe` "TG 16:0_18:1(n-9)_18:2(n-9,?)"
it "QuasiQuoter for TG 16:0_18:1(n-9)_18:2(n-9,n-6)" $
nNomenclature [tgOmega|TG 16:0_18:1(n-9)_18:2(n-9,n-6)|] `shouldBe` "TG 16:0_18:1(n-9)_18:2(n-6)"
it "QuasiQuoter for DG 14:0_15:0" $
nNomenclature @ (DG OmegaPosition) [dgOmega|DG 14:0_15:0|] `shouldBe` "DG 14:0_15:0"
it "QuasiQuoter for MG 22:6(n-3)" $
nNomenclature [mgMaybeOmega|MG 22:6(n-3)|] `shouldBe` "MG 22:6(n-3)"
it "QuasiQuoter for MG 22:6(n-3)" $
nNomenclature [mgOmega|MG 22:6(n-3)|] `shouldBe` "MG 22:6(n-3)"
| Michaelt293/Lipid-Haskell | test/Lipid/Parsers/UnknownSn/GlycerolipidSpec.hs | gpl-3.0 | 2,844 | 0 | 17 | 438 | 493 | 275 | 218 | 41 | 1 |
module Graphics.Forensics.Analyser.LocalCFA where
import GHC.Float
import Graphics.Forensics.Algorithms.Convolve
import Graphics.Forensics.Algorithms.Fragmentize
import Graphics.Forensics.Analyser
import Graphics.Forensics.Color
import Graphics.Forensics.Image
import Graphics.Forensics.Report
import Data.Array.Repa (Source(..), Z(..), DIM2, (:.)(..), Array(..),
U, D, computeP, computeUnboxedP)
import qualified Data.Array.Repa as Repa
import Data.Complex
import qualified Data.Vector.Algorithms.Intro as V
import qualified Data.Vector.Unboxed as V
import Data.Vector.Unboxed (Vector)
import Numeric.FFT.Vector.Invertible as V
import qualified Numeric.FFT.Vector.Plan as V
-- | Registry entry describing the local CFA analyser.
analyser :: Analyser ByteImage
analyser =
  Analyser
  { analyse = localCFAAnalyse
  , name = "localcfa"
  , author = "Moritz Roth"
  , version = readVersion "1.0"
  }
highpass :: Stencil DIM2 Float
highpass = [stencil2| 0 1 0
1 -4 1
0 1 0 |]
localCFAAnalyse :: ByteImage -> Analysis ()
localCFAAnalyse img = task "Local CFA analysis" 4 $ do
{- Extract the green channel and convolve with a highpass filter -}
hpf <- highpassFilter . byteToFloatImage $ img
step
{- Split the image into overlapping fragments, and map the local -}
{- CFA detection function over them. This is sequential because of -}
{- limitations in the vector-fftw package, but still runs ~6-10 times -}
{- faster than evaluating dftS from repa-algorithms in parallel. -}
let filtered = Repa.computeUnboxedS $
fragmentMap localAnalysis (Z :. 32 :. 32) hpf
step
{- Return the resulting grayscale image (as RGBA) -}
rgbaResult <- computeUnboxedP $ Repa.map fromGrayscaleFloat filtered
step
reportInfo "Output: local CFA peak size mapped image."
$ reportImage (floatToByteImage rgbaResult)
-- | Scale the green channel to [0, 255] and run the 3x3 highpass stencil
-- over it, clamping at the borders.
highpassFilter :: (Monad m) => FloatImage -> m (Array U DIM2 Float)
highpassFilter !img =
    computeUnboxedP (Repa.map (\px -> 255 * channelGreen px) img)
        >>= \green -> computeP (convolveS Clamp highpass green)
-- | Returns the normalised local CFA peak size for an array fragment
localAnalysis :: Array D DIM2 Float -> Float
localAnalysis !a =
getPeakValue ix . normalise ix $ magnitudes
where
!ix = len `div` 2
!len = V.length diags
!p = plan dft len
magnitudes = dftMagnitude p diags
diags = getDiagonalVariances a
-- | Magnitude spectrum of a vector: lift to @Complex Double@, run the
-- pre-built FFT plan, and take per-bin magnitudes back as 'Float'.
{-# INLINE dftMagnitude #-}
dftMagnitude :: V.Plan (Complex Double) (Complex Double) ->
                Vector Float -> Vector Float
dftMagnitude !p a =
  V.map (double2Float . magnitude) dftc
  where
    dftc = execute p list
    list = V.map ((:+ 0) . float2Double) a
-- | Returns the variances of all diagonals in the given array as a vector
{-# INLINE getDiagonalVariances #-}
getDiagonalVariances :: (Source r1 Float) =>
Array r1 DIM2 Float -> Vector Float
getDiagonalVariances !arr =
getDiagonals $ w + h - 1
where
(Z :. w :. h) = extent arr
{- Get the variance of all diagonals in the array -}
{-# INLINE getDiagonals #-}
getDiagonals :: Int -> Vector Float
getDiagonals s = V.generate s $ variance . getDiagonalAt
{- Get a single diagonal at position n in x direction -}
{-# INLINE getDiagonalAt #-}
getDiagonalAt :: Int -> Vector Float
getDiagonalAt n =
V.generate d getFromArray
where
x0 = min n (w - 1)
y0 = max 0 (n - w + 1)
xN = max 0 (w - h + y0)
d = 1 + (x0 - xN)
{-# INLINE getFromArray #-}
getFromArray :: Int -> Float
getFromArray a = arr `unsafeIndex` (Z :. x0 - a :. y0 + a)
{-As described in the paper, this computes the mean instead of real variance-}
{-# INLINE variance #-}
variance :: Vector Float -> Float
variance !a =
s / l
where
(s, l) = V.foldl' (\(su, le) n -> (su + n, le + 1)) (0, 0) a
-- | Drop the DC bin and normalise the remaining spectrum by the element
-- at index @ix@ of the selection-sorted copy (the median when the caller
-- passes @length `div` 2@); 'V.select' is an O(n) partial sort.
{-# INLINE normalise #-}
normalise :: Int -> Vector Float -> Vector Float
normalise ix !list =
  V.map (/ median) (V.unsafeTail list)
  where
    median = sorted `V.unsafeIndex` ix
    !sorted = V.modify (\v -> V.select v ix) list
{- Gets the peak value from the DFT spectrum and scales it -}
{-# INLINE getPeakValue #-}
getPeakValue :: Int -> Vector Float -> Float
getPeakValue mid !list =
peak / maxVal
where
peak = V.maximum $ V.unsafeSlice (mid - 1) 3 list
maxVal = V.maximum list | Purview/purview | src/Graphics/Forensics/Analyser/LocalCFA.hs | gpl-3.0 | 4,588 | 0 | 15 | 1,064 | 1,153 | 629 | 524 | -1 | -1 |
module PulseLambda.Parser
( tokenStreamToAst
, OpPrec(..)
, PrecDirection(..)
, standardOps
) where
import PulseLambda.Location
import PulseLambda.Error
import PulseLambda.TokenStream
import PulseLambda.Ast
import Data.Int
import Data.Word
import Data.Maybe
-- | Parser-level description of an operator: its symbol, the function it
-- desugars to, and its associativity/precedence.
data OpPrec = OpPrec
    { getOperator :: String
    , getFunctionName :: String
    , getPrecDirection :: PrecDirection
    , getPrecedence :: Word32
    }
    deriving (Eq, Show)
-- | Associativity of an operator.
data PrecDirection = LeftPrec | RightPrec deriving (Eq, Show)
-- | An operator seen in the token stream whose operands are not yet known.
data IncompleteOp = IncompleteOp
    { getIncompleteOpLocation :: Location
    , getIncompleteOpPrec :: OpPrec
    } deriving (Show)
data OpPosition = PrefixOp | PostfixOp | InfixOp | CallOp {getCallOp :: [Node]} deriving (Show)
instance Eq OpPosition where
PrefixOp == PrefixOp = True
PostfixOp == PostfixOp = True
InfixOp == InfixOp = True
CallOp _ == CallOp _ = True
_ == _ = False
data Operator = Operator
{ getOperatorLocation :: Location
, getOperatorPrec :: OpPrec
, getOperatorPosition :: OpPosition
} deriving (Show)
standardOps :: [OpPrec]
standardOps =
[ OpPrec ">" "isGreaterThanStrict" LeftPrec 10000
, OpPrec "<" "isLessThanStrict" LeftPrec 10000
, OpPrec ">=" "isGreaterOrEqualToStrict" LeftPrec 10000
, OpPrec "<=" "isLessOrEqualToStrict" LeftPrec 10000
, OpPrec "==" "isEqualToStrict" LeftPrec 10000
, OpPrec "!=" "isDifferentOfStrict" LeftPrec 10000
, OpPrec ">~" "isGreaterThan" LeftPrec 10000
, OpPrec "<~" "isLessThan" LeftPrec 10000
, OpPrec ">~=" "isGreaterOrEqualTo" LeftPrec 10000
, OpPrec "<~=" "isLessOrEqualTo" LeftPrec 10000
, OpPrec "~=" "isEqualTo" LeftPrec 10000
, OpPrec "!~=" "isDifferentOf" LeftPrec 10000
, OpPrec "++" "concat" LeftPrec 15000
, OpPrec ":" "buildList" LeftPrec 16000
, OpPrec "+" "sum" LeftPrec 20000
, OpPrec "-" "subtract" LeftPrec 20000
, OpPrec "*" "multiply" LeftPrec 40000
, OpPrec "/" "divide" LeftPrec 40000
, OpPrec "/:" "remain" LeftPrec 40000
, OpPrec "/-" "modulus" LeftPrec 40000
, OpPrec "^" "power" LeftPrec 80000
, OpPrec "!" "factorialOrLogicalNot" LeftPrec 160000
, OpPrec "()" "#call" LeftPrec 500000
, OpPrec "&&" "#and" LeftPrec 50000
, OpPrec "||" "#or" LeftPrec 2500
]
-- | Precedence entry registered for operator @op@, if any.
searchOpPrec :: [OpPrec] -> String -> Maybe OpPrec
searchOpPrec precs op = go precs
  where
    go [] = Nothing
    go (p:ps)
        | getOperator p == op = Just p
        | otherwise = go ps
-- | Register @prec@, replacing an existing entry for the same operator
-- symbol or appending it when the operator is new.
setOpPrec :: [OpPrec] -> OpPrec -> [OpPrec]
setOpPrec precs prec = go precs
  where
    op = getOperator prec
    go [] = [prec]
    go (p:ps)
        | getOperator p == op = prec : ps
        | otherwise = p : go ps
-- | Resolve the precedence entry for an operator token, raising a parsing
-- error when the operator is not registered.  Fixes the "Unknwon" typo in
-- the error message.
-- NOTE(review): only 'OperatorToken's are matched here, so this function
-- is partial for other token values — confirm callers guarantee that.
getOpPrec :: [OpPrec] -> Token -> OpPrec
getOpPrec [] (Token loc (OperatorToken op)) =
    throwParsingError loc $ "Unknown operator `" ++ op ++ "`."
getOpPrec (x:xs) (Token loc (OperatorToken op))
    | getOperator x == op = x
    | otherwise = getOpPrec xs $! Token loc $! OperatorToken op
tokenStreamToAst :: TokenStream -> [OpPrec] -> TokenValue -> (Node, TokenStream, [OpPrec])
tokenStreamToAst stream precs endToken
| currTokVal == endToken = (Node currTokLoc $! SetOfExp [], goNext stream, precs)
| getTokenValue endedToken == endToken = (Node currTokLoc $! SetOfExp (if isNothing maybeNode
then []
else [node]),
newStream, newPrecs)
| isNothing maybeNode = next
| otherwise = (Node nextLoc $! SetOfExp $! node:exps, nextStream, nextPrec)
where
(Token currTokLoc currTokVal) = getCurrToken stream
(maybeNode, endedToken, newStream, newPrecs) = parseLine stream precs [NewlineToken, endToken]
(Just node) = maybeNode
next = tokenStreamToAst newStream newPrecs endToken
(Node nextLoc (SetOfExp exps), nextStream, nextPrec) = next
parseSetOfExps :: TokenStream -> [OpPrec] -> (Node, TokenStream, [OpPrec])
parseSetOfExps stream precs = tokenStreamToAst (goNext stream) precs closeCrlBracket
parseLine :: TokenStream -> [OpPrec] -> [TokenValue] -> (Maybe Node, Token, TokenStream, [OpPrec])
parseLine stream precs endToks
| currTokVal == operatorKw =
let (lastTok, newStream, newPrecs) = parseOperatorStatement stream precs endToks
in (Nothing, lastTok, newStream, newPrecs)
| currTokVal `elem` endToks = (Nothing, currTok, goNext stream, precs)
| otherwise = parseExpression stream precs endToks
where
currTok = getCurrToken stream
currTokVal = getTokenValue currTok
parseIf :: TokenStream -> [OpPrec] -> [TokenValue] -> (Node, Token, TokenStream, [OpPrec])
parseIf stream precs endToks
| isNothing maybeCond =
throwParsingError condTokLoc
$! "Expecting expression, but found "
++ show condTokVal ++ "."
| isNothing maybeThen =
throwParsingError endThenTokLoc
$! "Expecting expression, but found "
++ show endThenTokVal ++ "."
| isNothing maybeElse =
throwParsingError endElseTokLoc
$! "Expecting expression, but found "
++ show endElseTokVal ++ "."
| otherwise = (node, elseTok, elseStream, elsePrecs)
where
(Token currTokLoc _) = getCurrToken stream
skippedStream = goNext stream
(maybeCond, Token condTokLoc condTokVal, condStream, condPrecs) =
parseExpression skippedStream precs [thenKw]
(Just cond) = maybeCond
(maybeThen, Token endThenTokLoc endThenTokVal, thenStream, thenPrecs) =
parseExpression condStream condPrecs [elseKw]
(Just thenStmt) = maybeThen
(maybeElse, elseTok, elseStream, elsePrecs) =
parseExpression thenStream thenPrecs endToks
(Token endElseTokLoc endElseTokVal) = elseTok
(Just elseStmt) = maybeElse
node = Node currTokLoc (Condition cond thenStmt elseStmt)
parseOperatorStatement :: TokenStream -> [OpPrec] -> [TokenValue] -> (Token, TokenStream, [OpPrec])
parseOperatorStatement stream precs endToks
| (not . isOperatorToken) opTokVal = throwParsingError opTokLoc
$! "Expecting operator token, but found " ++ show opTokVal ++ "."
| (not . isIdentifierToken) funcTokVal = throwParsingError funcTokLoc
$! "Expecting identifier, found " ++ show funcTokVal ++ "."
| termTokVal `notElem` endToks = throwParsingError funcTokLoc
$! "Expecting " ++ showListOfTokens endToks ++ ", found " ++ show funcTokVal ++ "."
| otherwise =
( terminationTok
, goNext terminationStream
, setOpPrec precs $! OpPrec
(stringFromTokenValue opTokVal)
(stringFromTokenValue funcTokVal)
precDirection
precVal
)
where
opStream = goNext stream
(Token opTokLoc opTokVal) = getCurrToken opStream
funcStream = skipExpected (goNext opStream) [asKw]
(Token funcTokLoc funcTokVal) = getCurrToken funcStream
precDirectionStream = goNext funcStream
(Token precDirectionTokLoc precDirectionTokVal) = getCurrToken precDirectionStream
precValStream = goNext precDirectionStream
(Token precValTokLoc precValTokVal) = getCurrToken precValStream
terminationStream = goNext precValStream
terminationTok = getCurrToken terminationStream
(Token _ termTokVal) = terminationTok
precVal :: Word32
precVal
| isIntToken precValTokVal = (fromIntegral . intFromTokenValue) precValTokVal
| isLargeIntToken precValTokVal = (fromIntegral . integerFromTokenValue) precValTokVal
| isNaturalToken precValTokVal = (fromIntegral . wordFromTokenValue) precValTokVal
| otherwise = throwParsingError precValTokLoc
$! "Expecting an integral literal, found " ++ show precValTokVal
precDirection
| precDirectionTokVal == leftKw = LeftPrec
| precDirectionTokVal == rightKw = RightPrec
| otherwise = throwParsingError precDirectionTokLoc
$! "Expecting " ++ show leftKw
++ " or " ++ show rightKw ++ ", found "
++ show precDirectionTokVal
-- sy stands for shunting yard
-- | Parse one expression with a shunting-yard algorithm, stopping at any
-- token value in @endToks@.  Yields the parsed node (if any), the
-- terminating token, the remaining stream and the updated precedences.
parseExpression :: TokenStream -> [OpPrec] -> [TokenValue] -> (Maybe Node, Token, TokenStream, [OpPrec])
parseExpression stream precs endToks = parseShuntingYard stream precs [] [] [] False
  where
    -- Push an operand, first resolving pending incomplete operators
    -- against it according to precedence and associativity.
    addOperandToStack optrStack opndStack [] opnd = (optrStack, opnd:opndStack)
    addOperandToStack optrStack [] [IncompleteOp loc prec] opnd =
      (Operator loc prec PrefixOp:optrStack, [opnd])
    addOperandToStack optrStack [] (IncompleteOp loc prec:xs) opnd =
      (Operator loc prec PrefixOp:newOptrStack, newOpndStack)
      where
        (newOptrStack, newOpndStack) = addOperandToStack optrStack [] xs opnd
    -- With operands already stacked, a single pending operator is infix.
    addOperandToStack optrStack (inStack:opnds) [IncompleteOp loc prec] opnd =
      let (newOptrStack, newOpndStack) = addOperatorToStack optrStack (inStack:opnds) $! Operator loc prec InfixOp
      in (newOptrStack, opnd:newOpndStack)
    addOperandToStack optrStack (inStack:opnds) (x:y:incmps) opnd
      -- NOTE(review): when x binds tighter than y, x acts as a prefix
      -- operator; ties fall to left associativity -- confirm intended rule.
      | biggerPrec =
          let (newOptrStack, newOpndStack) = remaining
          in (Operator xLoc xPrec PrefixOp:newOptrStack, newOpndStack)
      | null incmps =
          let (Just node) = finalize optrStack (inStack:opnds) [y]
          in addOperandToStack [] [node] [x] opnd
      | otherwise =
          let (newOptrStack, newOpndStack) = remaining
          in addOperandToStack newOptrStack newOpndStack [x] opnd
      where
        biggerPrec = xPrecVal > yPrecVal || xPrecVal == yPrecVal && xDir == LeftPrec && xDir == yDir
        remaining = addOperandToStack optrStack (inStack:opnds) (y:incmps) opnd
        (IncompleteOp xLoc xPrec) = x
        (OpPrec _ _ xDir xPrecVal) = xPrec
        (IncompleteOp _ yPrec) = y
        (OpPrec _ _ yDir yPrecVal) = yPrec
    -- Incomplete (unresolved) operators are simply stacked until an
    -- operand or a terminator resolves them.
    addIncompleteOpToStack optrStack opndStack incmpStack optr = (optrStack, opndStack, optr:incmpStack)
    -- Push a resolved operator, reducing stacked tighter-binding
    -- operators into FunctionCall nodes first.
    addOperatorToStack [] opnd optr = ([optr], opnd)
    addOperatorToStack (inStack:optrs) opndStack optr
      | biggerPrec = (optr:inStack:optrs, opndStack)
      -- Postfix: operand first, null literal as the second argument.
      | inType == PostfixOp = case opndStack of
          (opnd:opnds) -> addOperatorToStack optrs (Node inLoc (FunctionCall
            (Node inLoc $! VarMention inFname)
            [opnd, Node loc NullLiteral]):opnds) optr
      -- Prefix: null literal first, operand second.
      | inType == PrefixOp = case opndStack of
          (opnd:opnds) -> addOperatorToStack optrs (Node inLoc (FunctionCall
            (Node inLoc $! VarMention inFname)
            [Node loc NullLiteral, opnd]):opnds) optr
      | inType == InfixOp = case opndStack of
          (x:y:opnds) -> addOperatorToStack optrs (Node inLoc (FunctionCall
            (Node inLoc $! VarMention inFname)
            [y, x]):opnds) optr
      -- Remaining case is CallOp: apply the collected argument list.
      | otherwise = case opndStack of
          (opnd:opnds) -> addOperatorToStack optrs (Node inLoc (FunctionCall opnd args):opnds) optr
      where
        biggerPrec = inPrecVal < precVal || inPrecVal == precVal && inDir == RightPrec && dir == inDir
        (Operator loc prec _) = optr
        (Operator inLoc inPrec inType) = inStack
        (OpPrec _ inFname inDir inPrecVal) = inPrec
        (OpPrec _ _ dir precVal) = prec
        -- Lazily matched; only forced in the CallOp (otherwise) branch.
        (CallOp args) = inType
    -- Collapse the remaining stacks into a single node; Nothing only for
    -- a completely empty expression.
    finalize [] [] [] = Nothing
    finalize [] [node] [] = Just node
    finalize (Operator loc (OpPrec op _ _ _) _:_) [] _ = throwParsingError loc
      $! "Unexpected " ++ show (OperatorToken op) ++ "."
    finalize _ [] (IncompleteOp loc (OpPrec op _ _ _):_) = throwParsingError loc
      $! "Unexpected " ++ show (OperatorToken op) ++ "."
    finalize (Operator loc (OpPrec _ fname _ _) PostfixOp:ops) (x:xs) [] = finalize ops
      (Node loc (FunctionCall (Node loc $! VarMention fname) [x, Node loc NullLiteral]):xs) []
    finalize (Operator loc (OpPrec _ fname _ _) PrefixOp:ops) (x:xs) [] = finalize ops
      (Node loc (FunctionCall (Node loc $! VarMention fname) [Node loc NullLiteral, x]):xs) []
    finalize (Operator loc (OpPrec _ fname _ _) InfixOp:ops) (x:y:xs) [] = finalize ops
      (Node loc (FunctionCall (Node loc $! VarMention fname) [y, x]):xs) []
    finalize (Operator loc _ (CallOp args):ops) (x:xs) [] = finalize ops
      (Node loc (FunctionCall x args):xs) []
    -- A trailing incomplete operator is reduced as postfix and its
    -- result pushed back as an operand before continuing.
    finalize optrStack opndStack (IncompleteOp loc opPrec:ops) =
      finalize newOptrStack (node:newOpndStack) ops
      where
        (optr:newOptrStack, opnd:newOpndStack) =
          addOperatorToStack optrStack opndStack $! Operator loc opPrec PostfixOp
        (Just node) = finalize [optr] [opnd] []
    -- finalize optrStack opndStack incmpStack = error $! "optr:" ++ show optrStack ++ "\nopnd: " ++ show opndStack ++ "\nincmpStack:" ++ show incmpStack
    -- Main loop.  isPreviousValue records whether the previous token
    -- produced an operand, which makes a following '(' a call.
    parseShuntingYard syStream syPrecs optrStack opndStack incmpStack isPreviousValue
      | currTokVal `elem` endToks = (finalize optrStack opndStack incmpStack, currTok, goNext skippedStream, syPrecs)
      | isPreviousValue = if currTokVal == openParen
        then
          let (args, newStream, newPrecs) = parseCallArgs syStream syPrecs
              (newOptrStack, newOpndStack) = addOperatorToStack optrStack opndStack
                $! Operator currTokLoc
                     (getOpPrec newPrecs $! Token currTokLoc $! OperatorToken "()")
                     $! CallOp args
          in parseShuntingYard newStream newPrecs newOptrStack newOpndStack incmpStack True
        else nextWithOp
      | isJust readMaybeValue =
        let (Just (node, newStream, newPrecs)) = readMaybeValue
            (newOptrStack, newOpndStack) = addOperandToStack optrStack opndStack incmpStack node
        in parseShuntingYard newStream newPrecs newOptrStack newOpndStack [] True
      | isJust readMaybeEndingValue =
        let (Just (node, lastTok, newStream, newPrecs)) = readMaybeEndingValue
            (newOptrStack, newOpndStack) = addOperandToStack optrStack opndStack incmpStack node
        in (finalize newOptrStack newOpndStack [], lastTok, newStream, newPrecs)
      | otherwise = nextWithOp
      where
        skippedStream = skipln syStream
        currTok = getCurrToken skippedStream
        (Token currTokLoc currTokVal) = currTok
        -- Consume an operator token, or handle assignment to a variable.
        nextWithOp
          | currTokVal /= assignOp =
            let (incompleteOp, newStream, newPrecs) = if isOperatorToken currTokVal
                  then (IncompleteOp currTokLoc $! getOpPrec syPrecs currTok, advance syStream, syPrecs)
                  else throwParsingError currTokLoc $! "Unexpected " ++ show currTokVal ++ "."
                (newOptrStack, newOpndStack, newIncmpStack) =
                  addIncompleteOpToStack optrStack opndStack incmpStack incompleteOp
            in parseShuntingYard newStream newPrecs newOptrStack newOpndStack newIncmpStack False
          -- Assignment is only valid directly after a bare variable.
          | (not . null) incmpStack || null opndStack = unexpectedError
          | otherwise = case (getExpression . head) opndStack of
              (VarMention var) -> case parseExpression (advance syStream) syPrecs endToks of
                (Just assigned, lastTok, newStream, newPrecs) ->
                  let assignment = Node currTokLoc (Assign var assigned)
                  in (finalize optrStack (assignment:tail opndStack) [], lastTok, newStream, newPrecs)
                (Nothing, Token lastTokLoc lastTokVal, _, _) -> throwParsingError lastTokLoc
                  $! "Unexpected " ++ show lastTokVal ++ "."
              _ -> unexpectedError
          where
            unexpectedError =
              throwParsingError currTokLoc $! "Unexpected " ++ show assignOp ++ "."
        -- Compound expressions that carry their own terminating token.
        readMaybeEndingValue
          | currTokVal == ifKw =
            let (ifExp, lastTok, newStream, newPrecs) = parseIf syStream syPrecs endToks
            in Just (ifExp, lastTok, skipln newStream, newPrecs)
          | currTokVal == lambdaKw =
            let (funcExp, lastTok, newStream, newPrecs) = parseFunction syStream syPrecs endToks
            in Just (funcExp, lastTok, skipln newStream, newPrecs)
          | currTokVal == constKw =
            let (constExp, lastTok, newStream, newPrecs) = parseConst syStream syPrecs
            in Just (constExp, lastTok, skipln newStream, newPrecs)
          | otherwise = Nothing
        -- Simple operand values: literals, identifiers and groupings.
        readMaybeValue
          | isIdentifierToken currTokVal = nodeWith parseVar
          | isStringToken currTokVal = nodeWith parseString
          | isCharToken currTokVal = nodeWith parseChar
          | isNaturalToken currTokVal = nodeWith parseNatural
          | isIntToken currTokVal = nodeWith parseInt
          | isLargeIntToken currTokVal = nodeWith parseLargeInt
          | isRealToken currTokVal = nodeWith parseReal
          -- Empty parentheses parse as a null literal.
          | currTokVal == openParen =
            let (maybeNode, _, newStream, newPrecs) =
                  parseExpression (goNext syStream) syPrecs [closeParen]
                node = fromMaybe (Node currTokLoc NullLiteral) maybeNode
            in Just (node, skipln newStream, newPrecs)
          | currTokVal == openSqrBracket =
            let (list, newStream, newPrecs) = parseList syStream syPrecs
            in Just (list, skipln newStream, newPrecs)
          | currTokVal == openCrlBracket =
            let (setOfExp, newStream, newPrecs) = parseSetOfExps syStream syPrecs
            in Just (setOfExp, skipln newStream, newPrecs)
          | otherwise = Nothing
          where
            nodeWith func = Just (func currTok, advance syStream, syPrecs)
        -- Newlines terminate expressions only when NewlineToken is an
        -- end token; otherwise they are skipped transparently.
        (advance, skipln) = if NewlineToken `elem` endToks
          then (goNext, id)
          else (skipWhileIsNewline . goNext, skipWhileIsNewline)
        -- Parse a constant declaration: const name := expression.
        parseConst constStream constPrecs
          | (not . isIdentifierToken) identfTokVal =
            throwParsingError identfTokLoc
              $! "Expecting identifier, but found "
              ++ show identfTokVal ++ "."
          | isNothing maybeExp =
            throwParsingError lastTokLoc
              $! "Expecting expression, but found " ++ show lastTokVal
          | otherwise = (Node location $! ConstantDeclare constName expr, lastTok, newStream, newPrecs)
          where
            location = (getTokenLocation . getCurrToken) constStream
            skippedStream = advance constStream
            (Token identfTokLoc identfTokVal) = getCurrToken skippedStream
            constName = stringFromTokenValue identfTokVal
            expStream = skipln $! skipExpected (advance skippedStream) [assignOp]
            (maybeExp, lastTok, newStream, newPrecs) = parseExpression expStream constPrecs endToks
            (Token lastTokLoc lastTokVal) = lastTok
            (Just expr) = maybeExp
-- Token-to-literal-node converters.  Each is deliberately partial:
-- readMaybeValue checks the token kind before calling them.
parseVar :: Token -> Node
parseVar (Token loc (IdentifierToken identf)) = Node loc (VarMention identf)
parseString :: Token -> Node
parseString (Token loc (StringToken str)) = Node loc (StringLiteral str)
parseChar :: Token -> Node
parseChar (Token loc (CharToken char)) = Node loc (CharLiteral char)
parseNatural :: Token -> Node
parseNatural (Token loc (NaturalToken nat)) = Node loc (NaturalLiteral nat)
parseInt :: Token -> Node
parseInt (Token loc (IntToken int)) = Node loc (IntLiteral int)
parseLargeInt :: Token -> Node
parseLargeInt (Token loc (LargeIntToken int)) = Node loc (LargeIntLiteral int)
parseReal :: Token -> Node
parseReal (Token loc (RealToken int)) = Node loc (RealLiteral int)
-- | Parse a lambda literal: argument declarations followed by a body
-- expression; raises a parsing error when the body is missing.
parseFunction :: TokenStream -> [OpPrec] -> [TokenValue] -> (Node, Token, TokenStream, [OpPrec])
parseFunction stream precs endToks = if isNothing maybeBody
  then throwParsingError lastTokLoc
    $! "Expecting expression, but found "
    ++ show lastTokVal ++ "."
  else (Node fLoc $! FunctionLiteral args body, lastTok, newerStream, newerPrecs)
  where
    (Token fLoc _) = getCurrToken stream
    (args, newStream, newPrecs) = parseArgsDeclare (goNext stream) precs
    (maybeBody, lastTok, newerStream, newerPrecs) = parseExpression newStream newPrecs endToks
    (Token lastTokLoc lastTokVal) = lastTok
    -- Safe: only forced in the 'else' branch, where maybeBody is a Just.
    (Just body) = maybeBody
-- | Parse a parenthesised argument-declaration list, e.g.
-- @(a, b := expr, rest[])@: default values via ':=' and an optional
-- final variadic argument written @name[]@.
parseArgsDeclare :: TokenStream -> [OpPrec] -> ([Node], TokenStream, [OpPrec])
parseArgsDeclare stream precs = if currTokVal == closeParen
  then ([], goNext skippedStream, precs)
  else parseArg skippedStream precs
  where
    skippedStream = skipExpected stream [openParen]
    (Token tokLoc currTokVal) = getCurrToken skippedStream
    -- Parse one declaration, recursing on ',' and stopping at ')'.
    parseArg argStream argPrecs
      | (not . isIdentifierToken) argTokVal =
        throwParsingError argTokLoc
          $! "Expecting identifier, but found "
          ++ show argTokVal ++ "."
      | skpdArgTokVal == openSqrBracket =
        -- Variadic argument: skip past '[' and require ']'.
        -- BUG FIX: this previously skipped from the enclosing 'stream'
        -- (the list's opening position) rather than from the position
        -- just after '[' (skippedArgStream), so the ']' expectation was
        -- checked at the wrong place -- confirm with a 'name[]' test.
        let streamAfterExpected = skipExpected (goNext skippedArgStream) [closeSqrBracket]
            (Token tokLocAfterExpected tokValAfterExpected) = getCurrToken streamAfterExpected
        in if tokValAfterExpected == closeParen
          then
            ( [Node argTokLoc $! VariadicArgDeclare identifier]
            , goNext streamAfterExpected
            , argPrecs
            )
          else
            throwParsingError tokLocAfterExpected
              $! "Expecting " ++ show closeParen
              ++ ", but found " ++ show tokValAfterExpected
              ++ " (Variadic Argument must be the last declared argument)."
      | lastTokVal == closeParen =
        ( [Node argTokLoc $! ArgDeclare identifier defaultValue]
        , streamAfterDefault
        , precsAfterDefault
        )
      | lastTokVal == commaSeparator =
        let (args, finalStream, finalPrecs) = parseArg streamAfterDefault precsAfterDefault
        in
          ( Node argTokLoc (ArgDeclare identifier defaultValue):args
          , finalStream
          , finalPrecs
          )
      | otherwise =
        throwParsingError lastTokLoc
          $! "Expecting " ++ show closeParen ++ ", "
          ++ show commaSeparator ++ ", "
          ++ show assignOp ++ " or "
          ++ show openSqrBracket ++ ", found "
          ++ show lastTokVal ++ "."
      where
        (Token argTokLoc argTokVal) = getCurrToken argStream
        identifier = stringFromTokenValue argTokVal
        skippedArgStream = goNext argStream
        skpdArgTok = getCurrToken skippedArgStream
        (Token _ skpdArgTokVal) = skpdArgTok
        -- A default value is present only when ':=' follows the name;
        -- otherwise the default is a null literal.
        (defaultValue, Token lastTokLoc lastTokVal, streamAfterDefault, precsAfterDefault)
          | skpdArgTokVal /= assignOp =
            (Node argTokLoc NullLiteral, skpdArgTok, goNext skippedArgStream, argPrecs)
          | isJust maybeNode = (node, lastReadTok, newStream, newPrecs)
          | otherwise =
            throwParsingError lastReadTokLoc
              $! "Expected expression, but found "
              ++ show lastReadTokVal ++ "."
          where
            (maybeNode, lastReadTok, newStream, newPrecs) =
              parseExpression (goNext skippedArgStream) argPrecs [commaSeparator, closeParen]
            (Token lastReadTokLoc lastReadTokVal) = lastReadTok
            (Just node) = maybeNode
-- | Parse a list literal terminated by ']'.
parseList :: TokenStream -> [OpPrec] -> (Node, TokenStream, [OpPrec])
parseList stream precs = (Node location $! ListLiteral nodes, newStream, newPrecs) where
  location = (getTokenLocation . getCurrToken) stream
  (nodes, newStream, newPrecs) = parseSequence stream precs closeSqrBracket
-- | Parse comma-separated call arguments terminated by ')'.
parseCallArgs :: TokenStream -> [OpPrec] -> ([Node], TokenStream, [OpPrec])
parseCallArgs stream precs = parseSequence stream precs closeParen
-- | Parse a comma-separated expression sequence up to @endTok@.  The
-- opening delimiter is skipped unconditionally (goNext).
parseSequence :: TokenStream -> [OpPrec] -> TokenValue -> ([Node], TokenStream, [OpPrec])
parseSequence stream precs endTok = if currTokVal == endTok
  then ([], goNext skippedStream, precs)
  else readMembers skippedStream precs
  where
    skippedStream = goNext stream
    currTokVal = (getTokenValue . getCurrToken) skippedStream
    -- Parse one member; continue after each ',' until endTok.
    readMembers memberStream memberPrecs
      | isNothing maybeNode =
        throwParsingError (getTokenLocation lastTok)
          $! "Expecting an expression, but found "
          ++ show lastTok ++ "."
      | lastTokVal == commaSeparator =
        let (nodes, newerStream, newerPrecs) = readMembers newStream newPrecs
        in (node:nodes, newerStream, newerPrecs)
      | otherwise = ([node], newStream, newPrecs)
      where
        (maybeNode, lastTok, newStream, newPrecs) = parseExpression
          memberStream
          memberPrecs
          [ commaSeparator
          , endTok
          ]
        -- Safe: only forced after the isNothing guard has failed.
        (Just node) = maybeNode
        lastTokVal = getTokenValue lastTok
-- | Consume a run of expected token values, raising a parsing error at
-- the first mismatch.
skipExpected :: TokenStream -> [TokenValue] -> TokenStream
skipExpected stream [] = stream
skipExpected stream (tok:toks) = if currTokVal == tok
  then skipExpected (goNext stream) toks
  else throwParsingError currTokLoc
    $! "Expecting "
    ++ show tok
    ++ ", but found "
    ++ show currTokVal
    ++ "."
  where
    (Token currTokLoc currTokVal) = getCurrToken stream
| brunoczim/PulseLambda | PulseLambda/Parser.hs | gpl-3.0 | 26,661 | 0 | 22 | 8,246 | 7,191 | 3,659 | 3,532 | 454 | 19 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.UnSampledReports.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a single unsampled report.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.unsampledReports.get@.
module Network.Google.Resource.Analytics.Management.UnSampledReports.Get
(
-- * REST Resource
ManagementUnSampledReportsGetResource
-- * Creating a Request
, managementUnSampledReportsGet
, ManagementUnSampledReportsGet
-- * Request Lenses
, musrgWebPropertyId
, musrgProFileId
, musrgAccountId
, musrgUnSampledReportId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.unsampledReports.get@ method which the
-- 'ManagementUnSampledReportsGet' request conforms to.
-- Route shape:
--   analytics/v3/management/accounts/{accountId}/webproperties/{webPropertyId}
--     /profiles/{profileId}/unsampledReports/{unsampledReportId}?alt=json
type ManagementUnSampledReportsGetResource =
  "analytics" :>
    "v3" :>
      "management" :>
        "accounts" :>
          Capture "accountId" Text :>
            "webproperties" :>
              Capture "webPropertyId" Text :>
                "profiles" :>
                  Capture "profileId" Text :>
                    "unsampledReports" :>
                      Capture "unsampledReportId" Text :>
                        QueryParam "alt" AltJSON :>
                          Get '[JSON] UnSampledReport
-- | Returns a single unsampled report.
--
-- /See:/ 'managementUnSampledReportsGet' smart constructor.
data ManagementUnSampledReportsGet = ManagementUnSampledReportsGet'
  { _musrgWebPropertyId :: !Text       -- ^ Web property ID of the report
  , _musrgProFileId :: !Text           -- ^ View (profile) ID of the report
  , _musrgAccountId :: !Text           -- ^ Account ID owning the report
  , _musrgUnSampledReportId :: !Text   -- ^ ID of the unsampled report
  } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagementUnSampledReportsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'musrgWebPropertyId'
--
-- * 'musrgProFileId'
--
-- * 'musrgAccountId'
--
-- * 'musrgUnSampledReportId'
managementUnSampledReportsGet
    :: Text -- ^ 'musrgWebPropertyId'
    -> Text -- ^ 'musrgProFileId'
    -> Text -- ^ 'musrgAccountId'
    -> Text -- ^ 'musrgUnSampledReportId'
    -> ManagementUnSampledReportsGet
-- All four identifiers are required; there are no optional fields.
managementUnSampledReportsGet pMusrgWebPropertyId_ pMusrgProFileId_ pMusrgAccountId_ pMusrgUnSampledReportId_ =
  ManagementUnSampledReportsGet'
    { _musrgWebPropertyId = pMusrgWebPropertyId_
    , _musrgProFileId = pMusrgProFileId_
    , _musrgAccountId = pMusrgAccountId_
    , _musrgUnSampledReportId = pMusrgUnSampledReportId_
    }
-- | Web property ID to retrieve unsampled reports for.
-- | Web property ID to retrieve unsampled reports for.
musrgWebPropertyId :: Lens' ManagementUnSampledReportsGet Text
musrgWebPropertyId =
  lens _musrgWebPropertyId (\obj val -> obj {_musrgWebPropertyId = val})

-- | View (Profile) ID to retrieve unsampled report for.
musrgProFileId :: Lens' ManagementUnSampledReportsGet Text
musrgProFileId =
  lens _musrgProFileId (\obj val -> obj {_musrgProFileId = val})

-- | Account ID to retrieve unsampled report for.
musrgAccountId :: Lens' ManagementUnSampledReportsGet Text
musrgAccountId =
  lens _musrgAccountId (\obj val -> obj {_musrgAccountId = val})

-- | ID of the unsampled report to retrieve.
musrgUnSampledReportId :: Lens' ManagementUnSampledReportsGet Text
musrgUnSampledReportId =
  lens _musrgUnSampledReportId (\obj val -> obj {_musrgUnSampledReportId = val})
instance GoogleRequest ManagementUnSampledReportsGet
  where
    type Rs ManagementUnSampledReportsGet =
      UnSampledReport
    type Scopes ManagementUnSampledReportsGet =
      '["https://www.googleapis.com/auth/analytics",
        "https://www.googleapis.com/auth/analytics.edit",
        "https://www.googleapis.com/auth/analytics.readonly"]
    -- Captures are supplied in route order: account, web property,
    -- profile, then the unsampled report id.
    requestClient ManagementUnSampledReportsGet'{..}
      = go _musrgAccountId _musrgWebPropertyId
          _musrgProFileId
          _musrgUnSampledReportId
          (Just AltJSON)
          analyticsService
      where go
              = buildClient
                  (Proxy ::
                     Proxy ManagementUnSampledReportsGetResource)
                  mempty
| rueshyna/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/UnSampledReports/Get.hs | mpl-2.0 | 4,980 | 0 | 19 | 1,184 | 550 | 326 | 224 | 97 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
module Crypto.OPVault.Types
( module Crypto.OPVault.Types
, module Common
) where
import Crypto.OPVault.Types.Base64 as Common
import Crypto.OPVault.Types.Common as Common
import Crypto.OPVault.Types.Encryption as Common
import Crypto.OPVault.Types.FileTypes as Common
import Crypto.OPVault.Types.ItemIndex as Common
import Crypto.OPVault.Types.Opdata01 as Common
import Crypto.OPVault.Types.ResultT as Common
import Control.Concurrent.Async (Concurrently(..))
-- | Filesystem location of an OPVault vault.
newtype Vault = VaultPath String deriving (Show, Eq)

-- Lets vault paths be written as plain string literals
-- (with OverloadedStrings).
instance IsString Vault where
  fromString = VaultPath
-- | Gives 'Concurrently' the kind of a monad transformer so it can sit
-- in transformer-shaped signatures; the @m@ parameter is phantom.
newtype ConcurrentlyT (m :: * -> *) a = ConcurrentlyT (Concurrently a)
  deriving (Functor, Applicative, Monad)

-- | Execute the wrapped concurrent computation in any 'MonadIO'.
runConcurrentlyT :: MonadIO m => ConcurrentlyT m a -> m a
runConcurrentlyT (ConcurrentlyT action) = io (runConcurrently action)

-- | Shorthand for 'liftIO'.
io :: MonadIO m => IO a -> m a
io action = liftIO action

-- | Lift an 'IO' action directly into 'ConcurrentlyT'.
io' :: MonadIO m => IO a -> ConcurrentlyT m a
io' action = ConcurrentlyT (Concurrently action)
| bitemyapp/opvault | src/Crypto/OPVault/Types.hs | mpl-2.0 | 1,098 | 0 | 7 | 176 | 280 | 169 | 111 | 26 | 1 |
-- | Path robots are:
-- platform robots
-- patrol robots
module Sorts.Robots.PathRobots where
import Base
import qualified Sorts.Robots.PathRobots.PatrolRobot as PatrolRobot
import qualified Sorts.Robots.PathRobots.Platform as Platform
-- * loading
-- | All path-robot sorts, in loading order: the moving platform first,
-- then the patrol robot.
sorts :: [IO (Maybe Sort_)]
sorts =
  map (fmap Just)
    [ Platform.sort
    , PatrolRobot.sort
    ]
| nikki-and-the-robots/nikki | src/Sorts/Robots/PathRobots.hs | lgpl-3.0 | 383 | 0 | 9 | 81 | 84 | 54 | 30 | 9 | 1 |
-- HVamp - A Vamp host for Haskell
--
-- Copyright (C) 2014-2016 Richard Lewis, Goldsmiths' College
-- Author: Richard Lewis <richard.lewis@gold.ac.uk>
-- This file is part of HVamp
-- HVamp is free software: you can redistribute it and/or modify it
-- under the terms of the GNU Lesser General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
-- HVamp is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with HVamp. If not, see <http://www.gnu.org/licenses/>.
module HVamp ( listLibraries
, listPlugins
, listPluginsOfLib
, loadMaybePlugin
, instantiateMaybePlugin
, initialiseMaybePluginP
, initialiseMaybePlugin
, withMaybePluginHandle
, withMaybePluginHandle_ ) where
import Control.Applicative
import Control.Exception (bracket)
import Data.Maybe (maybe)
import Data.Traversable (traverse)
import Foreign
import Foreign.C.String
import Foreign.C.Types (CFloat(..))
import Foreign.Storable (peek)
import Host
import Vamp
type Library = String
type Index = Integer
type PluginName = String
type PluginID = (Library, Index, PluginName)
-- | Names of every Vamp plugin library installed on the host.
listLibraries :: IO [Library]
listLibraries = do
  count <- c_getLibraryCount
  -- Library indices run from 0 to count-1.
  traverse (\n -> c_getLibraryName n >>= peekCString) [0..(count - 1)]
-- | Enumerate the plugins of an already-loaded library, pairing each
-- plugin's index and name with the given library name.
listPluginsOfLib :: Ptr VHLibrary -> Library -> IO [PluginID]
listPluginsOfLib libPtr libName = do
  let
    pluginId n = do
      pdPtr <- c_getPluginDescriptor libPtr (fromInteger n)
      pd <- peek pdPtr
      return $ (libName, n, pldName pd)
  count <- c_getPluginCount libPtr
  traverse pluginId [0..((toInteger count) - 1)]
-- | Enumerate every plugin of every installed library.  Each library is
-- loaded only for the duration of its scan; bracket unloads it even if
-- the scan throws.
listPlugins :: IO [[PluginID]]
listPlugins = do
  let
    withLib n f =
      bracket
        (c_loadLibrary n)
        c_unloadLibrary
        (\l -> f n l)
    findPluginIDs n l = do
      libName <- (c_getLibraryName n) >>= peekCString
      listPluginsOfLib l libName
  count <- c_getLibraryCount
  traverse (\n -> withLib n findPluginIDs) [0..(count - 1)]
-- | Resolve a 'PluginID' to a raw descriptor pointer.  Returns Nothing
-- when the library is unknown, fails to load, or lacks the plugin.
loadMaybePluginDescPtr :: PluginID -> IO (Maybe HVPluginDescriptorPtr)
loadMaybePluginDescPtr (libName, pluginIdx, _) = do
  libNameC <- newCString libName
  libIdx <- c_getLibraryIndex libNameC
  -- A negative index means the library name is not registered.
  if libIdx < 0 then return Nothing else do
    libPtr <- c_loadLibrary libIdx
    if libPtr == nullPtr then return Nothing else do
      plgPtr <- c_getPluginDescriptor libPtr (fromIntegral pluginIdx)
      return $ if plgPtr /= nullPtr then Just plgPtr else Nothing
-- | Load a plugin descriptor by ID, marshalling it out of C memory.
-- Nothing when the library or plugin cannot be found.
--
-- 'traverse peek' replaces the previous hand-rolled Maybe plumbing:
-- it peeks through a Just and passes a Nothing straight through.
loadMaybePlugin :: PluginID -> IO (Maybe HVPluginDescriptor)
loadMaybePlugin plgId = loadMaybePluginDescPtr plgId >>= traverse peek
-- | Instantiate a plugin (looked up by ID) at the given sample rate.
instantiateMaybePlugin :: PluginID -> Float -> IO (Maybe HVPluginHandle)
instantiateMaybePlugin plgId sampleRate = do
  maybeDescPtr <- loadMaybePluginDescPtr plgId
  case maybeDescPtr of
    Nothing  -> return Nothing
    Just ptr -> instantiateMaybePluginFromDesc ptr sampleRate
-- | Instantiate from a raw descriptor pointer; Nothing when the C side
-- hands back a null handle.
instantiateMaybePluginFromDesc :: HVPluginDescriptorPtr -> Float -> IO (Maybe HVPluginHandle)
instantiateMaybePluginFromDesc plgDescPtr sampleRate = do
  desc <- peek plgDescPtr
  hndl <- pluginInstantiate plgDescPtr desc (CFloat sampleRate)
  if hndl /= nullPtr then return (Just hndl) else return Nothing
-- | Apply (parameter index, value) settings to a handle, if present.
-- A Nothing handle passes through untouched.
setMaybePluginHandleParams :: HVPluginDescriptor -> [(Int, Float)] -> Maybe HVPluginHandle -> IO (Maybe HVPluginHandle)
setMaybePluginHandleParams _ _ Nothing = return Nothing
setMaybePluginHandleParams desc params (Just hndl) = do
  mapM_ (\(i, v) -> pluginSetParameter desc hndl (fromIntegral i) (realToFrac v)) params
  return $ Just hndl
-- | Initialise a plugin handle, validating the requested channel count
-- against the plugin's advertised min/max.  True on success, False for
-- a missing handle or a failed C-side initialisation.
initialiseMaybePluginP :: HVPluginDescriptor -> Maybe HVPluginHandle -> Int -> Int -> Int -> IO Bool
initialiseMaybePluginP _ Nothing _ _ _ = return False
initialiseMaybePluginP desc (Just hndl) inputChannels stepSize blockSize = do
  minCh <- fmap fromIntegral (pluginGetMinChannelCount desc hndl)
  maxCh <- fmap fromIntegral (pluginGetMaxChannelCount desc hndl)
  if inputChannels < minCh || inputChannels > maxCh
    then fail $ "inputChannels out of range ([" ++ (show minCh) ++ ".." ++ (show maxCh) ++ "])"
    else do
      res <- pluginInitialise desc hndl (fromIntegral inputChannels) (fromIntegral stepSize) (fromIntegral blockSize)
      -- The C call returns zero on failure; avoid the
      -- 'if c then False else True' anti-pattern.
      return (res /= 0)
-- | Initialise the handle; hand it back on success, Nothing otherwise.
initialiseMaybePlugin :: HVPluginDescriptor -> Int -> Int -> Int -> Maybe HVPluginHandle -> IO (Maybe HVPluginHandle)
initialiseMaybePlugin desc inputChannels stepSize blockSize hndl = do
  ok <- initialiseMaybePluginP desc hndl inputChannels stepSize blockSize
  return (if ok then hndl else Nothing)
-- | 'maybe' with a monadic continuation: run @f@ on a 'Just', otherwise
-- return the default value unchanged.
maybeM :: (Monad m) => b -> (a -> m b) -> Maybe a -> m b
maybeM def f mx = maybe (return def) f mx

-- | Effect-only variant of 'maybeM'.
maybeM_ :: (Monad m) => (a -> m ()) -> Maybe a -> m ()
maybeM_ = maybeM ()
-- | Full plugin lifecycle around a continuation: instantiate, set
-- parameters, initialise, run @f@, then always clean the handle up.
withMaybePluginHandle :: PluginID -> Float -> Int -> Int -> Int -> [(Int, Float)] -> (Maybe HVPluginDescriptor -> Maybe HVPluginHandle -> IO (Maybe a)) -> IO (Maybe a)
withMaybePluginHandle plgId sampleRate inputChannels stepSize blockSize params f =
  loadMaybePluginDescPtr plgId >>= peekDescriptor
  where
    peekDescriptor (Just ptr) = do
      desc <- peek ptr
      -- bracket guarantees pluginCleanup runs even if @f@ throws.
      bracket
        (instantiateMaybePluginFromDesc ptr sampleRate
           >>= setMaybePluginHandleParams desc params
           >>= initialiseMaybePlugin desc inputChannels stepSize blockSize)
        (maybeM_ (pluginCleanup desc))
        (f $ Just desc)
    peekDescriptor Nothing = return Nothing
-- | Effect-only variant of 'withMaybePluginHandle'; the continuation's
-- result is discarded.
withMaybePluginHandle_ :: PluginID -> Float -> Int -> Int -> Int -> [(Int, Float)] -> (Maybe HVPluginDescriptor -> Maybe HVPluginHandle -> IO ()) -> IO ()
withMaybePluginHandle_ plgId sampleRate inputChannels stepSize blockSize params f =
  withMaybePluginHandle plgId sampleRate inputChannels stepSize blockSize params discardRes >> return ()
  where
    -- Previously bound an unused result and produced 'Just undefined';
    -- returning a unit value avoids carrying a bottom in the (always
    -- discarded) result.
    discardRes d h = f d h >> return (Just ())
| TransformingMusicology/HVamp | src/HVamp.hs | lgpl-3.0 | 6,315 | 0 | 17 | 1,244 | 1,773 | 891 | 882 | 113 | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.