code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- | A store for storing and retrieving items
--
{-# LANGUAGE ExistentialQuantification, ScopedTypeVariables #-}
module Hakyll.Core.Store
( Store
, StoreGet (..)
, makeStore
, storeSet
, storeGet
) where
import Control.Applicative ((<$>))
import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
import System.FilePath ((</>))
import System.Directory (doesFileExist)
import Data.Maybe (fromMaybe)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Binary (Binary, encodeFile, decodeFile)
import Data.Typeable (Typeable, TypeRep, cast, typeOf)
import Hakyll.Core.Identifier
import Hakyll.Core.Util.File
-- | Items we can store
--
data Storable = forall a. (Binary a, Typeable a) => Storable a
-- | Result of retrieving an item from the store
--
data StoreGet a = Found a
| NotFound
| WrongType TypeRep TypeRep
deriving (Show, Eq)
-- | Data structure used for the store
--
data Store = Store
{ -- | All items are stored on the filesystem
storeDirectory :: FilePath
, -- | And some items are also kept in-memory
storeMap :: Maybe (MVar (Map FilePath Storable))
}
-- | Initialize the store.
--
-- When in-memory caching is requested, an 'MVar' holding an empty map is
-- allocated; otherwise no cache is kept and every read goes to disk.
makeStore :: Bool     -- ^ Use in-memory caching
          -> FilePath -- ^ Directory to use for hard disk storage
          -> IO Store -- ^ Store
makeStore inMemory directory = do
    cache <- mkCache
    return $ Store directory cache
  where
    mkCache
        | inMemory  = Just <$> newMVar M.empty
        | otherwise = return Nothing
-- | Auxiliary: add an item to the in-memory cache.
--
-- A no-op when the store was created without in-memory caching
-- ('storeMap' is 'Nothing').
cacheInsert :: (Binary a, Typeable a) => Store -> FilePath -> a -> IO ()
cacheInsert (Store _ Nothing) _ _ = return ()
cacheInsert (Store _ (Just mv)) path value =
    -- Wrap the value in 'Storable' so heterogeneous types share one map.
    modifyMVar_ mv $ return . M.insert path (Storable value)
-- | Auxiliary: get an item from the in-memory cache.
--
-- Returns 'NotFound' when caching is disabled or the path is absent, and
-- 'WrongType' when the cached item cannot be cast to the requested type.
cacheLookup :: forall a. (Binary a, Typeable a)
            => Store -> FilePath -> IO (StoreGet a)
cacheLookup (Store _ Nothing) _ = return NotFound
cacheLookup (Store _ (Just mv)) path = do
    map' <- readMVar mv
    case M.lookup path map' of
        Nothing -> return NotFound
        Just (Storable s) -> return $ case cast s of
            -- The cast fails when the stored value's type differs from the
            -- type the caller asked for; report both types for diagnostics.
            Nothing -> WrongType (typeOf s) $ typeOf (undefined :: a)
            Just s' -> Found s'
-- | Create the on-disk path for an item, combining the store directory,
-- the caller-supplied name, the identifier's group (if any), the
-- identifier's own path, and a fixed trailing component.
makePath :: Store -> String -> Identifier a -> FilePath
makePath store name identifier =
    foldr1 (</>)
        [ storeDirectory store
        , name
        , grp
        , toFilePath identifier
        , "hakyllstore"
        ]
  where
    grp = fromMaybe "" (identifierGroup identifier)
-- | Store an item.
--
-- The item is always encoded to disk (creating intermediate directories
-- as needed); when in-memory caching is enabled it is also inserted into
-- the cache so a later 'storeGet' can avoid the filesystem.
storeSet :: (Binary a, Typeable a)
         => Store -> String -> Identifier a -> a -> IO ()
storeSet store name identifier value = do
    makeDirectories path
    encodeFile path value
    cacheInsert store path value
  where
    path = makePath store name identifier
-- | Load an item.
--
-- Checks the in-memory cache first; on a miss, falls back to the
-- filesystem and (on success) repopulates the cache with the decoded
-- value so subsequent reads are fast.
storeGet :: (Binary a, Typeable a)
         => Store -> String -> Identifier a -> IO (StoreGet a)
storeGet store name identifier = do
    -- First check the in-memory map
    mv <- cacheLookup store path
    case mv of
        -- Not found in the map, try the filesystem
        NotFound -> do
            exists <- doesFileExist path
            if not exists
                -- Not found in the filesystem either
                then return NotFound
                -- Found in the filesystem
                else do v <- decodeFile path
                        cacheInsert store path v
                        return $ Found v
        -- Found in the in-memory map (or WrongType), just return
        s -> return s
  where
    path = makePath store name identifier
| sol/hakyll | src/Hakyll/Core/Store.hs | bsd-3-clause | 3,676 | 0 | 17 | 1,045 | 986 | 523 | 463 | 73 | 3 |
module ResultWorthy.Tests.Util where
import ResultWorthy.Util
import Test.HUnit
-- | HUnit tests for the Util module.
--
-- The explicit type signature avoids a -Wmissing-signatures warning and
-- documents that this is a composite 'Test' value.
tests :: Test
tests = TestLabel "The Util Tests" $ TestList
    [ TestLabel "withIndeces" $ TestCase
        $ assertEqual "Arrays should be equal"
            (withIndeces ["foo", "bar", "baz", "bong"])
            [(0, "foo"), (1, "bar"), (2, "baz"), (3, "bong")]
    , TestLabel "trimLeadingWhitespace" $ TestCase
        $ assertEqual "Output should make sense"
            [" foo", "bar", " baz"]
            (trimLeadingWhitespace [" foo", " bar", " baz"])
    ]
| lawrencelomax/ResultWorthy | ResultWorthy/Tests/Util.hs | bsd-3-clause | 596 | 0 | 12 | 200 | 152 | 88 | 64 | 12 | 1 |
{-|
Description: SDL audio support.
-}
module Graphics.UI.SDL.Audio
(
) where
| abbradar/MySDL | src/Graphics/UI/SDL/Audio.hs | bsd-3-clause | 94 | 0 | 3 | 27 | 12 | 9 | 3 | 2 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : AERN2.Poly.Basics
Description : Basics of unary sparse polynomials
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Basics of unary sparse polynomials
-}
module AERN2.Poly.Basics
(
PolyCoeffRing, PolyCoeffField, PolyCoeffBall
, Poly(..), Degree, Terms
, terms_empty
, terms_size
, terms_insertWith
, terms_toList, terms_toDescList
, terms_fromList, terms_fromListAddCoeffs
, terms_unionWith
, terms_map
, terms_filterKeepConst
, terms_filterMayLoseConst
, terms_degree, terms_degrees
, terms_coeffs
, terms_updateConst, terms_updateReturnConst
, terms_lookupCoeff, terms_lookupCoeffDoubleConstTerm
, formatTerms
)
where
import MixedTypesNumPrelude
import qualified Prelude as P
-- import Text.Printf
import qualified Data.Map as Map
import qualified Data.List as List
-- import Test.Hspec
-- import Test.QuickCheck
import Control.CollectErrors
-- import AERN2.MP.ErrorBound
import AERN2.MP.Ball
import AERN2.MP.Dyadic
import AERN2.Real
-- import AERN2.Interval
-- import AERN2.RealFun.Operations
-- import AERN2.RealFun.UnaryBallFun
{- types -}
{-|
An aggregate sub-class for
types suitable as coefficients of our polynomials,
loose enough to permit Integer coefficients.
-}
class
(Ring c, HasIntegers c, HasAccuracy c, HasNorm c, Show c) =>
PolyCoeffRing c
instance PolyCoeffRing Integer
instance PolyCoeffRing Dyadic
instance PolyCoeffRing Rational
instance PolyCoeffRing MPBall
{-|
An aggregate sub-class for
types suitable as coefficients of our polynomials,
loose enough to permit Rational coefficients.
-}
class
(PolyCoeffRing c, Field c, HasDyadics c, CanAddSubMulDivCNBy c Dyadic) =>
PolyCoeffField c
instance PolyCoeffField Rational
instance PolyCoeffField MPBall
{-|
An aggregate sub-class for
types suitable as coefficients of our polynomials
-}
class
(PolyCoeffField c, CanAddSubMulDivCNBy c CauchyReal
, IsInterval c, CanMinMaxSameType (IntervalEndpoint c), IsBall c, CanSetPrecision c)
=>
PolyCoeffBall c
instance PolyCoeffBall MPBall
newtype Poly c = Poly { poly_terms :: Terms c }
instance (CanBeErrors es) => CanEnsureCE es (Poly c)
instance (CanBeErrors es) => CanExtractCE es Poly
where
extractCE sample_es (Poly terms) =
fmap Poly (extractCE sample_es terms)
type Terms c = Map.Map Degree c
type Degree = Integer
instance (CanBeErrors es) => CanExtractCE es (Map.Map Degree)
terms_empty :: Terms c
terms_empty = Map.empty
terms_size :: Terms c -> Integer
terms_size = integer . Map.size
terms_insertWith :: (c -> c -> c) -> Degree -> c -> Terms c -> Terms c
terms_insertWith = Map.insertWith
terms_toList :: Terms c -> [(Degree, c)]
terms_toList = Map.toList
terms_toDescList :: Terms c -> [(Degree, c)]
terms_toDescList = Map.toDescList
-- | Build terms from a list of degree-coefficient pairs, ensuring a
-- constant term (degree 0) is always present (inserted as 0 if missing).
terms_fromList :: (HasIntegers c) => [(Degree, c)] -> Terms c
terms_fromList coeffs
    | 0 `Map.member` ts = ts
    | otherwise         = Map.insert 0 (convertExactly 0) ts
  where
    ts = Map.fromList coeffs
-- | Build terms from a list of degree-coefficient pairs, adding together
-- the coefficients of duplicate degrees, and ensuring a constant term
-- (degree 0) is always present.
terms_fromListAddCoeffs :: (CanAddSameType c, HasIntegers c) => [(Degree, c)] -> Terms c
terms_fromListAddCoeffs newTerms =
    -- Strict left fold: the lazy 'foldl' would build a thunk chain
    -- proportional to the input length before the map is ever forced.
    List.foldl' addTerm terms_empty ((0, convertExactly 0) : newTerms)
  where
    addTerm prevTerms (i,a) =
        terms_insertWith (+) i a prevTerms
terms_unionWith :: (c -> c -> c) -> Terms c -> Terms c -> Terms c
terms_unionWith = Map.unionWith
terms_filterMayLoseConst :: (Degree -> c -> Bool) -> Terms c -> Terms c
terms_filterMayLoseConst = Map.filterWithKey
terms_filterKeepConst :: (Degree -> c -> Bool) -> Terms c -> Terms c
terms_filterKeepConst cond = Map.filterWithKey cond_leaveConst
where
cond_leaveConst k a
| k == 0 = True
| otherwise = cond k a
-- | The degree of the terms, i.e. the largest degree with an entry.
--
-- Partial: calling this on empty terms is a programming error and raises
-- 'error'.
terms_degree :: Terms c -> Degree
terms_degree ts
    | null ts = error "terms_degree called with empty terms"
    | otherwise = fst $ Map.findMax ts
terms_degrees :: Terms c -> [Degree]
terms_degrees = Map.keys
terms_coeffs :: Terms c -> [c]
terms_coeffs = Map.elems
terms_map :: (c1 -> c2) -> Terms c1 -> Terms c2
terms_map = Map.map
-- | Update the constant term (degree 0) with the given function, treating
-- a missing constant term as 0.
terms_updateConst :: (HasIntegers c) => (c -> c) -> Terms c -> Terms c
terms_updateConst updateFn ts =
    -- 'Map.alter' performs the lookup and the insert/adjust in a single
    -- traversal of the map, unlike the lookup-then-insert pattern.
    Map.alter upd 0 ts
  where
    upd Nothing  = Just (updateFn (convertExactly 0))
    upd (Just c) = Just (updateFn c)
terms_updateReturnConst :: (HasIntegers c) => (c -> c) -> Terms c -> (Terms c,c,c)
terms_updateReturnConst updateFn ts =
case Map.lookup 0 ts of
Nothing ->
let new = updateFn z in (Map.insert 0 new ts, z, new)
Just old ->
let new = updateFn old in (Map.insert 0 new ts, old, new)
where
z = convertExactly 0
-- | Look up the coefficient of the given degree, doubling the constant
-- term (degree 0); absent degrees count as 0.
terms_lookupCoeffDoubleConstTerm ::
  (HasIntegers c, CanAddSameType c) =>
  (Terms c) -> Degree -> c
terms_lookupCoeffDoubleConstTerm t i =
    if i == 0 then coeff + coeff else coeff
  where
    coeff = terms_lookupCoeff t i
-- | Look up the coefficient of the given degree, defaulting to 0 when the
-- degree has no entry.
terms_lookupCoeff ::
  (HasIntegers c) =>
  (Terms c) -> Degree -> c
terms_lookupCoeff t i = Map.findWithDefault (convertExactly 0) i t
{- precision -}
instance (HasPrecision c) => HasPrecision (Poly c) where
getPrecision (Poly ts) = foldl1 max $ map getPrecision $ terms_coeffs ts
instance (CanSetPrecision c) => CanSetPrecision (Poly c) where
setPrecision p (Poly ts) = Poly $ terms_map (setPrecision p) ts
{- accuracy -}
instance (HasAccuracy c) => HasAccuracy (Poly c) where
getAccuracy (Poly ts) = foldl1 min $ map getAccuracy $ terms_coeffs ts
getFiniteAccuracy (Poly ts) = foldl1 min $ map getFiniteAccuracy $ terms_coeffs ts
{- negation -}
instance (CanNegSameType c) => CanNeg (Poly c) where
type NegType (Poly c) = Poly c
negate (Poly t1) = Poly $ terms_map negate t1
{- addition -}
instance (CanAddSameType c) => CanAddAsymmetric (Poly c) (Poly c) where
type AddType (Poly c) (Poly c) = Poly c
add (Poly t1) (Poly t2) = Poly $ terms_unionWith (+) t1 t2
$(declForTypes
[[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
(\ t -> [d|
instance (CanAddThis c $t, HasIntegers c) => CanAddAsymmetric $t (Poly c) where
type AddType $t (Poly c) = Poly c
add n (Poly t2) = Poly $ terms_updateConst (+ n) t2
instance (CanAddThis c $t, HasIntegers c) => CanAddAsymmetric (Poly c) $t where
type AddType (Poly c) $t = Poly c
add (Poly t1) n = Poly $ terms_updateConst (+ n) t1
|]))
{- subtraction -}
instance (CanNegSameType c, CanAddSameType c) => CanSub (Poly c) (Poly c)
$(declForTypes
[[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
(\ t -> [d|
instance (CanNegSameType c, CanAddThis c $t, HasIntegers c) => CanSub $t (Poly c)
instance (CanAddThis c $t, HasIntegers c) => CanSub (Poly c) $t
|]))
{- scaling -}
$(declForTypes
[[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
(\ t -> [d|
instance (CanMulBy c $t) => CanMulAsymmetric $t (Poly c) where
type MulType $t (Poly c) = Poly c
mul n (Poly t2) = Poly $ terms_map (* n) t2
instance (CanMulBy c $t) => CanMulAsymmetric (Poly c) $t where
type MulType (Poly c) $t = Poly c
mul (Poly t1) n = Poly $ terms_map (* n) t1
|]))
$(declForTypes
[[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
(\ t -> [d|
instance (CanDivCNBy c $t, CanEnsureCN (DivType c $t), EnsureNoCN (DivType c $t) ~ c) => CanDiv (Poly c) $t where
type DivType (Poly c) $t = (Poly (EnsureCN c))
divide (Poly t1) n = Poly $ terms_map (/ n) t1
type DivTypeNoCN (Poly c) $t = Poly c
divideNoCN (Poly t1) n = Poly $ terms_map (/! n) t1
|]))
{- show -}
instance (Show c, HasIntegers c) => Show (Poly c) where
show (Poly terms) =
formatTerms showCf terms
where
showCf c =
--(show (c::MPBall), (c == (convertExactly 0)) == Just True, (c == (convertExactly 1)) == Just True)
(show c, False, False)
-- | Render terms as a human-readable polynomial in the variable @x@,
-- in ascending order of degree.
--
-- @showCf@ renders one coefficient, returning the string together with
-- two flags: whether the coefficient is (exactly) zero and whether it is
-- (exactly) one.  Zero terms are omitted entirely; a coefficient of one
-- omits the @coeff*@ prefix (except for the constant term).
formatTerms ::
  (HasIntegers c) =>
  (c -> (String, Bool, Bool)) -> Terms c -> String
formatTerms showCf terms =
    showTerms ("", "-") $
    List.sortBy (\(a,_) (b,_) -> P.compare a b) $
    termsToShow
  where
    -- The pair holds the connectives used for the FIRST term only
    -- ("" / "-"); all later terms use " + " / " - ".
    showTerms (connectivePos, connectiveNeg) (term : rest) =
        termS ++ (showTerms (" + ", " - ") rest)
      where
        termS =
            case s of
                -- Negative coefficient: strip the rendered leading '-'
                -- and use the negative connective instead.
                '-':ss -> connectiveNeg ++ ss
                _ -> connectivePos ++ s
        s = showTerm term
    showTerms _ [] = ""
    -- Show "0" rather than the empty string for the zero polynomial.
    termsToShow =
        if null termsToShow_pre
            then [(0, convertExactly 0)]
            else termsToShow_pre
    termsToShow_pre =
        filter coeffNotExactZero $
        terms_toList terms
    coeffNotExactZero (_, cf) =
        not isZero
      where
        (_, isZero, _) = showCf cf
    showTerm (deg, coeff)
        | deg == 0 = coeffS
        | isOne = showPower
        | otherwise = coeffS ++ "*" ++ showPower
      where
        (coeffS, _, isOne) = showCf coeff
        showPower
            | deg == 1 = "x"
            | otherwise = "x^" ++ show deg
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/Poly/Basics.hs | bsd-3-clause | 9,205 | 0 | 12 | 2,135 | 2,579 | 1,408 | 1,171 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module React.Flux.Mui.Badge where
import Protolude
import Data.Aeson
import Data.Aeson.Casing
import Data.String (String)
import React.Flux
import React.Flux.Mui.Util
data Badge = Badge
{ badgeClassName :: !(Maybe Text)
, badgePrimary :: !(Maybe Bool)
, badgeSecondary :: !(Maybe Bool)
} deriving (Generic, Show)
instance ToJSON Badge where
toJSON = genericToJSON $ aesonDrop (length ("Badge" :: String)) camelCase
-- | Default 'Badge': no class name, with both the primary and secondary
-- flags explicitly set to @Just False@.
defBadge :: Badge
defBadge = Badge Nothing off off
  where
    off = Just False
-- | Render a Material-UI @Badge@ element.
--
-- Serializes the 'Badge' record to props (via 'toProps', falling back to
-- no props when serialization yields 'Nothing'), appends the caller's
-- extra 'PropertyOrHandler' list, and hands off to the foreign component.
badge_ ::
  Badge
  -> [PropertyOrHandler handler]
  -> ReactElementM handler ()
  -> ReactElementM handler ()
badge_ args props = foreign_ "Badge" (fromMaybe [] (toProps args) ++ props)
| pbogdan/react-flux-mui | react-flux-mui/src/React/Flux/Mui/Badge.hs | bsd-3-clause | 820 | 0 | 11 | 146 | 247 | 137 | 110 | 34 | 1 |
{-# LANGUAGE DeriveGeneric, OverloadedStrings #-}
module Pygmalion.Core
( CommandInfo (..)
, SourceFile
, SourceFileHash
, SourceFileWrapper
, unwrapSourceFile
, SourceFileHashWrapper
, unwrapSourceFileHash
, WorkingPath
, Time
, TimeHash
, Language (..)
, Inclusion (..)
, DefInfo (..)
, DefUpdate (..)
, SourceLocation (..)
, SourceRange (..)
, Identifier
, USRHash
, SourceLine
, SourceCol
, SourceKind (..)
, Override (..)
, Invocation (..)
, Reference (..)
, ReferenceUpdate (..)
, SourceReferenced (..)
, SourceReference (..)
, SourceContext
, mkSourceFile
, unSourceFile
--, unSourceFileText
, queryExecutable
, daemonExecutable
, indexExecutable
, pygmalionDir
, dbFile
, configFile
, socketFile
, compileCommandsFile
, tagsFile
, stableHash
, stableHashWithSalt
) where
import Control.Applicative
import qualified Data.ByteString.UTF8 as B
import Data.Int
import Data.Hashable
import Data.Serialize
import Database.SQLite.Simple (FromRow(..), field)
import GHC.Generics
import System.FilePath.Posix ((</>))
import Pygmalion.SourceKind
-- The information we collect about a compilation command.
data CommandInfo = CommandInfo
{ ciSourceFile :: !SourceFile
, ciWorkingPath :: !WorkingPath
, ciCommand :: !B.ByteString
, ciArgs :: ![B.ByteString]
, ciLanguage :: !Language
} deriving (Eq, Read, Show, Generic)
{-
instance Serialize T.Text where
put = put . TE.encodeUtf16BE
get = liftM (TE.decodeUtf16BEWith onError) get
where onError _ _ = Nothing
-}
instance Serialize CommandInfo
instance FromRow CommandInfo where
fromRow = CommandInfo <$> field -- ciSourceFile
<*> field -- ciWorkingPath
<*> field -- ciCommand
<*> (B.lines <$> field) -- ciArgs
<*> fromRow -- ciLanguage
type SourceFile = B.ByteString
type SourceFileHash = Int
mkSourceFile :: FilePath -> SourceFile
mkSourceFile = B.fromString
unSourceFile :: SourceFile -> FilePath
unSourceFile = B.toString
newtype SourceFileWrapper = SourceFileWrapper SourceFile
unwrapSourceFile :: SourceFileWrapper -> SourceFile
unwrapSourceFile (SourceFileWrapper sf) = sf
instance FromRow SourceFileWrapper where
fromRow = SourceFileWrapper <$> field
newtype SourceFileHashWrapper = SourceFileHashWrapper SourceFileHash
unwrapSourceFileHash :: SourceFileHashWrapper -> SourceFileHash
unwrapSourceFileHash (SourceFileHashWrapper sf) = sf
instance FromRow SourceFileHashWrapper where
fromRow = SourceFileHashWrapper <$> field
type WorkingPath = B.ByteString
type Time = Int64
type TimeHash = Int
data Language = CLanguage
| CPPLanguage
| UnknownLanguage
deriving (Eq, Enum, Generic, Ord, Read, Show)
instance Serialize Language
instance FromRow Language where
fromRow = toEnum <$> field
-- | Inclusion metadata.
data Inclusion = Inclusion
{ icInclusion :: !SourceFile -- ^ The included file.
, icIncluder :: !SourceFileHash -- ^ The file which does the including.
} deriving (Eq, Show, Generic)
instance Serialize Inclusion
-- The information we collect about definitions in source code.
data DefInfo = DefInfo
{ diIdentifier :: !Identifier
, diUSR :: !USRHash
, diSourceLocation :: !SourceLocation
, diDefKind :: !SourceKind
, diContext :: !USRHash
} deriving (Eq, Show, Generic)
instance Serialize DefInfo
instance FromRow DefInfo where
fromRow = DefInfo <$> field <*> field <*> fromRow <*> fromRow <*> field
-- Cheaper variant of DefInfo used for database updates.
data DefUpdate = DefUpdate
{ diuIdentifier :: !Identifier
, diuUSR :: !USRHash
, diuFileHash :: !SourceFileHash
, diuLine :: !SourceLine
, diuCol :: !SourceCol
, diuDefKind :: !SourceKind
, diuContext :: !USRHash
} deriving (Eq, Show, Generic)
instance Serialize DefUpdate
data SourceLocation = SourceLocation
{ slFile :: !SourceFile
, slLine :: !SourceLine
, slCol :: !SourceCol
} deriving (Eq, Show, Generic)
instance Serialize SourceLocation
instance FromRow SourceLocation where
fromRow = SourceLocation <$> field <*> field <*> field
data SourceRange = SourceRange
{ srFile :: !SourceFile
, srLine :: !SourceLine
, srCol :: !SourceCol
, srEndLine :: !SourceLine
, srEndCol :: !SourceCol
} deriving (Eq, Show, Generic)
instance Serialize SourceRange
instance FromRow SourceRange where
fromRow = SourceRange <$> field <*> field <*> field <*> field <*> field
type Identifier = B.ByteString
type USRHash = Int
type RefHash = Int
type SourceLine = Int
type SourceCol = Int
-- This would be the cheaper variant of Override, but we never return these
-- directly from queries (we always return DefInfos) so we don't need the full
-- version at all.
data Override = Override
{ orDef :: !USRHash
, orOverrided :: !USRHash
} deriving (Eq, Show, Generic)
instance Serialize Override
data Invocation = Invocation
{ ivDefInfo :: !DefInfo
, ivSourceLocation :: !SourceLocation
} deriving (Eq, Show, Generic)
instance Serialize Invocation
instance FromRow Invocation where
fromRow = Invocation <$> fromRow <*> fromRow
data Reference = Reference
{ rfRange :: !SourceRange
, rfKind :: !SourceKind
, rfContext :: !USRHash
, rfUSR :: !USRHash
} deriving (Eq, Show, Generic)
instance Serialize Reference
-- Cheaper variant of Reference used for database updates.
data ReferenceUpdate = ReferenceUpdate
{ rfuId :: !RefHash
, rfuFileHash :: !SourceFileHash
, rfuLine :: !SourceLine
, rfuCol :: !SourceCol
, rfuEndLine :: !SourceLine
, rfuEndCol :: !SourceCol
, rfuKind :: !SourceKind
, rfuViaHash :: !USRHash
, rfuDeclHash :: !RefHash
, rfuContextHash :: !USRHash
, rfuUSRHash :: !USRHash
} deriving (Eq, Show, Generic)
instance Serialize ReferenceUpdate
data SourceReferenced = SourceReferenced
{ sdDef :: !DefInfo
, sdRange :: !SourceRange
, sdKind :: !SourceKind
, sdViaHash :: !USRHash
, sdDeclHash :: !RefHash
} deriving (Eq, Show, Generic)
instance Serialize SourceReferenced
instance FromRow SourceReferenced where
fromRow = SourceReferenced <$> fromRow <*> fromRow <*> fromRow <*> field <*> field
data SourceReference = SourceReference
{ srLocation :: !SourceLocation
, srKind :: !SourceKind
, srContext :: !SourceContext
} deriving (Eq, Show, Generic)
instance Serialize SourceReference
instance FromRow SourceReference where
fromRow = SourceReference <$> fromRow <*> fromRow <*> field
type SourceContext = B.ByteString
-- Tool names.
queryExecutable, daemonExecutable, indexExecutable :: String
queryExecutable = "pyg"
daemonExecutable = "pygd"
indexExecutable = "pygindex-clang"
-- Data files.
pygmalionDir, dbFile, configFile, socketFile, compileCommandsFile, tagsFile :: FilePath
pygmalionDir = ".pygmalion"
dbFile = pygmalionDir </> "index.sqlite"
configFile = pygmalionDir </> "pygmalion.yaml"
socketFile = pygmalionDir </> "socket"
compileCommandsFile = "compile_commands.json"
tagsFile = "TAGS"
-- | The value returned by hashable's 'hash' is different for every process
-- because it uses a number derived from the process's start time as a salt.
-- We need a stable hash, so we use 0 as a salt no matter what.
stableHash :: Hashable a => a -> Int
stableHash = hashWithSalt 0
-- | Like 'stableHash', but for 'hashWithSalt'. This is identical to
-- 'hashWithSalt', but having this means that we can avoid importing
-- "Data.Hashable" at all and be sure that we don't accidentally use
-- 'hash' without realizing it.
stableHashWithSalt :: Hashable a => Int -> a -> Int
stableHashWithSalt = hashWithSalt
| sethfowler/pygmalion | src/Pygmalion/Core.hs | bsd-3-clause | 7,991 | 0 | 11 | 1,784 | 1,653 | 956 | 697 | 315 | 1 |
-- |NullPoint:
-- Pointed types (usually containers) that can be empty.
-- Corresponds to Data.Monoid.mempty
module Data.NullPoint (
-- * Classes
NullPoint (..)
)
where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
-- ----------------------------------------------
-- |NullPoint class. Containers that have a null representation, corresponding
-- to Data.Monoid.mempty.
class NullPoint c where
empty :: c
instance NullPoint [a] where
empty = []
instance NullPoint B.ByteString where
empty = B.empty
instance NullPoint L.ByteString where
empty = L.empty
| iteloo/tsuru-sample | iteratee-0.8.9.6/src/Data/NullPoint.hs | bsd-3-clause | 609 | 0 | 6 | 99 | 105 | 65 | 40 | 12 | 0 |
-- * Implements set-theoretic operators for the Ord type.
-- | Haskell Prelude "intersect", "(\\)" and "nub" implementations require an
-- Eq instance.
-- If we have implementations for the Ord type as well, evaluation can be
-- sped up significantly.
module Data.Extension.Ord
( intersect
, nub
, subset
, (\\)
) where
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
-- | Order-preserving intersection: keeps every element of the first list
-- (with multiplicity) that also occurs in the second.  O(n log m).
intersect :: (Ord a) => [a] -> [a] -> [a]
intersect xs ys = [x | x <- xs, x `Set.member` members]
  where
    members = Set.fromList ys
-- | Order-preserving de-duplication in O(n log n), tracking the elements
-- already emitted in a 'Set.Set'.
nub :: (Ord a) => [a] -> [a]
nub = go Set.empty
  where
    go _ [] = []
    go seen (y:ys)
        | y `Set.member` seen = go seen ys
        | otherwise           = y : go (Set.insert y seen) ys
-- | Multiset inclusion: @subset x y@ holds when removing the elements of
-- @y@ (with multiplicity) from @x@ leaves nothing behind.
subset :: Ord a => [a] -> [a] -> Bool
subset x y = null (x \\ y)
infix 5 \\
-- | Multiset difference: removes from the first list one occurrence for
-- each occurrence in the second, preserving the order of the survivors.
--
-- Builds a count map of the second list, so it runs in O((m+n) log m)
-- rather than the O(m*n) of 'Data.List.(\\)'.
(\\) :: (Ord a) => [a] -> [a] -> [a]
a \\ b = go counts0 a
  where
    -- Occurrences of each element still to be removed.  Named 'counts0'
    -- because the original binding 'init' shadowed Prelude's 'init'.
    counts0 = Map.fromListWith (+) [(x, 1 :: Int) | x <- b]
    go _ [] = []
    go counts (x:xs) = case Map.lookup x counts of
        Just n | n > 0 -> go (Map.insert x (n - 1) counts) xs
        _              -> x : go counts xs
| shingoOKAWA/hsarg-haskell | src/Data/Extension/Ord.hs | bsd-3-clause | 1,231 | 0 | 15 | 432 | 457 | 253 | 204 | 26 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -Wall -fenable-rewrite-rules -ddump-rule-firings -ddump-to-file #-}
import Control.Applicative
import Control.Arrow (first)
import qualified Control.Exception as Ex
import Control.Monad.Writer
import qualified Data.Foldable as Fold
import Data.IORef
import Data.Maybe
import qualified Data.Sequence as Q
import Data.Typeable
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck (Arbitrary, arbitrary)
import qualified Test.QuickCheck as QC
import qualified Test.QuickCheck.Property as QC
import Text.PrettyPrint
import Text.Show.Functions ()
import qualified NoRewrite as NR
import qualified Transducers.Fold as TrFold
import Transducers.FreeMonad
import Transducers.Par
import Transducers.Transducers as Tr
main :: IO ()
main = defaultMain
[ testProperty "yield" prop_yield
, testProperty "tryAwait" prop_tryAwait
, testProperty "panic" prop_panic
, testProperty "return" prop_return
, testProperty "bind" prop_bind
, testProperty "bind_assoc" prop_bind_assoc
, testProperty "comp" prop_comp
-- , testProperty "comp_assoc" _prop_comp_assoc
-- , testProperty "comp_left_id" _prop_comp_left_id
-- , testProperty "comp_right_id" _prop_comp_right_id
, testProperty "tmap" prop_tmap
, testProperty "tfilter" prop_tfilter
, testProperty "mapM" prop_mapM
, testProperty "mapM" prop_dropWhileM
, testProperty "tfold" prop_tfold
, testProperty "tscanl" prop_tscanl
, testProperty "feed" prop_feed
, testProperty "rewrite_tmap" prop_rewrite_tmap
, testProperty "rewrite_tfilter" prop_rewrite_tfilter
, testProperty "rewrite_flatten" prop_rewrite_flatten
, testProperty "rewrite_tscanl" prop_rewrite_tscanl
, testProperty "rewrite_tfold" prop_rewrite_tfold
, testProperty "rewrite_mapM" prop_rewrite_mapM
, testProperty "rewrite_dropWhileM" prop_rewrite_dropWhileM
, testProperty "flatten/tfilter" prop_flatten_tfilter
, testProperty "par" prop_par
]
-- | A monad where we can easily observe all side effects.
type TestMonad = Writer (Q.Seq SideEffect)
-- | A side effect in 'TestMonad'.
type SideEffect = Int
-- | Result of executing a transducer
data Trace i o a = Trace
[i] -- ^ remaining input
[Event o] -- ^ history of events
(Maybe a) -- ^ result, or Nothing if the input ends prematurely
deriving (Eq, Show)
-- | A thing a transducer can do.
data Event o
= TryE
| YieldE o
| PanicE TestException
| TLiftE (Q.Seq SideEffect)
deriving (Show, Eq)
-- | An exception type for testing. We only allow this type of exception,
-- because other exception types are not comparable in general.
newtype TestException = TestException Int
deriving (Typeable, Show, Eq)
instance Ex.Exception TestException
instance Arbitrary TestException where
arbitrary = TestException <$> arbitrary
-- | A type-restricted version of 'Transducer', together with a string
-- representation.
data TestTransducer = TestTransducer
{ testTransducerExpression :: String
, unTestTransducer :: Transducer Ex.SomeException Int Int TestMonad Int
}
instance Show TestTransducer where
show = testTransducerExpression
instance Arbitrary TestTransducer where
arbitrary = fmap (uncurry TestTransducer . first renderPDoc) $
arbitraryTransducerWith $ \gen -> do
(doc, val) <- gen
effects <- arbitrary
let doc' =
opl 1 ">>"
(app (lit "tell") $ app (lit "Q.fromList") (lit $ show effects))
(app (lit "return") doc)
return (doc', val <$ tell (Q.fromList effects))
-- | Pretty-printed expression with operator precedence.
type PDoc = Int -> Doc
-- | Turn a 'PDoc' into a String by rendering it at the lowest operator
-- precedence (0), i.e. without any outer parentheses.
renderPDoc :: PDoc -> String
renderPDoc pd = render (pd 0)
-- | Left-associative operator
opl :: Int -> String -> PDoc -> PDoc -> PDoc
opl prec opr x y p = parensIf (prec < p) $ fsep
[ x prec
, text opr
, y (prec + 1)
]
-- | Literal
lit :: String -> PDoc
lit str _ = text str
-- | Function application
app :: PDoc -> PDoc -> PDoc
app x y p = parensIf (10 < p) $ fsep
[ x 10
, y 11
]
-- | Wrap a 'Doc' in parentheses when the flag is set; otherwise return it
-- unchanged.
parensIf :: Bool -> Doc -> Doc
parensIf wrap doc = if wrap then parens doc else doc
arbitraryTransducerWith
:: forall o a i m
. (Arbitrary o, Arbitrary a, Show a, Show o)
=> (forall r. QC.Gen (PDoc, r) -> QC.Gen (PDoc, m r))
-> QC.Gen (PDoc, Transducer Ex.SomeException i o m a)
arbitraryTransducerWith genM = fun1 "Trs" Trs <$> go
where
go :: QC.Gen (PDoc, FreeMonad (TransducerF Ex.SomeException i o m) a)
go = fmap (fun1 "fromView" fromView) $ QC.sized $ \size ->
if size == 0
then fun1 "Pure" Pure . addDoc <$> arbitrary
else fmap (fun1 "Impure" Impure) $ QC.resize (size-1) $ QC.oneof
[ fun1 "Try" Try . mkConst <$> go
, fun2 "Yield" Yield . addDoc <$> arbitrary <*> go
, fun2 "Panic" Panic
<$> (fun1 "Ex.toException" Ex.toException
. fun1 "TestException" TestException . addDoc <$> arbitrary)
<*> go
, fun1 "TLift" TLift <$> genM go
]
mkConst = fun1 "const" const
addDoc x = (\_ -> parens $ text (show x), x)
fun1 str f (doc, val) = (app (lit str) doc, f val)
fun2 str f (doc0, val0) (doc1, val1)
= (app (app (lit str) doc0) doc1, f val0 val1)
--arbitraryFunction :: CoArbitrary a => QC.Gen b -> QC.Gen (a -> b)
--arbitraryFunction gen = QC.promote (`QC.coarbitrary` gen)
-- | Run a transducer using the given input.
exec :: Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a
exec = execWith True
-- | Run a transducer using the given input. Does not assume the EOF
-- at the end of the input.
_execPartial :: Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a
_execPartial = execWith False
-- | Run a transducer step by step, recording every event it performs.
--
-- When the first argument is True, input exhaustion is treated as EOF and
-- fed to 'Try' continuations as 'Nothing'; otherwise execution stops with
-- no result ('Nothing' in the trace) and the remaining input untouched.
execWith :: Bool -> Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a
execWith terminate trans0 is0 = Trace remaining (Fold.toList events) out
  where
    ((out, remaining), events) = runWriter $ go (unTRS trans0) is0
    -- Interpret the free-monad view one instruction at a time, threading
    -- the remaining input and emitting one 'Event' per instruction.
    go trans is = case toView trans of
        Pure x -> return (Just x, is)
        Impure act -> case act of
            Try cont -> do
                emit TryE
                case is of
                    []
                        | terminate -> go (cont Nothing) []
                        | otherwise -> return (Nothing, is)
                    i:rest -> go (cont (Just i)) rest
            Yield o cont -> do
                emit $ YieldE o
                go cont is
            Panic e cont -> case Ex.fromException e of
                Just myEx -> do
                    emit $ PanicE myEx
                    go cont is
                -- Only 'TestException' is allowed in tests, since other
                -- exception types are not comparable in general.
                Nothing -> error $ "exec: unknown exception " ++ show e
            TLift action -> do
                -- Run the lifted TestMonad action immediately and record
                -- the side effects it produced as a single event.
                let !(cont, sideEffects) = runWriter action
                emit $ TLiftE sideEffects
                go cont is
emit :: MonadWriter (Q.Seq a) m => a -> m ()
emit = tell . Q.singleton
-- | Run a transducer, and returns a summary of execution:
-- (leftover input, yielded values, accumulated side effects, final result).
summary
  :: Transducer Ex.SomeException i o TestMonad a
  -> (Bool, [i])
  -> ([i], [o], [SideEffect], Maybe a)
summary trans (terminate, inp) = (remaining, out, effects, end)
  where
    !(Trace remaining evts end) = execWith terminate trans inp
    -- Project only the yields, and only the lifted-action side effects,
    -- out of the full event log.
    out = [o | YieldE o <- evts ]
    effects = Fold.toList $ mconcat [e | TLiftE e <- evts ]
-- | Like 'summary' but reduced to (did the transducer finish?, outputs).
output' :: Transducer Ex.SomeException i o TestMonad a -> (Bool, [i]) -> (Bool, [o])
output' trans inp = (isJust end, out)
  where
    !(_, out, _, end) = summary trans inp
-- | Just the outputs of a run; see 'output''.
output :: Transducer Ex.SomeException i o TestMonad a -> (Bool, [i]) -> [o]
output trans inp = snd $ output' trans inp
-- Primitives
-- | 'yield' emits exactly one value, consumes nothing, and succeeds.
prop_yield :: Int -> [Int] -> Bool
prop_yield x is = exec (yield x) is == Trace is [YieldE x] (Just ())
-- | 'tryAwait' consumes one element when available, and returns 'Nothing'
-- at EOF.
prop_tryAwait :: [Int] -> Bool
prop_tryAwait is = exec tryAwait is == case is of
  i:rest -> Trace rest [TryE::Event ()] (Just (Just i))
  [] -> Trace is [TryE] (Just Nothing)
-- | 'panic' records the exception as an event and continues.
prop_panic :: TestException -> [Int] -> Bool
prop_panic ex is = exec (panic (Ex.toException ex)) is
  == Trace is [PanicE ex::Event()] (Just ())
-- Transducers as a monad
-- | 'return' produces no events and consumes no input.
prop_return :: Int -> [Int] -> Bool
prop_return x is = exec (return x) is == Trace is ([]::[Event()]) (Just x)
-- | (>>=) runs the second transducer on the first one's leftover input,
-- concatenating the event logs; if the first one does not finish, the
-- bind does not either.
prop_bind :: TestTransducer -> (Int -> TestTransducer) -> [Int] -> Bool
prop_bind (TestTransducer _ x) f is
  = exec (x >>= unTestTransducer . f) is == expected
  where
    expected = let
      r0@(Trace is1 evts0 out0) = exec x is
      in case out0 of
      Nothing -> r0
      Just val -> let
        !(Trace is2 evts1 out1) = exec (unTestTransducer $ f val) is1
        in Trace is2 (evts0 ++ evts1) out1
-- | Monad associativity law, observed through 'exec'.
prop_bind_assoc
  :: TestTransducer
  -> (Int -> TestTransducer)
  -> (Int -> TestTransducer)
  -> [Int]
  -> Bool
prop_bind_assoc (TestTransducer _ x) f g is =
  exec (x >>= (unTestTransducer . f) >>= (unTestTransducer . g)) is ==
  exec (x >>= (\v -> unTestTransducer (f v) >>= (unTestTransducer . g))) is
-- Composition
-- | Composing transducers pipes the outputs of the first into the second.
prop_comp
  :: TestTransducer -> TestTransducer -> [Int] -> Bool
prop_comp (TestTransducer _ x) (TestTransducer _ y) is =
  output (x ><> y) (True, is) == output y (output' x (True, is))
  -- Note: this is not a complete specification of (><>), because it
  -- doesn't care how other events are ordered except for YieldEs.
-- does not hold, see counterexamples below.
_prop_comp_assoc
  :: TestTransducer -> TestTransducer -> TestTransducer -> [Int] -> Bool
_prop_comp_assoc (TestTransducer _ x) (TestTransducer _ y) (TestTransducer _ z) is =
  exec (x ><> (y ><> z)) is == exec ((x ><> y) ><> z) is
-- | The identity transducer: forwards every awaited element unchanged
-- until EOF.
identity :: (Monad m) => Transducer Ex.SomeException i i m ()
identity = tryAwait >>= Fold.mapM_ (\item -> yield item >> identity)
-- does not hold, because (identity ><> x) asks for input even if x does not.
_prop_comp_left_id
  :: TestTransducer -> [Int] -> Bool
_prop_comp_left_id (TestTransducer _ x) is = exec (identity ><> x) is == exec x is
-- does not hold
_prop_comp_right_id
  :: TestTransducer -> [Int] -> Bool
_prop_comp_right_id (TestTransducer _ x) is
  = exec (x ><> identity) is == exec (void x) is
-- Higher-level API
-- | 'tmap' maps the function over every input, with no side effects.
prop_tmap :: [Int] -> Bool
prop_tmap is =
  summary (tmap (+1)) (True, is) == ([], map (+1) is, [], Just ())
-- | 'tfilter' keeps exactly the elements matching the predicate.
prop_tfilter :: [Int] -> Bool
prop_tfilter is =
  summary (tfilter even) (True, is) == ([], filter even is, [], Just ())
-- | 'Tr.mapM' applies the effectful function to each element, recording
-- each element as a side effect before yielding its successor.
prop_mapM :: [Int] -> Bool
prop_mapM is =
  summary (Tr.mapM f) (True, is) == ([], map (+1) is, is, Just ())
  where
    f x = do
      emit x
      return $ x + 1
-- | 'Tr.dropWhileM' drops the leading run satisfying the monadic
-- predicate and passes the rest through.
prop_dropWhileM :: [Int] -> Bool
prop_dropWhileM is =
  summary (Tr.dropWhileM f) (True, is) == ([], rest, dropped, Just ())
  where
    (dropped,rest) = span even is
    f x = do
      -- dropWhileM has to run the predicate until it fails, so
      -- here we check the predicate before emitting the tested value
      when (even x) $ emit x
      return $ even x
-- | 'tfold' reduces the whole input with an effectful fold, yielding
-- nothing and returning the accumulated sum.
prop_tfold :: [Int] -> Bool
prop_tfold is = summary (tfold (TrFold.foldM f 0)) (True, is)
  == ([], []::[()], is, Just (sum is))
  where
    f x y = do
      emit y
      return $! x + y
-- | 'tscanl' yields every intermediate accumulator (a running sum here).
prop_tscanl :: [Int] -> Bool
prop_tscanl is = summary (tscanl (TrFold.foldM f 0)) (True, is)
  == ([], scanl1 (+) is, is, Just ())
  where
    f x y = do
      emit y
      return $! x + y
-- | 'feed' prepends one element to the transducer's input stream.
prop_feed :: TestTransducer -> Int -> [Int] -> Bool
prop_feed (TestTransducer _ x) i is =
  output (feed i x) (True,is) == output x (True, i:is)
-- Fusion
-- Each prop_rewrite_* checks that the fused/rewritten combinator behaves
-- exactly like its non-rewritten (NR) reference implementation.
prop_rewrite_tmap :: [Int] -> Bool
prop_rewrite_tmap iss
  = exec (tmap (+1)) iss == exec (NR.tmap (+1)) iss
prop_rewrite_tfilter :: [Int] -> Bool
prop_rewrite_tfilter iss
  = exec (tfilter even) iss == exec (NR.tfilter even) iss
prop_rewrite_flatten :: [[Int]] -> Bool
prop_rewrite_flatten iss
  = exec flatten iss == exec NR.flatten iss
prop_rewrite_tscanl :: [Int] -> Bool
prop_rewrite_tscanl iss
  = exec (tscanl (TrFold.foldM f 0)) iss
  == exec (NR.tscanl (TrFold.foldM f 0)) iss
  where
    f x y = do
      emit y
      return $! x + y
-- does not hold, bug?
-- NOTE(review): despite the comment above, this is named prop_* (not
-- _prop_*), so a quickCheckAll-style driver would still pick it up and
-- report a failure — verify whether the name should be _prop_rewrite_tfold.
prop_rewrite_tfold :: [Int] -> Bool
prop_rewrite_tfold iss
  = exec (tfold (TrFold.foldM f 0)) iss
  == (exec (NR.tfold (TrFold.foldM f 0)) iss :: Trace Int () Int)
  where
    f x y = do
      emit y
      return $! x + y
prop_rewrite_mapM :: [Int] -> Bool
prop_rewrite_mapM iss
  = exec (Tr.mapM f) iss == exec (NR.mapM f) iss
  where
    f x = do
      emit x
      return $! x + 1
prop_rewrite_dropWhileM :: [Int] -> Bool
prop_rewrite_dropWhileM iss
  = exec (Tr.dropWhileM f) iss == exec (NR.dropWhileM f) iss
  where
    f x = do
      emit x
      return $ even x
-- | A composed pipeline behaves the same whether or not the downstream
-- stage is protected from fusion rewrites.
prop_flatten_tfilter :: [[Int]] -> Bool
prop_flatten_tfilter iss
  = exec (flatten ><> tfilter even) iss
  == exec (flatten ><> noFusion (tfilter even)) iss
-- | Identity wrapper used to block GHC rewrite rules; the NOINLINE
-- pragma is what makes it effective — do not remove it.
noFusion :: a -> a
noFusion = id
{-# NOINLINE noFusion #-}
-- Parallel
-- | 'parT' must not change the observable behaviour of an IO transducer:
-- outputs, recorded side effects, and result all agree with the
-- unparallelised run.
prop_par :: [Int] -> QC.Property
prop_par is =
  QC.morallyDubiousIOProperty $ do
    historyRef <- newIORef Q.empty
    return $ QC.forAll (arbitraryIOTrans historyRef) $ \(IOTrans _ iot) ->
      QC.morallyDubiousIOProperty $
        (==)
          <$> evalIOTrans historyRef iot is
          <*> evalIOTrans historyRef (parT iot) is
-- | Run an IO transducer over the given input, collecting its outputs,
-- the side-effect history written to the shared IORef, and the result.
-- The history ref is reset before each run so runs are comparable.
evalIOTrans
  :: IORef (Q.Seq Int)
  -> Transducer Ex.SomeException i o IO a
  -> [i]
  -> IO ([o], [Int], Maybe a)
evalIOTrans historyRef trans input = do
  writeIORef historyRef Q.empty
  outRef <- newIORef Q.empty
  resultRef <- newIORef Nothing
  -- Feed the input upstream, capture the result via an IORef write, and
  -- drain every output into outRef downstream.
  runIOTrans $
    yieldList input
    ><> (trans >>= lift . writeIORef resultRef . Just)
    ><> Tr.tfold (TrFold.mapM_ $ \a -> modifyIORef outRef (Q.|>a))
  out <- readIORef outRef
  effects <- readIORef historyRef
  result <- readIORef resultRef
  return (Fold.toList out, Fold.toList effects, result)
-- | Drive an IO transducer to completion, discarding yields, answering
-- every await with EOF, and swallowing panics; only the lifted IO
-- actions are actually executed.
runIOTrans :: Transducer Ex.SomeException i o IO a -> IO ()
runIOTrans (Trs j) = loop j
  where
    loop x = case toView x of
      Pure _ -> return ()
      Impure (Yield _ cont) -> loop cont
      Impure (Try cont) -> loop (cont Nothing)
      Impure (Panic _ cont) -> loop cont
      Impure (TLift a) -> a >>= loop
-- | Generate a random IO transducer whose lifted actions append a random
-- tag to the shared history ref, so effect ordering is observable.
arbitraryIOTrans :: IORef (Q.Seq Int) -> QC.Gen IOTrans
arbitraryIOTrans historyRef = fmap make $ arbitraryTransducerWith $ \gen -> do
  (doc, val) <- gen
  effect <- arbitrary
  let doc' =
        opl 1 ">>"
          (app (lit "write") $ lit $ show effect)
          (app (lit "return") doc)
  return (doc', val <$ modifyIORef historyRef (Q.|>effect))
  where
    make (doc, trans) = IOTrans (renderPDoc doc) trans
-- | An IO transducer paired with a rendering of how it was built,
-- used as the Show output for QuickCheck counterexamples.
data IOTrans = IOTrans String (Transducer Ex.SomeException Int Int IO Int)
instance Show IOTrans where
  show (IOTrans s _) = s
-- Counterexamples
-- prop_comp_assoc
-- These three hand-built transducers witness the failure of associativity
-- of (><>) tested (and disabled) above in _prop_comp_assoc.
_ce_trans30, _ce_trans31, _ce_trans32 :: Transducer Ex.SomeException Int Int TestMonad Int
_ce_trans30 = Trs (fromView (Impure (Try (const (fromView (Pure (1)))))))
_ce_trans31 = Trs (fromView (Pure 1))
_ce_trans32 = Trs
  (fromView
    (Impure
      (Try
        (const
          (fromView
            (Impure
              (TLift
                (tell (Q.fromList [1]) >> return (fromView (Pure (0)))))))))))
| JohnLato/transducers | tests/transducers.hs | bsd-3-clause | 14,965 | 166 | 26 | 3,416 | 5,383 | 2,801 | 2,582 | 351 | 7 |
{-# LINE 1 "GHC.Stats.hsc" #-}
{-# LANGUAGE Trustworthy #-}
{-# LINE 2 "GHC.Stats.hsc" #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- | This module provides access to internal garbage collection and
-- memory usage statistics. These statistics are not available unless
-- a program is run with the @-T@ RTS flag.
--
-- This module is GHC-only and should not be considered portable.
--
-- @since 4.5.0.0
-----------------------------------------------------------------------------
module GHC.Stats
( GCStats(..)
, getGCStats
, getGCStatsEnabled
) where
import Control.Monad
import Data.Int
import GHC.Base
import GHC.Read ( Read )
import GHC.Show ( Show )
import GHC.IO.Exception
import Foreign.Marshal.Alloc
import Foreign.Storable
import Foreign.Ptr
{-# LINE 32 "GHC.Stats.hsc" #-}
-- Writes the RTS's GC statistics struct into the supplied buffer; the
-- expected size and field offsets are hard-coded in 'getGCStats' below
-- (generated by hsc2hs from the C struct layout).
foreign import ccall "getGCStats" getGCStats_ :: Ptr () -> IO ()
-- | Returns whether GC stats have been enabled (with @+RTS -T@, for example).
--
-- @since 4.6.0.0
foreign import ccall "getGCStatsEnabled" getGCStatsEnabled :: IO Bool
-- I'm probably violating a bucket of constraints here... oops.
-- | Statistics about memory usage and the garbage collector. Apart from
-- 'currentBytesUsed' and 'currentBytesSlop' all are cumulative values since
-- the program started.
--
-- Sizes are in bytes (except 'peakMegabytesAllocated'); times are in
-- seconds, as reported by the RTS.
--
-- @since 4.5.0.0
data GCStats = GCStats
    { -- | Total number of bytes allocated
    bytesAllocated :: !Int64
    -- | Number of garbage collections performed (any generation, major and
    -- minor)
    , numGcs :: !Int64
    -- | Maximum number of live bytes seen so far
    , maxBytesUsed :: !Int64
    -- | Number of byte usage samples taken, or equivalently
    -- the number of major GCs performed.
    , numByteUsageSamples :: !Int64
    -- | Sum of all byte usage samples, can be used with
    -- 'numByteUsageSamples' to calculate averages with
    -- arbitrary weighting (if you are sampling this record multiple
    -- times).
    , cumulativeBytesUsed :: !Int64
    -- | Number of bytes copied during GC
    , bytesCopied :: !Int64
    -- | Number of live bytes at the end of the last major GC
    , currentBytesUsed :: !Int64
    -- | Current number of bytes lost to slop
    , currentBytesSlop :: !Int64
    -- | Maximum number of bytes lost to slop at any one time so far
    , maxBytesSlop :: !Int64
    -- | Maximum number of megabytes allocated
    , peakMegabytesAllocated :: !Int64
    -- | CPU time spent running mutator threads. This does not include
    -- any profiling overhead or initialization.
    , mutatorCpuSeconds :: !Double
    -- | Wall clock time spent running mutator threads. This does not
    -- include initialization.
    , mutatorWallSeconds :: !Double
    -- | CPU time spent running GC
    , gcCpuSeconds :: !Double
    -- | Wall clock time spent running GC
    , gcWallSeconds :: !Double
    -- | Total CPU time elapsed since program start
    , cpuSeconds :: !Double
    -- | Total wall clock time elapsed since start
    , wallSeconds :: !Double
    -- | Number of bytes copied during GC, minus space held by mutable
    -- lists held by the capabilities.  Can be used with
    -- 'parMaxBytesCopied' to determine how well parallel GC utilized
    -- all cores.
    , parTotBytesCopied :: !Int64
    -- | Sum of number of bytes copied each GC by the most active GC
    -- thread each GC. The ratio of 'parTotBytesCopied' divided by
    -- 'parMaxBytesCopied' approaches 1 for a maximally sequential
    -- run and approaches the number of threads (set by the RTS flag
    -- @-N@) for a maximally parallel run.
    , parMaxBytesCopied :: !Int64
    } deriving (Show, Read)
{-
, initCpuSeconds :: !Double
, initWallSeconds :: !Double
-}
-- | Retrieves garbage collection and memory statistics as of the last
-- garbage collection. If you would like your statistics as recent as
-- possible, first run a 'System.Mem.performGC'.
--
-- Throws an 'IOError' of kind 'UnsupportedOperation' unless the program
-- was started with @+RTS -T@ (checked via 'getGCStatsEnabled').
--
-- @since 4.5.0.0
getGCStats :: IO GCStats
getGCStats = do
  statsEnabled <- getGCStatsEnabled
  unless statsEnabled .  ioError $ IOError
    Nothing
    UnsupportedOperation
    ""
    "getGCStats: GC stats not enabled. Use `+RTS -T -RTS' to enable them."
    Nothing
    Nothing
  -- 144 is the size of the C GCStats struct; the peekByteOff offsets
  -- below were generated by hsc2hs and must stay in sync with the RTS.
  allocaBytes ((144)) $ \p -> do
{-# LINE 123 "GHC.Stats.hsc" #-}
    getGCStats_ p
    bytesAllocated <- ((\hsc_ptr -> peekByteOff hsc_ptr 0)) p
{-# LINE 125 "GHC.Stats.hsc" #-}
    numGcs <- ((\hsc_ptr -> peekByteOff hsc_ptr 8)) p
{-# LINE 126 "GHC.Stats.hsc" #-}
    numByteUsageSamples <- ((\hsc_ptr -> peekByteOff hsc_ptr 16)) p
{-# LINE 127 "GHC.Stats.hsc" #-}
    maxBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 24)) p
{-# LINE 128 "GHC.Stats.hsc" #-}
    cumulativeBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 32)) p
{-# LINE 129 "GHC.Stats.hsc" #-}
    bytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 40)) p
{-# LINE 130 "GHC.Stats.hsc" #-}
    currentBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 48)) p
{-# LINE 131 "GHC.Stats.hsc" #-}
    currentBytesSlop <- ((\hsc_ptr -> peekByteOff hsc_ptr 56)) p
{-# LINE 132 "GHC.Stats.hsc" #-}
    maxBytesSlop <- ((\hsc_ptr -> peekByteOff hsc_ptr 64)) p
{-# LINE 133 "GHC.Stats.hsc" #-}
    peakMegabytesAllocated <- ((\hsc_ptr -> peekByteOff hsc_ptr 72)) p
{-# LINE 134 "GHC.Stats.hsc" #-}
    {-
    initCpuSeconds <- (# peek GCStats, init_cpu_seconds) p
    initWallSeconds <- (# peek GCStats, init_wall_seconds) p
    -}
    mutatorCpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 96)) p
{-# LINE 139 "GHC.Stats.hsc" #-}
    mutatorWallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 104)) p
{-# LINE 140 "GHC.Stats.hsc" #-}
    gcCpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 112)) p
{-# LINE 141 "GHC.Stats.hsc" #-}
    gcWallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 120)) p
{-# LINE 142 "GHC.Stats.hsc" #-}
    cpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 128)) p
{-# LINE 143 "GHC.Stats.hsc" #-}
    wallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 136)) p
{-# LINE 144 "GHC.Stats.hsc" #-}
    parTotBytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 80)) p
{-# LINE 145 "GHC.Stats.hsc" #-}
    parMaxBytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 88)) p
{-# LINE 146 "GHC.Stats.hsc" #-}
    -- RecordWildCards: the local names above exactly match the fields.
    return GCStats { .. }
{-
-- Nontrivial to implement: TaskStats needs arbitrarily large
-- amounts of memory, spark stats wants to use SparkCounters
-- but that needs a new rts/ header.
data TaskStats = TaskStats
{ taskMutCpuSeconds :: Int64
, taskMutWallSeconds :: Int64
, taskGcCpuSeconds :: Int64
, taskGcWallSeconds :: Int64
} deriving (Show, Read)
data SparkStats = SparkStats
{ sparksCreated :: Int64
, sparksDud :: Int64
, sparksOverflowed :: Int64
, sparksConverted :: Int64
, sparksGcd :: Int64
, sparksFizzled :: Int64
} deriving (Show, Read)
-- We also could get per-generation stats, which requires a
-- non-constant but at runtime known about of memory.
-}
| phischu/fragnix | builtins/base/GHC.Stats.hs | bsd-3-clause | 7,011 | 17 | 16 | 1,446 | 902 | 511 | 391 | 106 | 1 |
{-- snippet plus --}
-- | Addition, declared in infix position: an ordinary (alphanumeric)
--   function can be defined with its arguments on either side of the
--   backticked name.
x `plus` y = x + y
-- | A pair type whose type constructor and data constructor are both
--   declared infix using backticks.
data a `Pair` b = a `Pair` b
              deriving (Show)
-- we can use the constructor either prefix or infix
foo = Pair 1 2
bar = True `Pair` "quux"
{-- /snippet plus --}
| binesiyu/ifl | examples/ch04/Plus.hs | mit | 220 | 0 | 6 | 65 | 68 | 40 | 28 | 5 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
<title>Customizable HTML Report</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/customreport/src/main/javahelp/org/zaproxy/zap/extension/customreport/resources/help_el_GR/helpset_el_GR.hs | apache-2.0 | 970 | 79 | 66 | 158 | 411 | 208 | 203 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Cardano.Wallet.Kernel.CoinSelection.Generic.Random (
PrivacyMode(..)
, random
, findRandomOutput
) where
import Universum
import Cardano.Wallet.Kernel.CoinSelection.Generic
import qualified Cardano.Wallet.Kernel.CoinSelection.Generic.LargestFirst as LargestFirst
{-------------------------------------------------------------------------------
Random input selection algorithm
-------------------------------------------------------------------------------}
-- | Whether to disguise change outputs by aiming for a change value of a
-- size comparable to the payment itself (see 'random' for the rationale).
data PrivacyMode = PrivacyModeOn | PrivacyModeOff
-- | Random input selection
--
-- Random input selection has the advantage that it is self-correcting, in the
-- following sense: suppose that 90% of our UTxO consists of small outputs;
-- then random selection has a 90% chance of choosing those small outputs.
--
-- For each output we add a change output that is between 0.5 and 2 times the
-- size of the output, making it hard to identify. This has the additional
-- benefit of introducing another self-correction: if there are frequent
-- requests for payments around certain size, the UTxO will contain lots of
-- available change outputs of around that size.
random :: forall utxo m. (MonadRandom m, PickFromUtxo utxo)
       => PrivacyMode          -- ^ Hide change addresses?
       -> Word64               -- ^ Maximum number of inputs
       -> [Output (Dom utxo)]  -- ^ Outputs to include
       -> CoinSelT utxo CoinSelHardErr m [CoinSelResult (Dom utxo)]
random privacyMode maxNumInputs outs = do
    balance <- gets utxoBalance
    -- If the random strategy fails (any error), retry the whole selection
    -- with the deterministic largest-first strategy as a fallback.
    mapCoinSelErr (withTotalBalance balance) $
        coinSelPerGoal step maxNumInputs outs `catchError`
        (\_ -> LargestFirst.largestFirst maxNumInputs outs)
  where
    -- | Perform a coin selection on the next output using the remaining
    -- inputs. `coinSelPerGoal` reduces the UTxO (and the number of allowed)
    -- inputs as it maps over the outputs. So, in the first iteration we have:
    --
    -- `remainingNumInputs == maxNumInputs`, and for the second one, we have
    --
    -- `remainingNumInputs == maxNumInputs - k`, where `k` is the number of
    -- inputs selected during the first iteration.
    step
        :: Word64
        -> Output (Dom utxo)
        -> CoinSelT utxo CoinSelHardErr m (CoinSelResult (Dom utxo))
    step remainingNumInputs out = defCoinSelResult out <$>
        inRange remainingNumInputs (target privacyMode (outVal out))
    -- | Because of the recursive and stateful nature of `coinSelPerGoal`,
    -- errors are thrown within each step using values available at the moment
    -- where the error gets thrown. As a result, errors reports non-sensical
    -- balances and UTxO state.
    -- As a work-around, we remap errors to what they ought to be...
    withTotalBalance
        :: Value (Dom utxo)
        -> CoinSelHardErr
        -> CoinSelHardErr
    withTotalBalance balance = \case
        e@CoinSelHardErrOutputCannotCoverFee{} -> e
        e@CoinSelHardErrOutputIsRedeemAddress{} -> e
        e@CoinSelHardErrCannotCoverFee{} -> e
        CoinSelHardErrMaxInputsReached _ -> CoinSelHardErrMaxInputsReached
            (show maxNumInputs)
        CoinSelHardErrUtxoExhausted _ _ -> CoinSelHardErrUtxoExhausted
            (pretty balance)
            (pretty payment)
      where
        payment = unsafeValueSum $ outVal <$> outs
    -- Build the target range for one output; with privacy off (or if the
    -- ideal range overflows) the range degenerates to exactly the value.
    target :: PrivacyMode -> Value (Dom utxo) -> TargetRange (Dom utxo)
    target PrivacyModeOn val = fromMaybe (target PrivacyModeOff val)
                                         (idealRange val)
    target PrivacyModeOff val = TargetRange {
          targetMin = val
        , targetAim = val
        , targetMax = val
        }
    -- 'Nothing' if doubling/tripling the value would overflow.
    idealRange :: Value (Dom utxo) -> Maybe (TargetRange (Dom utxo))
    idealRange val = do
        -- Minimum value: no change at all
        let targetMin = val
        -- Ideal case: change equal to the value
        targetAim <- valueAdjust RoundUp 2.0 val
        -- Terminating condition: change twice the value
        targetMax <- valueAdjust RoundUp 3.0 val
        return TargetRange{..}
{-------------------------------------------------------------------------------
Lower level API
-------------------------------------------------------------------------------}
-- | Target range for picking inputs
data TargetRange dom = TargetRange {
      targetMin :: Value dom  -- ^ Must at least cover this (no change)
    , targetAim :: Value dom  -- ^ Ideal total to select
    , targetMax :: Value dom  -- ^ Upper bound; stop improving beyond this
    }
-- | Select random inputs in the specified range
--
-- If we exceed the maximum number of inputs whilst trying to reach the minimum
-- end of the range, fallback on largest first to cover the minimum, then
-- proceed as normal with random selection to try and improve the change amount.
inRange :: (PickFromUtxo utxo, MonadRandom m)
        => Word64
        -> TargetRange (Dom utxo)
        -> CoinSelT utxo CoinSelHardErr m (SelectedUtxo (Dom utxo))
inRange maxNumInputs TargetRange{..} = do
    atLeastWithFallback maxNumInputs targetMin
      >>= improve maxNumInputs targetAim targetMax
-- | Select random inputs to cover the required minimum value.
--
-- Falls back on 'LargestFirst.atLeast' if we exceed 'maxNumInputs'
atLeastWithFallback :: forall utxo m. (PickFromUtxo utxo, MonadRandom m)
                    => Word64
                    -> Value (Dom utxo)
                    -> CoinSelT utxo CoinSelHardErr m (SelectedUtxo (Dom utxo))
atLeastWithFallback maxNumInputs targetMin =
    atLeastNoFallback maxNumInputs targetMin `catchJustSoft` \_ ->
      LargestFirst.atLeast maxNumInputs targetMin
-- | Select random inputs to cover the required minimum value.
--
-- Fails if we exceed 'maxNumInputs'
atLeastNoFallback :: forall utxo m. (PickFromUtxo utxo, MonadRandom m)
                  => Word64
                  -> Value (Dom utxo)
                  -> CoinSelT utxo CoinSelErr m (SelectedUtxo (Dom utxo))
atLeastNoFallback maxNumInputs targetMin = do
    balance <- gets utxoBalance
    go emptySelection balance
  where
    -- Keep picking random outputs until the selection covers targetMin.
    -- Exceeding maxNumInputs raises a *soft* error so the caller can fall
    -- back on largest-first; an empty UTxO is a hard error.
    go :: SelectedUtxo (Dom utxo)
       -> Value (Dom utxo)
       -> CoinSelT utxo CoinSelErr m (SelectedUtxo (Dom utxo))
    go selected balance
      | sizeToWord (selectedSize selected) > maxNumInputs =
          throwError $ CoinSelErrSoft CoinSelSoftErr
      | selectedBalance selected >= targetMin =
          return selected
      | otherwise = do
          io <- findRandomOutput >>= maybe (throwError $ errUtxoExhausted balance) return
          go (select io selected) balance
    errUtxoExhausted :: Value (Dom utxo) -> CoinSelErr
    errUtxoExhausted balance =
        CoinSelErrHard $ CoinSelHardErrUtxoExhausted
          (pretty balance)
          (pretty targetMin)
-- | Select random additional inputs with the aim of improving the change amount
--
-- This never throws an error.
improve :: forall utxo e m. (PickFromUtxo utxo, MonadRandom m)
        => Word64                   -- ^ Total maximum number of inputs
        -> Value (Dom utxo)         -- ^ Total UTxO balance to aim for
        -> Value (Dom utxo)         -- ^ Maximum total UTxO balance
        -> SelectedUtxo (Dom utxo)  -- ^ UTxO selected so far
        -> CoinSelT utxo e m (SelectedUtxo (Dom utxo))
improve maxNumInputs targetAim targetMax = go
  where
    -- Preconditions
    --
    -- > 0 <= acc < targetAim
    --
    -- Invariant:
    --
    -- > acc == utxoBalance selected
    --
    -- Relies on the following self-correcting property: if the UTxO
    -- has many small entries, then we should be able to reach close
    -- to the aim value. BUT if this is the case, then the probability
    -- that when we pick a random value from the UTxO that we overshoot
    -- the upper end of the range is low. Here we terminate early if we
    -- happen to pick a value from the UTxO that overshoots the upper
    -- of the range; this is likely to happen precisely when we have
    -- a low probability of finding a value close to the aim.
    go :: SelectedUtxo (Dom utxo) -> CoinSelT utxo e m (SelectedUtxo (Dom utxo))
    go selected = do
        mIO <- tryFindRandomOutput isImprovement
        case mIO of
          Nothing        -> return selected
          Just selected' ->
            if selectedBalance selected' >= targetAim
              then return selected'
              else go selected'
      where
        -- A new value is an improvement if
        --
        -- * We don't overshoot the upper end of the range
        -- * We get closer to the aim
        -- * We don't use more than the maximum number of inputs
        --
        -- Note that the second property is a bit subtle: it is trivially
        -- true if both @acc@ and @acc + val@ are smaller than @targetAim@
        --
        -- > value | ------|------------|----------------|-------------
        -- >              acc    (acc + val)      targetAim
        --
        -- but if @acc + val@ exceeds the aim, we are comparing (absolute)
        -- distance to the aim
        --
        -- > value | ------|-----------|---------------|--------
        -- >              acc      targetAim      (acc + val)
        isImprovement :: UtxoEntry (Dom utxo) -> Maybe (SelectedUtxo (Dom utxo))
        isImprovement io = do
            guard $ and [
                selectedBalance selected' <= targetMax
              , valueDist targetAim (selectedBalance selected')
                  < valueDist targetAim (selectedBalance selected)
              , sizeToWord (selectedSize selected') <= maxNumInputs
              ]
            return selected'
          where
            selected' = select io selected
{-------------------------------------------------------------------------------
Auxiliary: selecting random outputs
-------------------------------------------------------------------------------}
-- | Select a random output
findRandomOutput :: (MonadRandom m, PickFromUtxo utxo)
                 => CoinSelT utxo e m (Maybe (UtxoEntry (Dom utxo)))
findRandomOutput =
    tryFindRandomOutput Just
-- | Find a random output, and return it if it satisfies the predicate
--
-- If the predicate is not satisfied, state is not changed.
tryFindRandomOutput :: forall utxo e m a. (MonadRandom m, PickFromUtxo utxo)
                    => (UtxoEntry (Dom utxo) -> Maybe a)
                    -> CoinSelT utxo e m (Maybe a)
tryFindRandomOutput p = do
    utxo <- get
    -- pickRandom yields (entry, utxo-without-entry); the entry is only
    -- committed (state updated) when the predicate accepts it.
    mIO <- (>>= p') <$> pickRandom utxo
    case mIO of
      Nothing -> return Nothing
      Just (a, utxo') -> do put utxo' ; return $ Just a
  where
    -- Lift the predicate over the (entry, remaining-utxo) pair.
    p' :: (UtxoEntry (Dom utxo), utxo) -> Maybe (a, utxo)
    p' (io, utxo) = (, utxo) <$> p io
| input-output-hk/cardano-sl | wallet/src/Cardano/Wallet/Kernel/CoinSelection/Generic/Random.hs | apache-2.0 | 10,737 | 0 | 17 | 2,919 | 1,800 | 947 | 853 | -1 | -1 |
{-# LANGUAGE BangPatterns, FlexibleInstances, UndecidableInstances, CPP #-}
#include "fusion-phases.h"
-- | Irregular two dimensional arrays.
---
-- * TODO: The inner arrays should be unboxed so we don't get an unboxing overhead
-- for every call to unsafeIndex2. This might need an extension to the GHC
--      runtime if we also want to convert a U.Vector directly to this form.
--
-- * TODO: We currently only allow primitive types to be in a Vectors, but
-- in future we'll want `Vectors` of tuples etc.
--
module Data.Array.Parallel.Unlifted.Vectors
( Vectors(..)
, Unboxes
, empty
, singleton
, length
, index
, index2
, unsafeIndex
, unsafeIndex2
, unsafeIndexUnpack
, append
, fromVector
, toVector)
where
import qualified Data.Array.Parallel.Base as B
import qualified Data.Array.Parallel.Unlifted.ArrayArray as AA
import qualified Data.Primitive.ByteArray as P
import qualified Data.Primitive.Types as P
import qualified Data.Primitive as P
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Primitive as R
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector as V
import Data.Vector.Unboxed (Unbox)
import Prelude hiding (length)
import Data.Word
import Control.Monad.ST
-- | Class of element types that can be used in a `Vectors`
class R.Prim a => Unboxes a
instance Unboxes Int
instance Unboxes Word8
instance Unboxes Float
instance Unboxes Double
-- | A 2-dimensional array,
--   where the inner arrays can all have different lengths.
--
--   An inner vector @i@ lives in chunk @i@, beginning at the element
--   offset stored in the starts array, with the length stored in the
--   lengths array (both arrays hold one Int per inner vector).
data Vectors a
        = Vectors
                {-# UNPACK #-} !Int             -- number of inner vectors
                {-# UNPACK #-} !P.ByteArray     -- starting index of each vector in its chunk
                {-# UNPACK #-} !P.ByteArray     -- lengths of each inner vector
                {-# UNPACK #-} !(AA.ArrayArray P.ByteArray)  -- chunks
-- Shown by conversion to nested vectors; NOINLINE keeps this debug-only
-- path out of fused code.
instance (Unboxes a, Unbox a, Show a) => Show (Vectors a) where
        show = show . toVector
        {-# NOINLINE show #-}
-- | Construct an empty `Vectors` with no arrays of no elements.
empty :: Vectors a
empty
 = runST
 $ do   mba     <- P.newByteArray 0
        ba      <- P.unsafeFreezeByteArray mba
        maa     <- AA.newArrayArray 0
        -- NOTE(review): 'newArrayArray 0' allocates a zero-element array,
        -- yet index 0 is written here; presumably the primitive tolerates
        -- this — verify against the ArrayArray implementation.
        AA.writeArrayArray maa 0 ba
        aa      <- AA.unsafeFreezeArrayArray maa
        return  $ Vectors 0 ba ba aa
{-# INLINE_U empty #-}
-- | Construct a `Vectors` containing data from a single unboxed array.
singleton :: (Unboxes a, Unbox a) => U.Vector a -> Vectors a
singleton vec
 = runST
 $ do   -- Expose the vector's underlying ByteArray, start and length.
        R.MVector start len mbaData <- R.unsafeThaw $ G.convert vec
        baData  <- P.unsafeFreezeByteArray mbaData
        -- One-element starts array.
        mbaStarts <- P.newByteArray (P.sizeOf (undefined :: Int))
        P.writeByteArray mbaStarts 0 start
        baStarts  <- P.unsafeFreezeByteArray mbaStarts
        -- One-element lengths array.
        mbaLengths <- P.newByteArray (P.sizeOf (undefined :: Int))
        P.writeByteArray mbaLengths 0 len
        baLengths  <- P.unsafeFreezeByteArray mbaLengths
        -- Single chunk holding the vector's data.
        maaChunks <- AA.newArrayArray 1
        AA.writeArrayArray maaChunks 0 baData
        aaChunks  <- AA.unsafeFreezeArrayArray maaChunks
        return  $ Vectors 1 baStarts baLengths aaChunks
{-# INLINE_U singleton #-}
-- | Yield the number of vectors in a `Vectors`.
length :: Unboxes a => Vectors a -> Int
length (Vectors len _ _ _) = len
{-# INLINE_U length #-}
-- | Take one of the outer vectors from a `Vectors`.
--   No bounds checking is performed on the index.
unsafeIndex :: (Unboxes a, Unbox a) => Vectors a -> Int -> U.Vector a
unsafeIndex (Vectors _ starts lens arrs) ix
 = G.convert
 $ runST
 $ do   let start = P.indexByteArray starts ix
        let len = P.indexByteArray lens ix
        let arr = AA.indexArrayArray arrs ix
        -- Re-wrap the chunk as a primitive vector slice (no copying).
        marr <- P.unsafeThawByteArray arr
        let mvec = R.MVector start len marr
        R.unsafeFreeze mvec
{-# INLINE_U unsafeIndex #-}
-- | Take one of the outer vectors from a `Vectors`, with bounds checking
index :: (Unboxes a, Unbox a)
      => String        -- ^ source position
      -> Vectors a -> Int -> U.Vector a
index here vec ix
 = B.check here (length vec) ix
 $ unsafeIndex vec ix
{-# INLINE_U index #-}
-- | Retrieve a single element from a `Vectors`,
--   given the outer and inner indices.  No bounds checking.
unsafeIndex2 :: Unboxes a => Vectors a -> Int -> Int -> a
unsafeIndex2 (Vectors _ starts _ arrs) ix1 ix2
 = (arrs `AA.indexArrayArray` ix1) `P.indexByteArray` ((starts `P.indexByteArray` ix1) + ix2)
{-# INLINE_U unsafeIndex2 #-}
-- | Retrieve a single element from a `Vectors`,
--   given the outer and inner indices, with bounds checking.
index2 :: Unboxes a
       => String        -- ^ source position
       -> Vectors a -> Int -> Int -> a
index2 here vec@(Vectors _ _ lens _) ix1 ix2
 = B.check (here++"(index2.ix1)") (length vec) ix1
 $ B.check (here++"(index2.ix2)") (lens `P.indexByteArray` ix1) ix2
 $ unsafeIndex2 vec ix1 ix2
{-# INLINE_U index2 #-}
-- | Retrieve an inner array from a `Vectors`, returning the array data,
--   starting index in the data, and vector length.  No bounds checking.
unsafeIndexUnpack :: Unboxes a => Vectors a -> Int -> (P.ByteArray, Int, Int)
unsafeIndexUnpack (Vectors _ starts lens arrs) ix
 =      ( arrs   `AA.indexArrayArray` ix
        , starts `P.indexByteArray` ix
        , lens   `P.indexByteArray` ix)
{-# INLINE_U unsafeIndexUnpack #-}
-- | Appending two `Vectors` uses work proportional to
-- the length of the outer arrays.
-- | Appending two `Vectors` uses work proportional to
--   the length of the outer arrays.
--
--   Chunk data is shared, not copied: only the starts, lengths and
--   chunk tables are rebuilt.  The per-chunk start offsets stay valid
--   because each inner vector keeps its own chunk.
append :: (Unboxes a, Unbox a) => Vectors a -> Vectors a -> Vectors a
append (Vectors len1 starts1 lens1 chunks1)
       (Vectors len2 starts2 lens2 chunks2)
 = runST
 $ do   let len' = len1 + len2

        -- append starts into result
        let lenStarts1 = P.sizeofByteArray starts1
        let lenStarts2 = P.sizeofByteArray starts2
        maStarts <- P.newByteArray (lenStarts1 + lenStarts2)
        P.copyByteArray maStarts 0          starts1 0 lenStarts1
        P.copyByteArray maStarts lenStarts1 starts2 0 lenStarts2
        starts'  <- P.unsafeFreezeByteArray maStarts

        -- append lens into result
        -- (The destination offset here previously used 'lenStarts1';
        -- that was only accidentally correct because both arrays store
        -- one Int per vector, so the byte sizes happen to coincide.)
        let lenLens1 = P.sizeofByteArray lens1
        let lenLens2 = P.sizeofByteArray lens2
        maLens   <- P.newByteArray (lenLens1 + lenLens2)
        P.copyByteArray maLens 0        lens1 0 lenLens1
        P.copyByteArray maLens lenLens1 lens2 0 lenLens2
        lens'    <- P.unsafeFreezeByteArray maLens

        -- append chunk tables into result
        maChunks <- AA.newArrayArray len'
        AA.copyArrayArray maChunks 0    chunks1 0 len1
        AA.copyArrayArray maChunks len1 chunks2 0 len2
        chunks'  <- AA.unsafeFreezeArrayArray maChunks

        return $ Vectors len' starts' lens' chunks'
{-# INLINE_U append #-}
-- | Convert a boxed vector of unboxed vectors to a `Vectors`.
fromVector :: (Unboxes a, Unbox a) => V.Vector (U.Vector a) -> Vectors a
fromVector vecs
 = runST
 $ do   let len = V.length vecs
        -- Unpack every inner vector; keep only its start offset and length.
        let (_, vstarts, vlens) = V.unzip3 $ V.map unpackUVector vecs
        -- Pack the collected starts and lengths into flat ByteArrays.
        -- NOTE(review): the start offset that unpackUVector returns is
        -- discarded here, i.e. this assumes the freshly converted vector
        -- begins at offset 0 in its ByteArray -- TODO confirm.
        let (baStarts, _, _) = unpackUVector $ V.convert vstarts
        let (baLens, _, _) = unpackUVector $ V.convert vlens
        -- Store the ByteArray of each inner vector as one chunk.
        mchunks <- AA.newArrayArray len
        V.zipWithM_
                (\i vec
                   -> let (ba, _, _) = unpackUVector vec
                      in AA.writeArrayArray mchunks i ba)
                (V.enumFromN 0 len)
                vecs
        chunks <- AA.unsafeFreezeArrayArray mchunks
        return $ Vectors len baStarts baLens chunks
{-# INLINE_U fromVector #-}
-- | Convert a `Vectors` to a boxed vector of unboxed vectors.
toVector :: (Unboxes a, Unbox a) => Vectors a -> V.Vector (U.Vector a)
toVector vectors
 = V.generate (length vectors) (unsafeIndex vectors)
{-# INLINE_U toVector #-}
-- | Unpack an unboxed vector into array data, starting index, and vector length.
--
--   No copying of the element data takes place: the vector is thawed and
--   immediately re-frozen in place, so the returned ByteArray aliases the
--   (converted) vector's own storage.
unpackUVector :: (Unbox a, P.Prim a) => U.Vector a -> (P.ByteArray, Int, Int)
unpackUVector vec
 = runST
 $ do   let pvec = V.convert vec
        -- unsafeThaw exposes the underlying MutableByteArray without copying;
        -- unsafeFreezeByteArray then gives us back an immutable view of it.
        R.MVector start len mba <- R.unsafeThaw pvec
        ba <- P.unsafeFreezeByteArray mba
        return (ba, start, len)
{-# INLINE_U unpackUVector #-}
| mainland/dph | dph-prim-seq/Data/Array/Parallel/Unlifted/Vectors.hs | bsd-3-clause | 8,684 | 0 | 15 | 2,617 | 2,045 | 1,048 | 997 | -1 | -1 |
-- |
-- Module: Network.Riak
-- Copyright: (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Mark Hibberd <mark@hibberd.id.au>, Nathan Hunter <nhunter@janrain.com>
-- Stability: experimental
-- Portability: portable
--
-- A client for the Riak decentralized data store.
--
-- The functions in this module use JSON as the storage
-- representation, and automatically perform conflict resolution
-- during storage and retrieval.
--
-- This library is organized to allow a tradeoff between power
-- and ease of use. If you would like a different degree of
-- automation with storage and conflict resolution, you may want to
-- use one of the following modules (ranked from easiest to most
-- tricky to use):
--
-- [Network.Riak.JSON.Resolvable] JSON for storage, automatic conflict
-- resolution. (This module actually re-exports its definitions.)
-- This is the easiest module to work with.
--
-- [Network.Riak.JSON] JSON for storage, manual conflict resolution.
--
-- [Network.Riak.Value.Resolvable] More complex (but still automatic)
-- storage, automatic conflict resolution.
--
-- [Network.Riak.Value] More complex (but still automatic) storage,
-- manual conflict resolution.
--
-- [Network.Riak.Basic] manual storage, manual conflict resolution.
-- This is the most demanding module to work with, as you must encode
-- and decode data yourself, and handle all conflict resolution
-- yourself.
--
-- [Network.Riak.CRDT] CRDT operations.
--
-- A short getting started guide is available at <http://docs.basho.com/riak/latest/dev/taste-of-riak/haskell/>
--
module Network.Riak
(
-- * Client configuration and identification
ClientID
, Client(..)
, defaultClient
, getClientID
-- * Connection management
, Connection(..)
, connect
, disconnect
, ping
, getServerInfo
-- * Data management
, Quorum(..)
, Resolvable(..)
, get
, getMany
, getByIndex
, addIndexes
, modify
, modify_
, delete
-- ** Low-level modification functions
, put
, putIndexed
, putMany
-- * Metadata
, listBuckets
, foldKeys
, getBucket
, setBucket
-- * Map/reduce
, mapReduce
-- * Types
, IndexQuery(..)
, IndexValue(..)
) where
import Network.Riak.Basic hiding (get, put, put_)
import Network.Riak.JSON.Resolvable (get, getMany, modify, modify_, put, putIndexed, putMany)
import Network.Riak.Resolvable (Resolvable(..))
import Network.Riak.Value (getByIndex, addIndexes)
import Network.Riak.Types.Internal (IndexQuery(..), IndexValue(..))
| k-bx/riak-haskell-client | src/Network/Riak.hs | apache-2.0 | 2,586 | 0 | 6 | 506 | 264 | 194 | 70 | 35 | 0 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-} -- Imports internal modules
#endif
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-- |
-- Module : Data.Attoparsec.Text.Lazy
-- Copyright : Bryan O'Sullivan 2007-2015
-- License : BSD3
--
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : unknown
--
-- Simple, efficient combinator parsing that can consume lazy 'Text'
-- strings, loosely based on the Parsec library.
--
-- This is essentially the same code as in the 'Data.Attoparsec.Text'
-- module, only with a 'parse' function that can consume a lazy
-- 'Text' incrementally, and a 'Result' type that does not allow
-- more input to be fed in. Think of this as suitable for use with a
-- lazily read file, e.g. via 'L.readFile' or 'L.hGetContents'.
--
-- /Note:/ The various parser functions and combinators such as
-- 'string' still expect /strict/ 'T.Text' parameters, and return
-- strict 'T.Text' results. Behind the scenes, strict 'T.Text' values
-- are still used internally to store parser input and manipulate it
-- efficiently.
module Data.Attoparsec.Text.Lazy
(
Result(..)
, module Data.Attoparsec.Text
-- * Running parsers
, parse
, parseTest
-- ** Result conversion
, maybeResult
, eitherResult
) where
import Control.DeepSeq (NFData(rnf))
import Data.List (intercalate)
import Data.Text.Lazy.Internal (Text(..), chunk)
import qualified Data.Attoparsec.Internal.Types as T
import qualified Data.Attoparsec.Text as A
import qualified Data.Text as T
import Data.Attoparsec.Text hiding (IResult(..), Result, eitherResult,
maybeResult, parse, parseWith, parseTest)
-- | The result of a parse.  Unlike the incremental result type of
-- "Data.Attoparsec.Text", there is no @Partial@ constructor: all input is
-- taken from the lazy 'Text', so no more can be fed in afterwards.
data Result r = Fail Text [String] String
              -- ^ The parse failed.  The 'Text' is the input
              -- that had not yet been consumed when the failure
              -- occurred.  The @[@'String'@]@ is a list of contexts
              -- in which the error occurred.  The 'String' is the
              -- message describing the error, if any.
              | Done Text r
              -- ^ The parse succeeded.  The 'Text' is the
              -- input that had not yet been consumed (if any) when
              -- the parse succeeded.
              deriving (Show)
-- | Fully evaluate both the leftover input and the carried payload.
instance NFData r => NFData (Result r) where
    rnf (Fail rest contexts message) =
        rnf rest `seq` rnf contexts `seq` rnf message
    rnf (Done rest value) = rnf rest `seq` rnf value
    {-# INLINE rnf #-}
-- | Map a function over a successful result, leaving failures untouched.
fmapR :: (a -> b) -> Result a -> Result b
fmapR f r = case r of
    Done rest value    -> Done rest (f value)
    Fail rest ctxs msg -> Fail rest ctxs msg

instance Functor Result where
    fmap = fmapR
-- | Run a parser and return its result.
parse :: A.Parser a -> Text -> Result a
parse p s = case s of
              Chunk x xs -> go (A.parse p x) xs
              -- NB: 'empty' is a lowercase *variable* pattern, not the
              -- 'Empty' constructor: it catches the only remaining case
              -- (the empty lazy text) and gives it a name.
              empty -> go (A.parse p T.empty) empty
  where
    -- Drive the strict attoparsec parser, feeding it one chunk of the lazy
    -- tail at a time, and re-attach the unconsumed chunks to the leftovers.
    go (T.Fail x stk msg) ys = Fail (chunk x ys) stk msg
    go (T.Done x r) ys = Done (chunk x ys) r
    go (T.Partial k) (Chunk y ys) = go (k y) ys
    -- Same catch-all trick as above: 'empty' matches the exhausted input,
    -- so the parser is told there is no more to come.
    go (T.Partial k) empty = go (k T.empty) empty
-- | Run a parser and print its result to standard output.
parseTest :: (Show a) => A.Parser a -> Text -> IO ()
parseTest p = print . parse p
-- | Convert a 'Result' value to a 'Maybe' value, discarding failure details.
maybeResult :: Result r -> Maybe r
maybeResult r = case r of
    Done _ value -> Just value
    _            -> Nothing
-- | Convert a 'Result' value to an 'Either' value, rendering failures as a
-- human-readable message (error contexts joined by @ > @).
eitherResult :: Result r -> Either String r
eitherResult r = case r of
    Done _ value        -> Right value
    Fail _ [] msg       -> Left msg
    Fail _ contexts msg -> Left (intercalate " > " contexts ++ ": " ++ msg)
| beni55/attoparsec | Data/Attoparsec/Text/Lazy.hs | bsd-3-clause | 3,702 | 0 | 11 | 961 | 814 | 451 | 363 | 47 | 5 |
module MediaWiki.API.Query.Revisions.Import where
import MediaWiki.API.Types
import MediaWiki.API.Utils
import MediaWiki.API.Query.Revisions
import Text.XML.Light.Types
import Text.XML.Light.Proc ( strContent )
import Control.Monad
import Data.Maybe
-- | Parse the XML text of a revisions query response.
stringXml :: String -> Either (String,[{-Error msg-}String]) RevisionsResponse
stringXml = parseDoc xml
-- | Parse the root @\<api\>@ element of a revisions query response.
xml :: Element -> Maybe RevisionsResponse
xml e = do
   guard (elName e == nsName "api")
   let es1 = children e
   p <- pNode "query" es1
   let es = children p
   -- Every child of <pages> that parses as a page contributes its revisions.
   ps <- fmap (mapMaybe xmlPage) (fmap children $ pNode "pages" es)
   -- Continuation token for fetching the next batch, if the server sent one.
   let cont = pNode "query-continue" es1 >>= xmlContinue "revisions" "rvstartid"
   return emptyRevisionsResponse
     { rvPages = ps
     , rvContinue = cont
     }
-- | Parse one @\<page\>@ element, yielding the page identity together with
-- the revisions listed under its @\<revisions\>@ child.  Returns 'Nothing'
-- if the element is not a page or carries no @\<revisions\>@ node.
xmlPage :: Element -> Maybe (PageTitle,[Revision])
xmlPage e = do
   guard (elName e == nsName "page")
   let ns = fromMaybe mainNamespace $ pAttr "ns" e
   let tit = fromMaybe "" $ pAttr "title" e
   let pid = pAttr "pageid" e
   let es = children e
   p <- pNode "revisions" es
   let pg = emptyPageTitle{pgNS = ns, pgTitle=tit, pgMbId = pid}
   -- BUG FIX: the <rev> elements are the direct children of <revisions>.
   -- The old code looked one level too deep (at the children of the first
   -- <rev> node), which always produced an empty revision list.
   let rs = mapMaybe (xmlRevision pg) (children p)
   return (pg, rs)
-- | Parse one @\<rev\>@ element into a 'Revision' for the given page.
-- Returns 'Nothing' for elements that are not revisions.
xmlRevision :: PageTitle -> Element -> Maybe Revision
xmlRevision pg e = do
   -- BUG FIX: revision elements are named "rev", not "page"; the old guard
   -- rejected every revision element, so no revision was ever parsed.
   guard (elName e == nsName "rev")
   let rid = fromMaybe "" $ pAttr "revid" e
   let mino = isJust (pAttr "minor" e)
   let usr = fromMaybe "" $ pAttr "user" e
   let anon = isJust (pAttr "anon" e)
   let ts = fromMaybe "" $ pAttr "timestamp" e
   let size = fromMaybe 0 (pAttr "size" e >>= readMb)
   let com = pAttr "comment" e
   -- The revision text, when requested, is the element's character content;
   -- an empty string means no content was returned.
   let con = case strContent e of { "" -> Nothing ; xs -> Just xs}
   return (emptyRevision pg)
     { revRevId = rid
     , revIsMinor = mino
     , revUser = usr
     , revIsAnon = anon
     , revTimestamp = ts
     , revSize = size
     , revComment = com
     , revContent = con
     }
| neobrain/neobot | mediawiki/MediaWiki/API/Query/Revisions/Import.hs | bsd-3-clause | 1,926 | 4 | 13 | 463 | 747 | 371 | 376 | 52 | 2 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ < 709
import qualified HastePkg708 as Real
#else
import qualified HastePkg710 as Real
#endif
-- | Entry point: delegate to the implementation selected by the CPP
-- conditional import above (one module per supported GHC version).
main :: IO ()
main = Real.main
| beni55/haste-compiler | utils/haste-pkg/haste-pkg.hs | bsd-3-clause | 171 | 0 | 6 | 29 | 28 | 18 | 10 | 4 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[NameEnv]{@NameEnv@: name environments}
-}
{-# LANGUAGE CPP #-}
module NameEnv (
-- * Var, Id and TyVar environments (maps)
NameEnv,
-- ** Manipulating these environments
mkNameEnv,
emptyNameEnv, isEmptyNameEnv,
unitNameEnv, nameEnvElts,
extendNameEnv_C, extendNameEnv_Acc, extendNameEnv,
extendNameEnvList, extendNameEnvList_C,
filterNameEnv, anyNameEnv,
plusNameEnv, plusNameEnv_C, alterNameEnv,
lookupNameEnv, lookupNameEnv_NF, delFromNameEnv, delListFromNameEnv,
elemNameEnv, mapNameEnv, disjointNameEnv,
DNameEnv,
emptyDNameEnv,
lookupDNameEnv,
mapDNameEnv,
alterDNameEnv,
-- ** Dependency analysis
depAnal
) where
#include "HsVersions.h"
import Digraph
import Name
import UniqFM
import UniqDFM
import Maybes
{-
************************************************************************
* *
\subsection{Name environment}
* *
************************************************************************
-}
{-
Note [depAnal determinism]
~~~~~~~~~~~~~~~~~~~~~~~~~~
depAnal is deterministic provided it gets the nodes in a deterministic order.
The order of lists that get_defs and get_uses return doesn't matter, as these
are only used to construct the edges, and stronglyConnCompFromEdgedVertices is
deterministic even when the edges are not in deterministic order as explained
in Note [Deterministic SCC] in Digraph.
-}
depAnal :: (node -> [Name]) -- Defs
        -> (node -> [Name]) -- Uses
        -> [node]
        -> [SCC node]
-- Perform dependency analysis on a group of definitions,
-- where each definition may define more than one Name
--
-- The get_defs and get_uses functions are called only once per node.
-- Uses of Names that no node defines are simply dropped (mapMaybe below),
-- i.e. they contribute no edges.
depAnal get_defs get_uses nodes
  = stronglyConnCompFromEdgedVerticesUniq (map mk_node keyed_nodes)
  where
    -- Give each node a unique Int key for the graph algorithm.
    keyed_nodes = nodes `zip` [(1::Int)..]
    mk_node (node, key) = (node, key, mapMaybe (lookupNameEnv key_map) (get_uses node))
    key_map :: NameEnv Int -- Maps a Name to the key of the decl that defines it
    key_map = mkNameEnv [(name,key) | (node, key) <- keyed_nodes, name <- get_defs node]
{-
************************************************************************
* *
\subsection{Name environment}
* *
************************************************************************
-}
-- | Name Environment: a map whose domain is 'Name'.
type NameEnv a = UniqFM a -- Domain is Name

emptyNameEnv       :: NameEnv a
isEmptyNameEnv     :: NameEnv a -> Bool
mkNameEnv          :: [(Name,a)] -> NameEnv a
nameEnvElts        :: NameEnv a -> [a]
alterNameEnv       :: (Maybe a-> Maybe a) -> NameEnv a -> Name -> NameEnv a
extendNameEnv_C    :: (a->a->a) -> NameEnv a -> Name -> a -> NameEnv a
extendNameEnv_Acc  :: (a->b->b) -> (a->b) -> NameEnv b -> Name -> a -> NameEnv b
extendNameEnv      :: NameEnv a -> Name -> a -> NameEnv a
plusNameEnv        :: NameEnv a -> NameEnv a -> NameEnv a
plusNameEnv_C      :: (a->a->a) -> NameEnv a -> NameEnv a -> NameEnv a
extendNameEnvList  :: NameEnv a -> [(Name,a)] -> NameEnv a
extendNameEnvList_C :: (a->a->a) -> NameEnv a -> [(Name,a)] -> NameEnv a
delFromNameEnv     :: NameEnv a -> Name -> NameEnv a
delListFromNameEnv :: NameEnv a -> [Name] -> NameEnv a
elemNameEnv        :: Name -> NameEnv a -> Bool
unitNameEnv        :: Name -> a -> NameEnv a
lookupNameEnv      :: NameEnv a -> Name -> Maybe a
lookupNameEnv_NF   :: NameEnv a -> Name -> a
filterNameEnv      :: (elt -> Bool) -> NameEnv elt -> NameEnv elt
anyNameEnv         :: (elt -> Bool) -> NameEnv elt -> Bool
mapNameEnv         :: (elt1 -> elt2) -> NameEnv elt1 -> NameEnv elt2
disjointNameEnv    :: NameEnv a -> NameEnv a -> Bool

-- Almost everything is a thin renaming of the corresponding 'UniqFM'
-- operation, so the definitions are written point-free.
nameEnvElts         = eltsUFM
emptyNameEnv        = emptyUFM
isEmptyNameEnv      = isNullUFM
unitNameEnv         = unitUFM
extendNameEnv       = addToUFM
extendNameEnvList   = addListToUFM
lookupNameEnv       = lookupUFM
alterNameEnv        = alterUFM
mkNameEnv           = listToUFM
elemNameEnv         = elemUFM
plusNameEnv         = plusUFM
plusNameEnv_C       = plusUFM_C
extendNameEnv_C     = addToUFM_C
mapNameEnv          = mapUFM
extendNameEnv_Acc   = addToUFM_Acc
extendNameEnvList_C = addListToUFM_C
delFromNameEnv      = delFromUFM
delListFromNameEnv  = delListFromUFM
filterNameEnv       = filterUFM
-- True if any element satisfies the predicate ('||' short-circuits).
anyNameEnv f        = foldUFM ((||) . f) False
disjointNameEnv x y = isNullUFM (intersectUFM x y)
-- Partial: panics (via 'expectJust') if the Name is not present.
lookupNameEnv_NF env n = expectJust "lookupNameEnv_NF" (lookupNameEnv env n)
-- | Deterministic Name Environment
--
-- See Note [Deterministic UniqFM] in UniqDFM for explanation why we need
-- DNameEnv.
type DNameEnv a = UniqDFM a

-- | An empty deterministic environment.
emptyDNameEnv :: DNameEnv a
emptyDNameEnv = emptyUDFM

-- | Look up a 'Name' in a deterministic environment.
lookupDNameEnv :: DNameEnv a -> Name -> Maybe a
lookupDNameEnv = lookupUDFM

-- | Map a function over all elements, preserving determinism.
mapDNameEnv :: (a -> b) -> DNameEnv a -> DNameEnv b
mapDNameEnv = mapUDFM

-- | Insert, update or delete the entry for a 'Name' via the given function.
alterDNameEnv :: (Maybe a -> Maybe a) -> DNameEnv a -> Name -> DNameEnv a
alterDNameEnv = alterUDFM
| olsner/ghc | compiler/basicTypes/NameEnv.hs | bsd-3-clause | 5,502 | 0 | 11 | 1,463 | 1,323 | 697 | 626 | 87 | 1 |
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.InstalledPackageInfo
-- Copyright : (c) The University of Glasgow 2004
--
-- Maintainer : libraries@haskell.org
-- Portability : portable
--
-- This is the information about an /installed/ package that
-- is communicated to the @ghc-pkg@ program in order to register
-- a package. @ghc-pkg@ now consumes this package format (as of version
-- 6.4). This is specific to GHC at the moment.
--
-- The @.cabal@ file format is for describing a package that is not yet
-- installed. It has a lot of flexibility, like conditionals and dependency
-- ranges. As such, that format is not at all suitable for describing a package
-- that has already been built and installed. By the time we get to that stage,
-- we have resolved all conditionals and resolved dependency version
-- constraints to exact versions of dependent packages. So, this module defines
-- the 'InstalledPackageInfo' data structure that contains all the info we keep
-- about an installed package. There is a parser and pretty printer. The
-- textual format is rather simpler than the @.cabal@ format: there are no
-- sections, for example.
-- This module is meant to be local-only to Distribution...
module Distribution.InstalledPackageInfo (
InstalledPackageInfo(..),
libraryName,
OriginalModule(..), ExposedModule(..),
ParseResult(..), PError(..), PWarning,
emptyInstalledPackageInfo,
parseInstalledPackageInfo,
showInstalledPackageInfo,
showInstalledPackageInfoField,
showSimpleInstalledPackageInfoField,
fieldsInstalledPackageInfo,
) where
import Distribution.ParseUtils
( FieldDescr(..), ParseResult(..), PError(..), PWarning
, simpleField, listField, parseLicenseQ
, showFields, showSingleNamedField, showSimpleSingleNamedField
, parseFieldsFlat
, parseFilePathQ, parseTokenQ, parseModuleNameQ, parsePackageNameQ
, showFilePath, showToken, boolField, parseOptVersion
, parseFreeText, showFreeText, parseOptCommaList )
import Distribution.License ( License(..) )
import Distribution.Package
( PackageName(..), PackageIdentifier(..)
, PackageId, InstalledPackageId(..)
, packageName, packageVersion, PackageKey(..)
, LibraryName(..) )
import qualified Distribution.Package as Package
import Distribution.ModuleName
( ModuleName )
import Distribution.Version
( Version(..) )
import Distribution.Text
( Text(disp, parse) )
import Text.PrettyPrint as Disp
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.Binary (Binary)
import Data.Maybe (fromMaybe)
import GHC.Generics (Generic)
-- -----------------------------------------------------------------------------
-- The InstalledPackageInfo type
-- | Everything we record about an installed package: descriptive metadata
-- copied from the package description, plus the concrete paths, libraries
-- and dependencies fixed at install time.
data InstalledPackageInfo
   = InstalledPackageInfo {
        -- these parts are exactly the same as PackageDescription
        installedPackageId :: InstalledPackageId,
        sourcePackageId    :: PackageId,
        packageKey         :: PackageKey,
        license           :: License,
        copyright         :: String,
        maintainer        :: String,
        author            :: String,
        stability         :: String,
        homepage          :: String,
        pkgUrl            :: String,
        synopsis          :: String,
        description       :: String,
        category          :: String,
        -- these parts are required by an installed package only:
        exposed           :: Bool,
        exposedModules    :: [ExposedModule],
        instantiatedWith  :: [(ModuleName, OriginalModule)],
        hiddenModules     :: [ModuleName],
        trusted           :: Bool,
        importDirs        :: [FilePath],
        libraryDirs       :: [FilePath],
        dataDir           :: FilePath,
        hsLibraries       :: [String],
        extraLibraries    :: [String],
        extraGHCiLibraries:: [String],    -- overrides extraLibraries for GHCi
        includeDirs       :: [FilePath],
        includes          :: [String],
        -- | Direct dependencies, by installed (not source) package id.
        depends           :: [InstalledPackageId],
        ccOptions         :: [String],
        ldOptions         :: [String],
        frameworkDirs     :: [FilePath],
        frameworks        :: [String],
        haddockInterfaces :: [FilePath],
        haddockHTMLs      :: [FilePath],
        pkgRoot           :: Maybe FilePath
    }
    deriving (Generic, Read, Show)
-- | The library name of an installed package, derived from its source
-- package id and package key.
libraryName :: InstalledPackageInfo -> LibraryName
libraryName pkg = Package.packageKeyLibraryName srcId key
  where
    srcId = sourcePackageId pkg
    key   = packageKey pkg
-- Binary serialisation comes from the derived 'Generic' instance.
instance Binary InstalledPackageInfo

-- The package identity is the source package id (name + version).
instance Package.Package InstalledPackageInfo where
   packageId = sourcePackageId

-- The unique id this package instance was registered under.
instance Package.HasInstalledPackageId InstalledPackageInfo where
   installedPackageId = installedPackageId

-- Dependencies of an installed package are exactly its 'depends' field.
instance Package.PackageInstalled InstalledPackageInfo where
   installedDepends = depends
-- | An 'InstalledPackageInfo' with every field blank, empty or False;
-- used as the starting value when parsing (fields fill it in one by one).
emptyInstalledPackageInfo :: InstalledPackageInfo
emptyInstalledPackageInfo
   = InstalledPackageInfo {
        installedPackageId = InstalledPackageId "",
        sourcePackageId    = PackageIdentifier (PackageName "") noVersion,
        packageKey         = OldPackageKey (PackageIdentifier
                                            (PackageName "") noVersion),
        license           = UnspecifiedLicense,
        copyright         = "",
        maintainer        = "",
        author            = "",
        stability         = "",
        homepage          = "",
        pkgUrl            = "",
        synopsis          = "",
        description       = "",
        category          = "",
        exposed           = False,
        exposedModules    = [],
        hiddenModules     = [],
        instantiatedWith  = [],
        trusted           = False,
        importDirs        = [],
        libraryDirs       = [],
        dataDir           = "",
        hsLibraries       = [],
        extraLibraries    = [],
        extraGHCiLibraries= [],
        includeDirs       = [],
        includes          = [],
        depends           = [],
        ccOptions         = [],
        ldOptions         = [],
        frameworkDirs     = [],
        frameworks        = [],
        haddockInterfaces = [],
        haddockHTMLs      = [],
        pkgRoot           = Nothing
    }
-- | A placeholder 'Version' with no version branch and no tags, used until
-- a real version has been parsed.
noVersion :: Version
noVersion = Version [] []
-- -----------------------------------------------------------------------------
-- Exposed modules
-- | A module pinned to the specific installed package that defines it.
data OriginalModule
   = OriginalModule {
       originalPackageId  :: InstalledPackageId,
       originalModuleName :: ModuleName
     }
  deriving (Generic, Eq, Read, Show)

-- | One entry of the @exposed-modules@ field: the exposed name, an optional
-- re-export origin, and an optional signature origin.
data ExposedModule
   = ExposedModule {
       exposedName      :: ModuleName,
       exposedReexport  :: Maybe OriginalModule,
       exposedSignature :: Maybe OriginalModule -- This field is unused for now.
     }
  deriving (Generic, Read, Show)
-- Printed and parsed in the form @installed-package-id:ModuleName@.
instance Text OriginalModule where
    disp (OriginalModule ipi m) =
        disp ipi <> Disp.char ':' <> disp m
    parse = do
        ipi <- parse
        _ <- Parse.char ':'
        m <- parse
        return (OriginalModule ipi m)
-- Printed and parsed in the form @Module [from Orig] [is Orig]@, where the
-- optional @from@ part records a re-export origin and the optional @is@
-- part records a signature origin.
instance Text ExposedModule where
    disp (ExposedModule m reexport signature) =
        Disp.sep [ disp m
                 , case reexport of
                     Just m' -> Disp.sep [Disp.text "from", disp m']
                     Nothing -> Disp.empty
                 , case signature of
                     Just m' -> Disp.sep [Disp.text "is", disp m']
                     Nothing -> Disp.empty
                 ]
    parse = do
        m <- parseModuleNameQ
        Parse.skipSpaces
        -- Optional "from <original module>" clause.
        reexport <- Parse.option Nothing $ do
            _ <- Parse.string "from"
            Parse.skipSpaces
            fmap Just parse
        Parse.skipSpaces
        -- Optional "is <original module>" clause.
        signature <- Parse.option Nothing $ do
            _ <- Parse.string "is"
            Parse.skipSpaces
            fmap Just parse
        return (ExposedModule m reexport signature)
-- Binary serialisation via the derived 'Generic' instances.
instance Binary OriginalModule
instance Binary ExposedModule
-- To maintain backwards-compatibility, we accept both comma/non-comma
-- separated variants of this field.  You SHOULD use the comma syntax if you
-- use any new functions, although actually it's unambiguous due to a quirk
-- of the fact that modules must start with capital letters.
showExposedModules :: [ExposedModule] -> Disp.Doc
showExposedModules mods
  | any isFancy mods = fsep (Disp.punctuate comma docs)
  | otherwise        = fsep docs
  where
    docs = map disp mods
    -- An entry is "fancy" if it carries a reexport or signature annotation;
    -- any such entry forces the comma-separated syntax.
    isFancy (ExposedModule _ Nothing Nothing) = False
    isFancy _                                 = True
-- | Parse exposed modules in either the comma-separated or the plain
-- whitespace-separated form (see the comment on 'showExposedModules').
parseExposedModules :: Parse.ReadP r [ExposedModule]
parseExposedModules = parseOptCommaList parse
-- -----------------------------------------------------------------------------
-- Parsing
-- | Parse the textual form of an installed package description, accepting
-- current field names as well as deprecated ones.
parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo
parseInstalledPackageInfo str =
    parseFieldsFlat allFields emptyInstalledPackageInfo str
  where
    allFields = fieldsInstalledPackageInfo ++ deprecatedFieldDescrs
-- | Parse one element of @instantiated-with@, written as
-- @KEY=MODULE\@PACKAGE-ID@: the hole name, the filling module, and the
-- installed package that provides it.
parseInstantiatedWith :: Parse.ReadP r (ModuleName, OriginalModule)
parseInstantiatedWith = do k <- parse
                           _ <- Parse.char '='
                           n <- parse
                           _ <- Parse.char '@'
                           p <- parse
                           return (k, OriginalModule p n)
-- -----------------------------------------------------------------------------
-- Pretty-printing

-- | Render a complete installed package description in field syntax.
showInstalledPackageInfo :: InstalledPackageInfo -> String
showInstalledPackageInfo = showFields fieldsInstalledPackageInfo

-- | Render a single field by name, if such a field exists.
showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String)
showInstalledPackageInfoField = showSingleNamedField fieldsInstalledPackageInfo

-- | Like 'showInstalledPackageInfoField' but in the "simple" rendering
-- (presumably just the field value, without decoration -- see ParseUtils).
showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String)
showSimpleInstalledPackageInfoField = showSimpleSingleNamedField fieldsInstalledPackageInfo

-- | Render one @KEY=MODULE\@PACKAGE-ID@ element of @instantiated-with@
-- (inverse of 'parseInstantiatedWith').
showInstantiatedWith :: (ModuleName, OriginalModule) -> Doc
showInstantiatedWith (k, OriginalModule p m) = disp k <> text "=" <> disp m <> text "@" <> disp p
-- -----------------------------------------------------------------------------
-- Description of the fields, for parsing/printing

-- | All fields of the installed-package-info format: the descriptive fields
-- shared with package descriptions, followed by the install-specific ones.
fieldsInstalledPackageInfo :: [FieldDescr InstalledPackageInfo]
fieldsInstalledPackageInfo = basicFieldDescrs ++ installedFieldDescrs
-- | Descriptors for the fields shared with ordinary package descriptions.
-- Each entry gives: field name, printer, parser, getter, setter.
basicFieldDescrs :: [FieldDescr InstalledPackageInfo]
basicFieldDescrs =
 [ simpleField "name"
     disp parsePackageNameQ
     packageName (\name pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgName=name}})
 , simpleField "version"
     disp parseOptVersion
     packageVersion (\ver pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgVersion=ver}})
 , simpleField "id"
     disp parse
     installedPackageId (\ipid pkg -> pkg{installedPackageId=ipid})
 , simpleField "key"
     disp parse
     packageKey (\pk pkg -> pkg{packageKey=pk})
 , simpleField "license"
     disp parseLicenseQ
     license (\l pkg -> pkg{license=l})
 , simpleField "copyright"
     showFreeText parseFreeText
     copyright (\val pkg -> pkg{copyright=val})
 , simpleField "maintainer"
     showFreeText parseFreeText
     maintainer (\val pkg -> pkg{maintainer=val})
 , simpleField "stability"
     showFreeText parseFreeText
     stability (\val pkg -> pkg{stability=val})
 , simpleField "homepage"
     showFreeText parseFreeText
     homepage (\val pkg -> pkg{homepage=val})
 , simpleField "package-url"
     showFreeText parseFreeText
     pkgUrl (\val pkg -> pkg{pkgUrl=val})
 , simpleField "synopsis"
     showFreeText parseFreeText
     synopsis (\val pkg -> pkg{synopsis=val})
 , simpleField "description"
     showFreeText parseFreeText
     description (\val pkg -> pkg{description=val})
 , simpleField "category"
     showFreeText parseFreeText
     category (\val pkg -> pkg{category=val})
 , simpleField "author"
     showFreeText parseFreeText
     author (\val pkg -> pkg{author=val})
 ]
-- | Descriptors for the fields that only make sense for an installed
-- package: exposure, module lists, on-disk paths, libraries, flags and
-- dependencies fixed at install time.
installedFieldDescrs :: [FieldDescr InstalledPackageInfo]
installedFieldDescrs = [
   boolField "exposed"
     exposed (\val pkg -> pkg{exposed=val})
 , simpleField "exposed-modules"
     showExposedModules parseExposedModules
     exposedModules (\xs pkg -> pkg{exposedModules=xs})
 , listField "hidden-modules"
     disp parseModuleNameQ
     hiddenModules (\xs pkg -> pkg{hiddenModules=xs})
 , listField "instantiated-with"
     showInstantiatedWith parseInstantiatedWith
     instantiatedWith (\xs pkg -> pkg{instantiatedWith=xs})
 , boolField "trusted"
     trusted (\val pkg -> pkg{trusted=val})
 , listField "import-dirs"
     showFilePath parseFilePathQ
     importDirs (\xs pkg -> pkg{importDirs=xs})
 , listField "library-dirs"
     showFilePath parseFilePathQ
     libraryDirs (\xs pkg -> pkg{libraryDirs=xs})
 , simpleField "data-dir"
     showFilePath (parseFilePathQ Parse.<++ return "")
     dataDir (\val pkg -> pkg{dataDir=val})
 , listField "hs-libraries"
     showFilePath parseTokenQ
     hsLibraries (\xs pkg -> pkg{hsLibraries=xs})
 , listField "extra-libraries"
     showToken parseTokenQ
     extraLibraries (\xs pkg -> pkg{extraLibraries=xs})
 , listField "extra-ghci-libraries"
     showToken parseTokenQ
     extraGHCiLibraries (\xs pkg -> pkg{extraGHCiLibraries=xs})
 , listField "include-dirs"
     showFilePath parseFilePathQ
     includeDirs (\xs pkg -> pkg{includeDirs=xs})
 , listField "includes"
     showFilePath parseFilePathQ
     includes (\xs pkg -> pkg{includes=xs})
 , listField "depends"
     disp parse
     depends (\xs pkg -> pkg{depends=xs})
 , listField "cc-options"
     showToken parseTokenQ
     ccOptions (\path pkg -> pkg{ccOptions=path})
 , listField "ld-options"
     showToken parseTokenQ
     ldOptions (\path pkg -> pkg{ldOptions=path})
 , listField "framework-dirs"
     showFilePath parseFilePathQ
     frameworkDirs (\xs pkg -> pkg{frameworkDirs=xs})
 , listField "frameworks"
     showToken parseTokenQ
     frameworks (\xs pkg -> pkg{frameworks=xs})
 , listField "haddock-interfaces"
     showFilePath parseFilePathQ
     haddockInterfaces (\xs pkg -> pkg{haddockInterfaces=xs})
 , listField "haddock-html"
     showFilePath parseFilePathQ
     haddockHTMLs (\xs pkg -> pkg{haddockHTMLs=xs})
   -- pkgroot is parsed but deliberately never printed back.
 , simpleField "pkgroot"
     (const Disp.empty) parseFilePathQ
     (fromMaybe "" . pkgRoot) (\xs pkg -> pkg{pkgRoot=Just xs})
 ]
-- | Fields accepted for backwards compatibility only: the value is parsed
-- then discarded (setter is @const id@) and nothing is printed back
-- (getter is @const []@).
deprecatedFieldDescrs :: [FieldDescr InstalledPackageInfo]
deprecatedFieldDescrs = [
   listField "hugs-options"
     showToken parseTokenQ
     (const []) (const id)
 ]
| x-y-z/cabal | Cabal/Distribution/InstalledPackageInfo.hs | bsd-3-clause | 16,058 | 0 | 14 | 5,123 | 3,109 | 1,773 | 1,336 | 308 | 2 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Dependency.Types
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Common types for dependency resolution.
-----------------------------------------------------------------------------
module Distribution.Client.Dependency.TopDown (
topDownResolver
) where
import Distribution.Client.Dependency.TopDown.Types
import qualified Distribution.Client.Dependency.TopDown.Constraints as Constraints
import Distribution.Client.Dependency.TopDown.Constraints
( Satisfiable(..) )
import Distribution.Client.Types
( SourcePackage(..), ConfiguredPackage(..)
, enableStanzas, ConfiguredId(..), fakeComponentId )
import Distribution.Client.Dependency.Types
( DependencyResolver, ResolverPackage(..)
, PackageConstraint(..), unlabelPackageConstraint
, PackagePreferences(..), InstalledPreference(..)
, Progress(..), foldProgress )
import qualified Distribution.Client.PackageIndex as PackageIndex
import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
import Distribution.Client.ComponentDeps
( ComponentDeps )
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Client.PackageIndex
( PackageIndex )
import Distribution.Package
( PackageName(..), PackageId, PackageIdentifier(..)
, ComponentId(..)
, Package(..), packageVersion, packageName
, Dependency(Dependency), thisPackageVersion, simplifyDependency )
import Distribution.PackageDescription
( PackageDescription(buildDepends) )
import Distribution.Client.PackageUtils
( externalBuildDepends )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription, flattenPackageDescription )
import Distribution.Version
( Version(..), VersionRange, withinRange, simplifyVersionRange
, UpperBound(..), asVersionIntervals )
import Distribution.Compiler
( CompilerInfo )
import Distribution.System
( Platform )
import Distribution.Simple.Utils
( equating, comparing )
import Distribution.Text
( display )
import Data.List
( foldl', maximumBy, minimumBy, nub, sort, sortBy, groupBy )
import Data.Maybe
( fromJust, fromMaybe, catMaybes )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( Monoid(mempty) )
#endif
import Control.Monad
( guard )
import qualified Data.Set as Set
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Graph as Graph
import qualified Data.Array as Array
import Control.Exception
( assert )
-- ------------------------------------------------------------
-- * Search state types
-- ------------------------------------------------------------
-- | The constraint store used by this solver: it tracks remaining candidates
-- (installed and source) together with the reason each excluded candidate
-- was ruled out.
type Constraints  = Constraints.Constraints
                      InstalledPackageEx UnconfiguredPackage ExclusionReason

-- | The packages committed to so far, indexed like any package index.
type SelectedPackages = PackageIndex SelectedPackage
-- ------------------------------------------------------------
-- * The search tree type
-- ------------------------------------------------------------
-- | The search tree.  A 'ChoiceNode' carries the state inherited from the
-- path so far, plus one inner list per still-undecided package: each
-- candidate for that package is paired with the subtree reached by
-- committing to it.  'Failure' terminates a dead branch.
data SearchSpace inherited pkg
   = ChoiceNode inherited [[(pkg, SearchSpace inherited pkg)]]
   | Failure Failure
-- ------------------------------------------------------------
-- * Traverse a search tree
-- ------------------------------------------------------------
explore :: (PackageName -> PackagePreferences)
        -> SearchSpace (SelectedPackages, Constraints, SelectionChanges)
                       SelectablePackage
        -> Progress Log Failure (SelectedPackages, Constraints)

explore _ (Failure failure) = Fail failure
explore _ (ChoiceNode (s,c,_) []) = Done (s,c)
explore pref (ChoiceNode _ choices) =
  -- Prefer "forced" choices: packages with exactly one candidate left.
  -- NB: 'choice' in the comprehension and 'node'' in the first alternative
  -- are local pattern bindings that shadow the where-bound names below.
  case [ choice | [choice] <- choices ] of
    -- Forced choice: take the single candidate directly.
    ((_, node'):_) -> Step (logInfo node') (explore pref node')
    -- No forced choice: pick the best candidate of the topologically first
    -- package ('node'' here comes from this alternative's where clause).
    [] -> Step (logInfo node') (explore pref node')
      where
        choice = minimumBy (comparing topSortNumber) choices
        pkgname = packageName . fst . head $ choice
        (_, node') = maximumBy (bestByPref pkgname) choice
  where
    -- The topological-sort number of the package a choice list is about;
    -- every candidate in the list shares it, so the head is representative.
    topSortNumber choice = case fst (head choice) of
      InstalledOnly (InstalledPackageEx _ i _) -> i
      SourceOnly (UnconfiguredPackage _ i _ _) -> i
      InstalledAndSource _ (UnconfiguredPackage _ i _ _) -> i

    -- Ordering that ranks candidates for one package, honouring the user's
    -- installed-vs-latest preference; 'maximumBy' picks the greatest.
    bestByPref pkgname = case packageInstalledPreference of
        PreferLatest ->
          comparing (\(p,_) -> ( isPreferred p, packageId p))
        PreferInstalled ->
          comparing (\(p,_) -> (isInstalled p, isPreferred p, packageId p))
      where
        isInstalled (SourceOnly _) = False
        isInstalled _ = True
        isPreferred p = packageVersion p `withinRange` preferredVersions
        (PackagePreferences preferredVersions packageInstalledPreference)
          = pref pkgname

    -- Log entry recording what this step selected and discarded.
    logInfo node = Select selected discarded
      where (selected, discarded) = case node of
              Failure _ -> ([], [])
              ChoiceNode (_,_,changes) _ -> changes
-- ------------------------------------------------------------
-- * Generate a search tree
-- ------------------------------------------------------------
-- | Try to configure a candidate against the currently available choices,
-- returning either the dependencies that could not be satisfied or the
-- fully configured package.
type ConfigurePackage = PackageIndex SelectablePackage
                     -> SelectablePackage
                     -> Either [Dependency] SelectedPackage

-- | (packages selected, packages discarded)
type SelectionChanges = ([SelectedPackage], [PackageId])
-- | Generate the search space lazily: one choice group per pending
-- package name, one entry per candidate version still allowed by the
-- constraints. Picking a candidate configures it, adds the constraints
-- its dependencies impose, and recurses.
searchSpace :: ConfigurePackage
            -> Constraints
            -> SelectedPackages
            -> SelectionChanges
            -> Set PackageName
            -> SearchSpace (SelectedPackages, Constraints, SelectionChanges)
                           SelectablePackage
searchSpace configure constraints selected changes next =
  -- Invariants: no pending name is already selected, and both selected
  -- and pending names are known to the constraint set.
  assert (Set.null (selectedSet `Set.intersection` next)) $
  assert (selectedSet `Set.isSubsetOf` Constraints.packages constraints) $
  assert (next `Set.isSubsetOf` Constraints.packages constraints) $
  ChoiceNode (selected, constraints, changes)
    [ [ (pkg, select name pkg)
      | pkg <- PackageIndex.lookupPackageName available name ]
    | name <- Set.elems next ]
  where
    available = Constraints.choices constraints

    selectedSet = Set.fromList (map packageName (PackageIndex.allPackages selected))

    -- The subtree resulting from picking this candidate.
    select name pkg = case configure available pkg of
      Left missing -> Failure $ ConfigureFailed pkg
                        [ (dep, Constraints.conflicting constraints dep)
                        | dep <- missing ]
      Right pkg' ->
        case constrainDeps pkg' newDeps (addDeps constraints newPkgs) [] of
          Left failure -> Failure failure
          Right (constraints', newDiscarded) ->
            searchSpace configure
                        constraints' selected' (newSelected, newDiscarded) next'
        where
          selected' = foldl' (flip PackageIndex.insert) selected newSelected
          -- A paired package (e.g. base-3/base-4) brings its partner
          -- into the selection along with it.
          newSelected =
            case Constraints.isPaired constraints (packageId pkg) of
              Nothing     -> [pkg']
              Just pkgid' -> [pkg', pkg'']
                where
                  Just pkg'' = fmap (\(InstalledOnly p) -> InstalledOnly p)
                                    (PackageIndex.lookupPackageId available pkgid')
          -- Dependency names not yet selected become new pending names.
          newPkgs = [ name'
                    | (Dependency name' _, _) <- newDeps
                    , null (PackageIndex.lookupPackageName selected' name') ]
          newDeps = concatMap packageConstraints newSelected
          next'   = Set.delete name
                  $ foldl' (flip Set.insert) next newPkgs
-- | The dependency constraints contributed by a selected package: an
-- installed package pins the exact versions it was built against
-- (flagged True, meaning an installed instance is required); a source
-- package contributes its configured dependency ranges (flagged False).
-- When both forms are available the source package's view is used.
packageConstraints :: SelectedPackage -> [(Dependency, Bool)]
packageConstraints selectedPkg =
    case selectedPkg of
      InstalledOnly ipkg        -> fromInstalled ipkg
      SourceOnly spkg           -> fromSource spkg
      InstalledAndSource _ spkg -> fromSource spkg
  where
    fromInstalled (InstalledPackageEx _ _ deps) =
      map (\dep -> (thisPackageVersion dep, True)) deps
    fromSource (SemiConfiguredPackage _ _ _ deps) =
      map (\dep -> (dep, False)) deps
-- | Register each of the given package names as a target in the
-- constraint set. Names are added right-to-left (matching the original
-- foldr formulation); adding a target is always expected to succeed at
-- this point, so any other outcome is an internal error.
addDeps :: Constraints -> [PackageName] -> Constraints
addDeps cs []                 = cs
addDeps cs (pkgname:pkgnames) =
  case Constraints.addTarget pkgname (addDeps cs pkgnames) of
    Satisfiable cs' () -> cs'
    _                  -> impossible "addDeps unsatisfiable"
-- | Apply the dependency constraints of a freshly selected package, one
-- at a time, accumulating the package ids excluded along the way. Once
-- every dependency has been applied, the package itself is pinned to
-- its selected version. A conflicting dependency yields a
-- 'DependencyConflict' failure.
constrainDeps :: SelectedPackage -> [(Dependency, Bool)] -> Constraints
              -> [PackageId]
              -> Either Failure (Constraints, [PackageId])
constrainDeps pkg = go
  where
    -- All deps applied: pin the package to exactly this version.
    go [] cs excluded =
      case addPackageSelectConstraint (packageId pkg) cs of
        Satisfiable cs' excluded' -> Right (cs', excluded' ++ excluded)
        _                         -> impossible "constrainDeps unsatisfiable(1)"
    go ((dep, mustBeInstalled):remaining) cs excluded =
      case addPackageDependencyConstraint (packageId pkg) dep mustBeInstalled cs of
        Satisfiable cs' excluded' -> go remaining cs' (excluded' ++ excluded)
        Unsatisfiable             -> impossible "constrainDeps unsatisfiable(2)"
        ConflictsWith conflicts   ->
          Left (DependencyConflict pkg dep mustBeInstalled conflicts)
-- ------------------------------------------------------------
-- * The main algorithm
-- ------------------------------------------------------------
-- | Run the main search: build the search space from an empty selection
-- and explore it, producing a log of decisions and the final selection
-- (or a structured failure).
search :: ConfigurePackage
       -> (PackageName -> PackagePreferences)
       -> Constraints
       -> Set PackageName
       -> Progress Log Failure (SelectedPackages, Constraints)
search configure pref constraints next =
  explore pref (searchSpace configure constraints mempty ([], []) next)
-- ------------------------------------------------------------
-- * The top level resolver
-- ------------------------------------------------------------
-- | The main exported resolver, with string logging and failure types to fit
-- the standard 'DependencyResolver' interface.
--
-- | The main exported resolver, with string logging and failure types to
-- fit the standard 'DependencyResolver' interface. Adapts the inputs
-- (installed index conversion, constraint unlabelling) and renders the
-- structured log/failure values of the native resolver as strings.
topDownResolver :: DependencyResolver
topDownResolver platform cinfo installedPkgIndex sourcePkgIndex
                preferences constraints targets =
    renderMessages result
  where
    result = topDownResolver' platform cinfo
                              (convertInstalledPackageIndex installedPkgIndex)
                              sourcePkgIndex
                              preferences
                              (map unlabelPackageConstraint constraints)
                              targets

    -- Render structured log entries and failures as plain strings.
    renderMessages :: Progress Log Failure a -> Progress String String a
    renderMessages = foldProgress (Step . showLog) (Fail . showFailure) Done
-- | The native resolver with detailed structured logging and failure types.
--
-- | The native resolver with detailed structured logging and failure
-- types. Pipeline: seed the constraint set with the targets, apply the
-- user's top-level constraints, prune unconfigurable packages bottom-up,
-- run the search, then finalise and improve the resulting plan.
topDownResolver' :: Platform -> CompilerInfo
                 -> PackageIndex InstalledPackage
                 -> PackageIndex SourcePackage
                 -> (PackageName -> PackagePreferences)
                 -> [PackageConstraint]
                 -> [PackageName]
                 -> Progress Log Failure [ResolverPackage]
topDownResolver' platform cinfo installedPkgIndex sourcePkgIndex
                 preferences constraints targets =
      fmap (uncurry finalise)
    . (\cs -> search configure preferences cs initialPkgNames)
  =<< pruneBottomUp platform cinfo
  =<< addTopLevelConstraints constraints
  =<< addTopLevelTargets targets emptyConstraintSet
  where
    configure = configurePackage platform cinfo

    -- The "empty" constraint set still knows the full annotated universe
    -- of candidate packages, restricted to the needed subset.
    emptyConstraintSet :: Constraints
    emptyConstraintSet = Constraints.empty
      (annotateInstalledPackages topSortNumber installedPkgIndex')
      (annotateSourcePackages constraints topSortNumber sourcePkgIndex')

    (installedPkgIndex', sourcePkgIndex') =
      selectNeededSubset installedPkgIndex sourcePkgIndex initialPkgNames

    topSortNumber = topologicalSortNumbering installedPkgIndex' sourcePkgIndex'

    initialPkgNames = Set.fromList targets

    -- Turn the raw selection into concrete resolver packages, swapping
    -- in already-installed packages where possible ('improvePlan').
    finalise selected' constraints' =
        map toResolverPackage
      . PackageIndex.allPackages
      . fst . improvePlan installedPkgIndex' constraints'
      . PackageIndex.fromList
      $ finaliseSelectedPackages preferences selected' constraints'

    toResolverPackage :: FinalSelectedPackage -> ResolverPackage
    toResolverPackage (SelectedInstalled (InstalledPackage pkg _))
      = PreExisting pkg
    toResolverPackage (SelectedSource pkg) = Configured pkg
-- | Register the user's target packages in the constraint set, failing
-- with 'NoSuchPackage' for any name the constraint set does not know.
-- A conflict cannot arise when merely adding targets, so that outcome
-- is an internal error.
addTopLevelTargets :: [PackageName]
                   -> Constraints
                   -> Progress a Failure Constraints
addTopLevelTargets pkgs initialCs = foldr addOne Done pkgs initialCs
  where
    addOne pkg continue cs =
      case Constraints.addTarget pkg cs of
        Satisfiable cs' ()       -> continue cs'
        Unsatisfiable            -> Fail (NoSuchPackage pkg)
        ConflictsWith _conflicts -> impossible "addTopLevelTargets conflicts"
-- | Apply the user's top-level constraints to the constraint set,
-- logging each applied constraint and failing if any is unsatisfiable
-- or conflicts. Flag and stanza constraints are not applied here (they
-- are consumed by 'annotateSourcePackages'), so those clauses skip.
addTopLevelConstraints :: [PackageConstraint] -> Constraints
                       -> Progress Log Failure Constraints
addTopLevelConstraints [] cs = Done cs
-- Flag assignments are handled during annotation, not as constraints.
addTopLevelConstraints (PackageConstraintFlags _ _ :deps) cs =
  addTopLevelConstraints deps cs
addTopLevelConstraints (PackageConstraintVersion pkg ver:deps) cs =
  case addTopLevelVersionConstraint pkg ver cs of
    Satisfiable cs' pkgids ->
      Step (AppliedVersionConstraint pkg ver pkgids)
           (addTopLevelConstraints deps cs')
    Unsatisfiable ->
      Fail (TopLevelVersionConstraintUnsatisfiable pkg ver)
    ConflictsWith conflicts ->
      Fail (TopLevelVersionConstraintConflict pkg ver conflicts)
addTopLevelConstraints (PackageConstraintInstalled pkg:deps) cs =
  case addTopLevelInstalledConstraint pkg cs of
    Satisfiable cs' pkgids ->
      Step (AppliedInstalledConstraint pkg InstalledConstraint pkgids)
           (addTopLevelConstraints deps cs')
    Unsatisfiable ->
      Fail (TopLevelInstallConstraintUnsatisfiable pkg InstalledConstraint)
    ConflictsWith conflicts ->
      Fail (TopLevelInstallConstraintConflict pkg InstalledConstraint conflicts)
addTopLevelConstraints (PackageConstraintSource pkg:deps) cs =
  case addTopLevelSourceConstraint pkg cs of
    Satisfiable cs' pkgids ->
      Step (AppliedInstalledConstraint pkg SourceConstraint pkgids)
           (addTopLevelConstraints deps cs')
    Unsatisfiable ->
      Fail (TopLevelInstallConstraintUnsatisfiable pkg SourceConstraint)
    ConflictsWith conflicts ->
      Fail (TopLevelInstallConstraintConflict pkg SourceConstraint conflicts)
-- Stanza selections are likewise handled during annotation.
addTopLevelConstraints (PackageConstraintStanzas _ _ : deps) cs =
  addTopLevelConstraints deps cs
-- | Add exclusion on available packages that cannot be configured.
--
-- | Add exclusion on available packages that cannot be configured.
--
-- Works through the package groups in reverse topological order
-- (dependencies before dependents), excluding each source package that
-- fails to configure against the remaining choices. Both folds are in
-- continuation style: @rest@ is the computation over the rest of the
-- packages, applied to the updated constraint set.
pruneBottomUp :: Platform -> CompilerInfo
              -> Constraints -> Progress Log Failure Constraints
pruneBottomUp platform comp constraints =
    foldr prune Done (initialPackages constraints) constraints
  where
    prune pkgs rest cs = foldr addExcludeConstraint rest unconfigurable cs
      where
        unconfigurable =
          [ (pkg, missing) -- if necessary we could look up missing reasons
          | (Just pkg', pkg) <- zip (map getSourcePkg pkgs) pkgs
          , Left missing <- [configure cs pkg'] ]

    -- Exclude one unconfigurable package; the only expected outcome is
    -- that exactly this package id gets excluded.
    addExcludeConstraint (pkg, missing) rest cs =
      let reason = ExcludedByConfigureFail missing in
      case addPackageExcludeConstraint (packageId pkg) reason cs of
        Satisfiable cs' [pkgid] | packageId pkg == pkgid
                        -> Step (ExcludeUnconfigurable pkgid) (rest cs')
        Satisfiable _ _ -> impossible "pruneBottomUp satisfiable"
        _               -> Fail $ ConfigureFailed pkg
                             [ (dep, Constraints.conflicting cs dep)
                             | dep <- missing ]

    configure cs (UnconfiguredPackage (SourcePackage _ pkg _ _) _ flags stanzas) =
      finalizePackageDescription flags (dependencySatisfiable cs)
                                 platform comp [] (enableStanzas stanzas pkg)

    dependencySatisfiable cs =
      not . null . PackageIndex.lookupDependency (Constraints.choices cs)

    -- collect each group of packages (by name) in reverse topsort order
    initialPackages =
        reverse
      . sortBy (comparing (topSortNumber . head))
      . PackageIndex.allPackagesByName
      . Constraints.choices

    topSortNumber (InstalledOnly (InstalledPackageEx _ i _))           = i
    topSortNumber (SourceOnly (UnconfiguredPackage _ i _ _))           = i
    topSortNumber (InstalledAndSource _ (UnconfiguredPackage _ i _ _)) = i

    getSourcePkg (InstalledOnly _ )          = Nothing
    getSourcePkg (SourceOnly spkg)           = Just spkg
    getSourcePkg (InstalledAndSource _ spkg) = Just spkg
-- | Configure a selectable package against the currently available
-- choices. Installed packages need no configuration; source packages
-- have their flags and stanzas resolved, producing either the missing
-- dependencies or a semi-configured package recording its external
-- build dependencies.
configurePackage :: Platform -> CompilerInfo -> ConfigurePackage
configurePackage platform cinfo available spkg = case spkg of
  InstalledOnly ipkg           -> Right (InstalledOnly ipkg)
  SourceOnly apkg              -> fmap SourceOnly (configure apkg)
  InstalledAndSource ipkg apkg -> fmap (InstalledAndSource ipkg)
                                       (configure apkg)
  where
    configure (UnconfiguredPackage apkg@(SourcePackage _ p _ _) _ flags stanzas) =
      case finalizePackageDescription flags dependencySatisfiable
                                      platform cinfo []
                                      (enableStanzas stanzas p) of
        Left missing        -> Left missing
        Right (pkg, flags') -> Right $
          SemiConfiguredPackage apkg flags' stanzas (externalBuildDepends pkg)

    -- A dependency is satisfiable if any available candidate matches it.
    dependencySatisfiable = not . null . PackageIndex.lookupDependency available
-- | Annotate each installed packages with its set of transitive dependencies
-- and its topological sort number.
--
-- | Annotate each installed package with its set of transitive
-- dependencies and its topological sort number.
annotateInstalledPackages :: (PackageName -> TopologicalSortNumber)
                          -> PackageIndex InstalledPackage
                          -> PackageIndex InstalledPackageEx
annotateInstalledPackages dfsNumber installed =
    PackageIndex.fromList (map annotate (PackageIndex.allPackages installed))
  where
    annotate pkg =
      InstalledPackageEx pkg (dfsNumber (packageName pkg))
                             (transitiveDepends pkg)

    -- Everything reachable from this package in the dependency graph,
    -- excluding the package itself (hence the 'tail').
    transitiveDepends :: InstalledPackage -> [PackageId]
    transitiveDepends pkg =
      map (packageId . toPkg)
          (tail (Graph.reachable graph (fromJust (toVertex (packageId pkg)))))

    (graph, toPkg, toVertex) = dependencyGraph installed
-- | Annotate each available packages with its topological sort number and any
-- user-supplied partial flag assignment.
--
-- | Annotate each source package with its topological sort number and
-- any user-supplied partial flag assignment and stanza selection taken
-- from the top-level constraints.
annotateSourcePackages :: [PackageConstraint]
                       -> (PackageName -> TopologicalSortNumber)
                       -> PackageIndex SourcePackage
                       -> PackageIndex UnconfiguredPackage
annotateSourcePackages constraints dfsNumber sourcePkgIndex =
    PackageIndex.fromList
      (map annotate (PackageIndex.allPackages sourcePkgIndex))
  where
    annotate pkg =
        UnconfiguredPackage pkg (dfsNumber name) (flagsFor name) (stanzasFor name)
      where
        name = packageName pkg

    -- User flag assignments, empty when none were given for this name.
    flagsFor name = Map.findWithDefault [] name flagsMap
    flagsMap = Map.fromList
                 [ (name, flags)
                 | PackageConstraintFlags name flags <- constraints ]

    -- User stanza selections, merged when given more than once.
    stanzasFor name = Map.findWithDefault [] name stanzasMap
    stanzasMap = Map.fromListWith (++)
                   [ (name, stanzas)
                   | PackageConstraintStanzas name stanzas <- constraints ]
-- | One of the heuristics we use when guessing which path to take in the
-- search space is an ordering on the choices we make. It's generally better
-- to make decisions about packages higher in the dep graph first since they
-- place constraints on packages lower in the dep graph.
--
-- To pick them in that order we annotate each package with its topological
-- sort number. So if package A depends on package B then package A will have
-- a lower topological sort number than B and we'll make a choice about which
-- version of A to pick before we make a choice about B (unless there is only
-- one possible choice for B in which case we pick that immediately).
--
-- To construct these topological sort numbers we combine and flatten the
-- installed and source package sets. We consider only dependencies between
-- named packages, not including versions and for not-yet-configured packages
-- we look at all the possible dependencies, not just those under any single
-- flag assignment. This means we can actually get impossible combinations of
-- edges and even cycles, but that doesn't really matter here, it's only a
-- heuristic.
--
topologicalSortNumbering :: PackageIndex InstalledPackage
                         -> PackageIndex SourcePackage
                         -> (PackageName -> TopologicalSortNumber)
topologicalSortNumbering installedPkgIndex sourcePkgIndex =
    -- NOTE(review): the irrefutable 'Just vertex' assumes every queried
    -- name appears in one of the two indexes; callers must only ask
    -- about known packages.
    \pkgname -> let Just vertex = toVertex pkgname
                 in topologicalSortNumbers Array.! vertex
  where
    -- Number vertices by their position in a topological sort, so
    -- packages higher in the dependency graph get lower numbers.
    topologicalSortNumbers = Array.array (Array.bounds graph)
                                         (zip (Graph.topSort graph) [0..])
    -- One graph node per package name, combining the installed and
    -- source indexes; edges go to the names of all possible deps.
    (graph, _, toVertex) = Graph.graphFromEdges $
      [ ((), packageName pkg, nub deps)
      | pkgs@(pkg:_) <- PackageIndex.allPackagesByName installedPkgIndex
      , let deps = [ packageName dep
                   | pkg' <- pkgs
                   , dep <- sourceDeps pkg' ] ]
      ++ [ ((), packageName pkg, nub deps)
         | pkgs@(pkg:_) <- PackageIndex.allPackagesByName sourcePkgIndex
         , let deps = [ depName
                      | SourcePackage _ pkg' _ _ <- pkgs
                      , Dependency depName _ <-
                          buildDepends (flattenPackageDescription pkg') ] ]
-- | We don't need the entire index (which is rather large and costly if we
-- force it by examining the whole thing). So trace out the maximal subset of
-- each index that we could possibly ever need. Do this by flattening packages
-- and looking at the names of all possible dependencies.
--
-- | Trace out the subset of each index reachable from the initial set
-- of package names, following the names of all possible dependencies.
-- This avoids forcing the whole (large) indexes.
selectNeededSubset :: PackageIndex InstalledPackage
                   -> PackageIndex SourcePackage
                   -> Set PackageName
                   -> (PackageIndex InstalledPackage
                      ,PackageIndex SourcePackage)
selectNeededSubset installedPkgIndex sourcePkgIndex = select mempty mempty
  where
    -- Worklist traversal: 'remaining' holds names still to process;
    -- the two accumulator indexes grow as names are visited.
    select :: PackageIndex InstalledPackage
           -> PackageIndex SourcePackage
           -> Set PackageName
           -> (PackageIndex InstalledPackage
              ,PackageIndex SourcePackage)
    select installedPkgIndex' sourcePkgIndex' remaining
      | Set.null remaining = (installedPkgIndex', sourcePkgIndex')
      | otherwise = select installedPkgIndex'' sourcePkgIndex'' remaining''
      where
        (next, remaining') = Set.deleteFindMin remaining
        moreInstalled = PackageIndex.lookupPackageName installedPkgIndex next
        moreSource    = PackageIndex.lookupPackageName sourcePkgIndex next
        moreRemaining = -- we filter out packages already included in the indexes
                        -- this avoids an infinite loop if a package depends on itself
                        -- like base-3.0.3.0 with base-4.0.0.0
          filter notAlreadyIncluded
            $ [ packageName dep
              | pkg <- moreInstalled
              , dep <- sourceDeps pkg ]
           ++ [ name
              | SourcePackage _ pkg _ _ <- moreSource
              , Dependency name _ <-
                  buildDepends (flattenPackageDescription pkg) ]
        installedPkgIndex'' = foldl' (flip PackageIndex.insert)
                                     installedPkgIndex' moreInstalled
        sourcePkgIndex''    = foldl' (flip PackageIndex.insert)
                                     sourcePkgIndex' moreSource
        remaining''         = foldl' (flip Set.insert)
                                     remaining' moreRemaining
        notAlreadyIncluded name =
             null (PackageIndex.lookupPackageName installedPkgIndex' name)
          && null (PackageIndex.lookupPackageName sourcePkgIndex' name)
-- | The old top down solver assumes that installed packages are indexed by
-- their source package id. But these days they're actually indexed by an
-- installed package id and there can be many installed packages with the same
-- source package id. This function tries to do a conversion, but it can only
-- be partial.
--
convertInstalledPackageIndex :: InstalledPackageIndex
                             -> PackageIndex InstalledPackage
convertInstalledPackageIndex index' = PackageIndex.fromList
  -- There can be multiple installed instances of each package version,
  -- like when the same package is installed in the global & user DBs.
  -- InstalledPackageIndex.allPackagesBySourcePackageId gives us the
  -- installed packages with the most preferred instances first, so by
  -- picking the first we should get the user one. This is almost but not
  -- quite the same as what ghc does.
  [ InstalledPackage ipkg (sourceDepsOf index' ipkg)
  | (_,ipkg:_) <- InstalledPackageIndex.allPackagesBySourcePackageId index' ]
  where
    -- The InstalledPackageInfo only lists dependencies by the
    -- ComponentId, which means we do not directly know the corresponding
    -- source dependency. The only way to find out is to lookup the
    -- ComponentId to get the InstalledPackageInfo and look at its
    -- source PackageId. But if the package is broken because it depends on
    -- other packages that do not exist then we have a problem: we cannot find
    -- the original source package id. Instead we make up a bogus package id.
    -- This should have the same effect since it should be a dependency on a
    -- nonexistent package.
    sourceDepsOf index ipkg =
      [ maybe (brokenPackageId depid) packageId mdep
      | let depids = InstalledPackageInfo.depends ipkg
            getpkg = InstalledPackageIndex.lookupComponentId index
      , (depid, mdep) <- zip depids (map getpkg depids) ]

    -- Fabricated id for a dependency whose source package cannot be
    -- resolved; deliberately never matches a real package.
    brokenPackageId (ComponentId str) =
      PackageIdentifier (PackageName (str ++ "-broken")) (Version [] [])
-- ------------------------------------------------------------
-- * Post processing the solution
-- ------------------------------------------------------------
-- | Turn the raw selection into final packages: installed packages are
-- committed as-is; source packages get their dependencies pinned to
-- concrete 'ConfiguredId's chosen from the remaining allowed candidates.
finaliseSelectedPackages :: (PackageName -> PackagePreferences)
                         -> SelectedPackages
                         -> Constraints
                         -> [FinalSelectedPackage]
finaliseSelectedPackages pref selected constraints =
  map finaliseSelected (PackageIndex.allPackages selected)
  where
    remainingChoices = Constraints.choices constraints

    finaliseSelected (InstalledOnly ipkg ) = finaliseInstalled ipkg
    finaliseSelected (SourceOnly apkg) = finaliseSource Nothing apkg
    finaliseSelected (InstalledAndSource ipkg apkg) =
      case PackageIndex.lookupPackageId remainingChoices (packageId ipkg) of
        -- picked package not in constraints
        Nothing -> impossible "finaliseSelected no pkg"
        -- constrained to source only: cannot happen for a picked package
        Just (SourceOnly _) -> impossible "finaliseSelected src only"
        -- constrained to installed only: commit the installed instance
        Just (InstalledOnly _) -> finaliseInstalled ipkg
        -- both still allowed: configure the source, remembering the
        -- installed instance so its deps can guide dependency picks
        Just (InstalledAndSource _ _) -> finaliseSource (Just ipkg) apkg

    finaliseInstalled (InstalledPackageEx pkg _ _) = SelectedInstalled pkg

    finaliseSource mipkg (SemiConfiguredPackage pkg flags stanzas deps) =
        SelectedSource (ConfiguredPackage pkg flags stanzas deps')
      where
        -- We cheat in the cabal solver, and classify all dependencies as
        -- library dependencies.
        deps' :: ComponentDeps [ConfiguredId]
        deps' = CD.fromLibraryDeps $ map (confId . pickRemaining mipkg) deps

    -- InstalledOrSource indicates that we either have a source package
    -- available, or an installed one, or both. In the case that we have both
    -- available, we don't yet know if we can pick the installed one (the
    -- dependencies may not match up, for instance); this is verified in
    -- `improvePlan`.
    --
    -- This means that at this point we cannot construct a valid installed
    -- package ID yet for the dependencies. We therefore have two options:
    --
    -- * We could leave the installed package ID undefined here, and have a
    --   separate pass over the output of the top-down solver, fixing all
    --   dependencies so that if we depend on an already installed package we
    --   use the proper installed package ID.
    --
    -- * We can _always_ use fake installed IDs, irrespective of whether the
    --   dependency is on an already installed package or not. This is okay
    --   because (i) the top-down solver does not (and never will) support
    --   multiple package instances, and (ii) we initialize the FakeMap with
    --   fake IDs for already installed packages.
    --
    -- For now we use the second option; if however we change the implementation
    -- of these fake IDs so that we do away with the FakeMap and update a
    -- package reverse dependencies as we execute the install plan and discover
    -- real package IDs, then this is no longer possible and we have to
    -- implement the first option (see also Note [FakeMap] in Cabal).
    confId :: InstalledOrSource InstalledPackageEx UnconfiguredPackage -> ConfiguredId
    confId pkg = ConfiguredId {
        confSrcId  = packageId pkg
      , confInstId = fakeComponentId (packageId pkg)
      }

    -- Choose a concrete candidate for a dependency from the remaining
    -- allowed choices.
    pickRemaining mipkg dep@(Dependency _name versionRange) =
        case PackageIndex.lookupDependency remainingChoices dep of
          []     -> impossible "pickRemaining no pkg"
          [pkg'] -> pkg'
          remaining -> assert (checkIsPaired remaining)
                     $ maximumBy bestByPref remaining
      where
        -- We order candidate packages to pick for a dependency by these
        -- three factors. The last factor is just highest version wins.
        bestByPref =
          comparing (\p -> (isCurrent p, isPreferred p, packageVersion p))
        -- Is the package already used by the installed version of this
        -- package? If so we should pick that first. This stops us from doing
        -- silly things like deciding to rebuild haskell98 against base 3.
        isCurrent = case mipkg :: Maybe InstalledPackageEx of
          Nothing   -> \_ -> False
          Just ipkg -> \p -> packageId p `elem` sourceDeps ipkg
        -- If there is no upper bound on the version range then we apply a
        -- preferred version according to the hackage or user's suggested
        -- version constraints. TODO: distinguish hacks from prefs
        bounded = boundedAbove versionRange
        isPreferred p
          | bounded = True -- any constant will do
          | otherwise = packageVersion p `withinRange` preferredVersions
          where (PackagePreferences preferredVersions _) = pref (packageName p)

        boundedAbove :: VersionRange -> Bool
        boundedAbove vr = case asVersionIntervals vr of
          [] -> True -- this is the inconsistent version range.
          intervals -> case last intervals of
            (_, UpperBound _ _) -> True
            (_, NoUpperBound ) -> False

        -- We really only expect to find more than one choice remaining when
        -- we're finalising a dependency on a paired package.
        checkIsPaired [p1, p2] =
          case Constraints.isPaired constraints (packageId p1) of
            Just p2' -> packageId p2' == packageId p2
            Nothing -> False
        checkIsPaired _ = False
-- | Improve an existing installation plan by, where possible, swapping
-- packages we plan to install with ones that are already installed.
-- This may add additional constraints due to the dependencies of installed
-- packages on other installed packages.
--
-- | Improve an existing installation plan by, where possible, swapping
-- packages we plan to install with ones that are already installed.
-- This may add additional constraints due to the dependencies of installed
-- packages on other installed packages.
improvePlan :: PackageIndex InstalledPackage
            -> Constraints
            -> PackageIndex FinalSelectedPackage
            -> (PackageIndex FinalSelectedPackage, Constraints)
improvePlan installed constraints0 selected0 =
  foldl' improve (selected0, constraints0) (reverseTopologicalOrder selected0)
  where
    improve (selected, constraints) = fromMaybe (selected, constraints)
                                    . improvePkg selected constraints

    -- The idea is to improve the plan by swapping a configured package for
    -- an equivalent installed one. For a particular package the condition is
    -- that the package be in a configured state, that the same version be
    -- already installed with the exact same dependencies and all the packages
    -- in the plan that it depends on are in the installed state
    improvePkg selected constraints pkgid = do
      SelectedSource pkg <- PackageIndex.lookupPackageId selected pkgid
      ipkg <- PackageIndex.lookupPackageId installed pkgid
      guard $ all (isInstalled selected) (sourceDeps pkg)
      tryInstalled selected constraints [ipkg]

    isInstalled selected pkgid =
      case PackageIndex.lookupPackageId selected pkgid of
        Just (SelectedInstalled _) -> True
        _                          -> False

    -- Swap each package in turn, following newly required installed
    -- deps, as long as the constraint set keeps accepting the swaps.
    tryInstalled :: PackageIndex FinalSelectedPackage -> Constraints
                 -> [InstalledPackage]
                 -> Maybe (PackageIndex FinalSelectedPackage, Constraints)
    tryInstalled selected constraints [] = Just (selected, constraints)
    tryInstalled selected constraints (pkg:pkgs) =
      case constraintsOk (packageId pkg) (sourceDeps pkg) constraints of
        Nothing           -> Nothing
        Just constraints' -> tryInstalled selected' constraints' pkgs'
          where
            selected' = PackageIndex.insert (SelectedInstalled pkg) selected
            -- installed deps not yet in the plan must also be swapped in
            pkgs' = catMaybes (map notSelected (sourceDeps pkg)) ++ pkgs
            notSelected pkgid =
              case (PackageIndex.lookupPackageId installed pkgid
                   ,PackageIndex.lookupPackageId selected pkgid) of
                (Just pkg', Nothing) -> Just pkg'
                _                    -> Nothing

    -- Check that pinning all the installed package's exact deps is
    -- compatible with the current constraint set.
    constraintsOk _     []              constraints = Just constraints
    constraintsOk pkgid (pkgid':pkgids) constraints =
      case addPackageDependencyConstraint pkgid dep True constraints of
        Satisfiable constraints' _ -> constraintsOk pkgid pkgids constraints'
        _                          -> Nothing
      where
        dep = thisPackageVersion pkgid'

    reverseTopologicalOrder :: PackageIndex FinalSelectedPackage -> [PackageId]
    reverseTopologicalOrder index = map (packageId . toPkg)
                                  . Graph.topSort
                                  . Graph.transposeG
                                  $ graph
      where (graph, toPkg, _) = dependencyGraph index
-- ------------------------------------------------------------
-- * Adding and recording constraints
-- ------------------------------------------------------------
-- | Constrain a package name to exactly the version of the given
-- package id, recording that this package was selected (so all other
-- versions are excluded because of that selection).
addPackageSelectConstraint :: PackageId -> Constraints
                           -> Satisfiable Constraints
                                          [PackageId] ExclusionReason
addPackageSelectConstraint pkgid constraints =
    Constraints.constrain (packageName pkgid) onlyThisVersion
                          (SelectedOther pkgid) constraints
  where
    onlyThisVersion ver _installed = ver == packageVersion pkgid
-- | Exclude the source version of a package for the given reason. The
-- excluded version remains usable as an installed instance; all other
-- versions are unaffected.
addPackageExcludeConstraint :: PackageId -> ExclusionReason
                            -> Constraints
                            -> Satisfiable Constraints
                                           [PackageId] ExclusionReason
addPackageExcludeConstraint pkgid reason constraints =
    Constraints.constrain (packageName pkgid) excludeThisSource
                          reason constraints
  where
    excludeThisSource ver installed
      | ver /= packageVersion pkgid = True
      | otherwise                   = installed
-- | Constrain a package name to the versions within a dependency's
-- range, and — when 'installedConstraint' is set — additionally to
-- installed instances only. The reason records which package imposed
-- the dependency.
addPackageDependencyConstraint :: PackageId -> Dependency -> Bool
                               -> Constraints
                               -> Satisfiable Constraints
                                              [PackageId] ExclusionReason
addPackageDependencyConstraint pkgid dep@(Dependency pkgname verrange)
                               installedConstraint =
    Constraints.constrain pkgname constraint reason
  where
    -- A candidate satisfies the dependency when its version is in range
    -- and, if an installed instance is required, it is installed.
    -- (not p || q) replaces the redundant 'if p then q else True'.
    constraint ver installed = ver `withinRange` verrange
                            && (not installedConstraint || installed)
    reason = ExcludedByPackageDependency pkgid dep installedConstraint
-- | Apply a user-supplied top-level version constraint: only versions
-- within the given range remain selectable for the package name.
addTopLevelVersionConstraint :: PackageName -> VersionRange
                             -> Constraints
                             -> Satisfiable Constraints
                                            [PackageId] ExclusionReason
addTopLevelVersionConstraint pkgname verrange constraints =
    Constraints.constrain pkgname inRange reason constraints
  where
    inRange ver _installed = ver `withinRange` verrange
    reason = ExcludedByTopLevelConstraintVersion pkgname verrange
-- | Apply a user-supplied top-level "installed" or "source" constraint:
-- restrict the named package to installed instances only, or to source
-- versions only, respectively.
addTopLevelInstalledConstraint,
  addTopLevelSourceConstraint :: PackageName
                              -> Constraints
                              -> Satisfiable Constraints
                                             [PackageId] ExclusionReason
addTopLevelInstalledConstraint pkgname =
    Constraints.constrain pkgname constraint reason
  where
    -- only installed instances pass
    constraint _ver installed = installed
    reason = ExcludedByTopLevelConstraintInstalled pkgname

addTopLevelSourceConstraint pkgname =
    Constraints.constrain pkgname constraint reason
  where
    -- only source (non-installed) candidates pass
    constraint _ver installed = not installed
    reason = ExcludedByTopLevelConstraintSource pkgname
-- ------------------------------------------------------------
-- * Reasons for constraints
-- ------------------------------------------------------------
-- | For every constraint we record we also record the reason that constraint
-- is needed. So if we end up failing due to conflicting constraints then we
-- can give an explanation as to what was conflicting and why.
--
data ExclusionReason =

     -- | We selected this other version of the package. That means we exclude
     -- all the other versions.
     SelectedOther PackageId

     -- | We excluded this version of the package because it failed to
     -- configure, probably because of unsatisfiable deps.
   | ExcludedByConfigureFail [Dependency]

     -- | We excluded this version of the package because another package that
     -- we selected imposed a dependency which this package did not satisfy.
     -- The 'Bool' records whether an installed instance was required.
   | ExcludedByPackageDependency PackageId Dependency Bool

     -- | We excluded this version of the package because it did not satisfy
     -- a constraint given as an original top level input.
     --
   | ExcludedByTopLevelConstraintVersion PackageName VersionRange
   | ExcludedByTopLevelConstraintInstalled PackageName
   | ExcludedByTopLevelConstraintSource PackageName
  deriving Eq
-- | Given an excluded package and the reason it was excluded, produce a human
-- readable explanation.
--
-- One clause per 'ExclusionReason' constructor; each renders a full
-- English sentence naming the excluded package id.
showExclusionReason :: PackageId -> ExclusionReason -> String
showExclusionReason pkgid (SelectedOther pkgid') =
  display pkgid ++ " was excluded because " ++
  display pkgid' ++ " was selected instead"
showExclusionReason pkgid (ExcludedByConfigureFail missingDeps) =
  display pkgid ++ " was excluded because it could not be configured. "
  ++ "It requires " ++ listOf displayDep missingDeps
showExclusionReason pkgid (ExcludedByPackageDependency pkgid' dep installedConstraint)
  = display pkgid ++ " was excluded because " ++ display pkgid' ++ " requires "
 ++ (if installedConstraint then "an installed instance of " else "")
 ++ displayDep dep
showExclusionReason pkgid (ExcludedByTopLevelConstraintVersion pkgname verRange) =
  display pkgid ++ " was excluded because of the top level constraint " ++
  displayDep (Dependency pkgname verRange)
showExclusionReason pkgid (ExcludedByTopLevelConstraintInstalled pkgname)
  = display pkgid ++ " was excluded because of the top level constraint '"
 ++ display pkgname ++ " installed' which means that only installed instances "
 ++ "of the package may be selected."
showExclusionReason pkgid (ExcludedByTopLevelConstraintSource pkgname)
  = display pkgid ++ " was excluded because of the top level constraint '"
 ++ display pkgname ++ " source' which means that only source versions "
 ++ "of the package may be selected."
-- ------------------------------------------------------------
-- * Logging progress and failures
-- ------------------------------------------------------------
-- | Events reported as the solver makes progress: a choice made (with
-- the packages selected and discarded by it), a top-level constraint
-- applied (with the package ids it excluded), or an unconfigurable
-- package excluded during bottom-up pruning.
data Log = Select [SelectedPackage] [PackageId]
         | AppliedVersionConstraint PackageName VersionRange [PackageId]
         | AppliedInstalledConstraint PackageName InstalledConstraint [PackageId]
         | ExcludeUnconfigurable PackageId
-- | The ways the search can fail, carrying enough structure for
-- 'showFailure' to produce a detailed explanation (including, where
-- relevant, the exclusion reasons for each conflicting package).
data Failure
   = NoSuchPackage
       PackageName
   | ConfigureFailed
       SelectablePackage
       [(Dependency, [(PackageId, [ExclusionReason])])]
   | DependencyConflict
       SelectedPackage Dependency Bool
       [(PackageId, [ExclusionReason])]
   | TopLevelVersionConstraintConflict
       PackageName VersionRange
       [(PackageId, [ExclusionReason])]
   | TopLevelVersionConstraintUnsatisfiable
       PackageName VersionRange
   | TopLevelInstallConstraintConflict
       PackageName InstalledConstraint
       [(PackageId, [ExclusionReason])]
   | TopLevelInstallConstraintUnsatisfiable
       PackageName InstalledConstraint
-- | Render a solver log event as a one-line message.
showLog :: Log -> String
showLog (Select selected discarded) = case (selectedMsg, discardedMsg) of
  ("", y) -> y
  (x, "") -> x
  (x,  y) -> x ++ " and " ++ y
  where
    -- e.g. "selecting foo-1.0 (installed)" or a comma list of versions
    selectedMsg = "selecting " ++ case selected of
      []     -> ""
      [s]    -> display (packageId s) ++ " " ++ kind s
      (s:ss) -> listOf id
              $ (display (packageId s) ++ " " ++ kind s)
              : [ display (packageVersion s') ++ " " ++ kind s'
                | s' <- ss ]

    kind (InstalledOnly _)        = "(installed)"
    kind (SourceOnly _)           = "(source)"
    kind (InstalledAndSource _ _) = "(installed or source)"

    -- group discarded ids by name, showing the name once followed by
    -- the remaining bare versions
    discardedMsg = case discarded of
      [] -> ""
      _  -> "discarding " ++ listOf id
        [ element
        | (pkgid:pkgids) <- groupBy (equating packageName) (sort discarded)
        , element <- display pkgid : map (display . packageVersion) pkgids ]

showLog (AppliedVersionConstraint pkgname ver pkgids) =
     "applying constraint " ++ display (Dependency pkgname ver)
  ++ if null pkgids
       then ""
       else " which excludes " ++ listOf display pkgids

showLog (AppliedInstalledConstraint pkgname inst pkgids) =
     "applying constraint " ++ display pkgname ++ " '"
  ++ (case inst of InstalledConstraint -> "installed"; _ -> "source") ++ "' "
  ++ if null pkgids
       then ""
       else "which excludes " ++ listOf display pkgids

showLog (ExcludeUnconfigurable pkgid) =
     "excluding " ++ display pkgid ++ " (it cannot be configured)"
-- | Render a solver failure as a human-readable explanation, including
-- the exclusion reason for every conflicting package where available.
showFailure :: Failure -> String
showFailure (NoSuchPackage pkgname) =
  "The package " ++ display pkgname ++ " is unknown."
showFailure (ConfigureFailed pkg missingDeps) =
     "cannot configure " ++ displayPkg pkg ++ ". It requires "
  ++ listOf (displayDep . fst) missingDeps
  ++ '\n' : unlines (map (uncurry whyNot) missingDeps)
  where
    -- explain each unsatisfied dependency in turn
    whyNot (Dependency name ver) [] =
         "There is no available version of " ++ display name
      ++ " that satisfies " ++ displayVer ver
    whyNot dep conflicts =
         "For the dependency on " ++ displayDep dep
      ++ " there are these packages: " ++ listOf display pkgs
      ++ ". However none of them are available.\n"
      ++ unlines [ showExclusionReason (packageId pkg') reason
                 | (pkg', reasons) <- conflicts, reason <- reasons ]
      where pkgs = map fst conflicts

showFailure (DependencyConflict pkg dep installedConstraint conflicts) =
     "dependencies conflict: "
  ++ displayPkg pkg ++ " requires "
  ++ (if installedConstraint then "an installed instance of " else "")
  ++ displayDep dep ++ " however:\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]

showFailure (TopLevelVersionConstraintConflict name ver conflicts) =
     "constraints conflict: we have the top level constraint "
  ++ displayDep (Dependency name ver) ++ ", but\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]

showFailure (TopLevelVersionConstraintUnsatisfiable name ver) =
     "There is no available version of " ++ display name
  ++ " that satisfies " ++ displayVer ver

showFailure (TopLevelInstallConstraintConflict name InstalledConstraint conflicts) =
     "constraints conflict: "
  ++ "top level constraint '" ++ display name ++ " installed' however\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]

showFailure (TopLevelInstallConstraintUnsatisfiable name InstalledConstraint) =
     "There is no installed version of " ++ display name

showFailure (TopLevelInstallConstraintConflict name SourceConstraint conflicts) =
     "constraints conflict: "
  ++ "top level constraint '" ++ display name ++ " source' however\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]

showFailure (TopLevelInstallConstraintUnsatisfiable name SourceConstraint) =
     "There is no available source version of " ++ display name
-- | Render a version range, simplified first for readability.
displayVer :: VersionRange -> String
displayVer = display . simplifyVersionRange
-- | Render a dependency, simplified first for readability.
displayDep :: Dependency -> String
displayDep = display . simplifyDependency
-- ------------------------------------------------------------
-- * Utils
-- ------------------------------------------------------------
-- | Abort with an \"assertion failure\" message; used for states the
-- solver believes to be unreachable.
impossible :: String -> a
impossible msg = internalError ("assertion failure: " ++ msg)
-- | Abort with an \"internal error\" message.
internalError :: String -> a
internalError msg = error ("internal error: " ++ msg)
-- | Render a package's identifier.
displayPkg :: Package pkg => pkg -> String
displayPkg = display . packageId
-- | Render a list in prose style: @\"\"@, @\"a\"@, @\"a and b\"@,
-- @\"a, b and c\"@.
listOf :: (a -> String) -> [a] -> String
listOf _ [] = []
listOf render [single] = render single
listOf render (first:second:more) = render first ++ continue second more
  where
    -- Commas between middle elements, " and " before the last one.
    continue item []            = " and " ++ render item
    continue item (next:others) = ", " ++ render item ++ continue next others
-- ------------------------------------------------------------
-- * Construct a dependency graph
-- ------------------------------------------------------------
-- | Builds a graph of the package dependencies.
--
-- Dependencies on other packages that are not in the index are discarded.
-- You can check if there are any such dependencies with 'brokenPackages'.
--
-- The top-down solver gets its own implementation, because both
-- `dependencyGraph` in `Distribution.Client.PlanIndex` (in cabal-install) and
-- `dependencyGraph` in `Distribution.Simple.PackageIndex` (in Cabal) both work
-- with `PackageIndex` from `Cabal` (that is, a package index indexed by
-- installed package IDs rather than package names).
--
-- Ideally we would switch the top-down solver over to use that too, so that
-- this duplication could be avoided, but that's a bit of work and the top-down
-- solver is legacy code anyway.
--
-- (NOTE: This is called at two types: InstalledPackage and FinalSelectedPackage.)
dependencyGraph :: PackageSourceDeps pkg
                => PackageIndex pkg
                -> (Graph.Graph,
                    Graph.Vertex -> pkg,
                    PackageId -> Maybe Graph.Vertex)
dependencyGraph index = (graph, vertexToPkg, pkgIdToVertex)
  where
    -- Adjacency lists, one per package in packageId order; dependencies
    -- that are not present in the index are silently dropped here
    -- (pkgIdToVertex returns Nothing, catMaybes removes them).
    graph = Array.listArray bounds $
            map (catMaybes . map pkgIdToVertex . sourceDeps) pkgs
    vertexToPkg vertex = pkgTable Array.! vertex
    pkgIdToVertex = binarySearch 0 topBound

    -- Parallel arrays over the same (0, topBound) index range.
    pkgTable   = Array.listArray bounds pkgs
    pkgIdTable = Array.listArray bounds (map packageId pkgs)
    pkgs = sortBy (comparing packageId) (PackageIndex.allPackages index)
    topBound = length pkgs - 1
    bounds = (0, topBound)

    -- Standard binary search over the sorted pkgIdTable; relies on pkgs
    -- being sorted by packageId above.
    binarySearch a b key
      | a > b     = Nothing
      | otherwise = case compare key (pkgIdTable Array.! mid) of
          LT -> binarySearch a (mid-1) key
          EQ -> Just mid
          GT -> binarySearch (mid+1) b key
      where mid = (a + b) `div` 2
| randen/cabal | cabal-install/Distribution/Client/Dependency/TopDown.hs | bsd-3-clause | 48,703 | 0 | 21 | 12,351 | 9,331 | 4,873 | 4,458 | 724 | 13 |
module T7702Plugin ( plugin ) where
import GhcPlugins
-- A plugin that does nothing but tickle CoreM's writer.
-- | A plugin whose pass list is unchanged, but which performs a large
-- amount of work in 'CoreM' first so its allocation dominates the test.
plugin :: Plugin
plugin = defaultPlugin { installCoreToDos = install }
  where
    install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
    install _ todos = do
        putMsgS "T7702Plugin"
        -- 1 million times, so the allocation in this plugin dominates allocation due
        -- to other compiler flags and the test framework can easily catch the difference
        -- can't use replicateM_ because it causes its own problems
        nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000
        nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000
        return todos
-- this will result in a call to plusWriter in CoreM's
-- >>= implementation, which was causing the space leak
-- | A single no-op 'CoreM' action; going through 'liftIO' forces a real
-- bind (see the plusWriter comment above).
nothing :: CoreM ()
nothing = liftIO (return ())
-- | Ten 'nothing's, manually unrolled (replicateM_ deliberately avoided).
nothingX10 :: CoreM ()
nothingX10 = do
    nothing ; nothing ; nothing ; nothing ; nothing
    nothing ; nothing ; nothing ; nothing ; nothing
-- | One hundred 'nothing's (10 x 'nothingX10').
nothingX100 :: CoreM ()
nothingX100 = do
    nothingX10 ; nothingX10 ; nothingX10 ; nothingX10 ; nothingX10
    nothingX10 ; nothingX10 ; nothingX10 ; nothingX10 ; nothingX10
-- | One thousand 'nothing's (10 x 'nothingX100').
nothingX1000 :: CoreM ()
nothingX1000 = do
    nothingX100 ; nothingX100 ; nothingX100 ; nothingX100 ; nothingX100
    nothingX100 ; nothingX100 ; nothingX100 ; nothingX100 ; nothingX100
-- | Ten thousand 'nothing's (10 x 'nothingX1000').
nothingX10000 :: CoreM ()
nothingX10000 = do
    nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000
    nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000
-- | One hundred thousand 'nothing's (10 x 'nothingX10000').
nothingX100000 :: CoreM ()
nothingX100000 = do
    nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000
    nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000
| ezyang/ghc | testsuite/tests/simplCore/should_compile/T7702plugin/T7702Plugin.hs | bsd-3-clause | 1,886 | 0 | 10 | 412 | 414 | 210 | 204 | 32 | 1 |
-- !!! Conflicting re-exportation of dcon
module M (module Mod144_A,module M) where
import Mod144_A
data Foo1 = Bar
| urbanslug/ghc | testsuite/tests/module/mod144.hs | bsd-3-clause | 119 | 0 | 5 | 21 | 25 | 17 | 8 | 3 | 0 |
-- This caused 6.10.1 to segfault when run with +RTS -hb
-- trac #3001
module Main (main) where
-- | Print 40000 'x' characters via 'show' (regression test; see the
-- header comment about running with +RTS -hb).
main :: IO ()
main = putStrLn (show (replicate 40000 'x'))
| urbanslug/ghc | testsuite/tests/profiling/should_run/T3001.hs | bsd-3-clause | 148 | 0 | 6 | 31 | 35 | 20 | 15 | 3 | 1 |
-- | Feed one value to two functions, pairing up both results.
fork :: (a -> b, a -> c) -> a -> (b, c)
fork (f, g) v = (f v, g v)
-- | Split a list of pairs into a pair of lists, defined via 'fork'.
-- Shadows the Prelude's 'Prelude.unzip'.
unzip :: [(a,b)] -> ([a], [b])
unzip = fork (map fst, map snd)
-- | Apply the first function to the left half of a pair and the second
-- to the right half.
cross :: (a -> b, a -> c) -> (a, a) -> (b, c)
cross (f, g) = \p -> (f (fst p), g (snd p))
| dirkz/Thinking_Functionally_With_Haskell | 4/Unzip.hs | isc | 217 | 0 | 7 | 62 | 182 | 103 | 79 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module MacFinder.Controllers.Delete (
deleteMac
) where
import Web.Scotty.Trans as S
import Web.Scotty.Hastache
import Control.Monad.IO.Class (liftIO)
import qualified Database.Redis as R
import qualified Data.Text.Lazy as T
import qualified Data.ByteString as B
import Control.Monad (when, unless)
import MacFinder.Util (convertTextToByteString)
-- | POST \/delete handler: deletes the Redis key for the submitted MAC
-- address, then redirects home. Any raised error is rendered as text.
deleteMac :: R.Connection -> ScottyH' ()
deleteMac redisConn = post "/delete" $ do
    textMac <- param "mac"
    -- A full MAC ("aa:bb:cc:dd:ee:ff") is 17 characters; reject shorter input.
    when ((T.length . T.strip $ textMac) < 17) (raise "Bad MAC")
    -- Keys are namespaced as "macs:<lower-cased mac>".
    let keyToDelete = B.concat ["macs:", convertTextToByteString . T.toLower $ textMac]
    value <- liftIO $ R.runRedis redisConn $ R.del [keyToDelete]
    liftIO $ print value  -- debug: raw Redis reply
    -- Left = Redis error; Right n = number of keys actually deleted.
    either couldntDelete checkNumDeleted value
    redirect "/"
  `rescue`
    text
  where checkNumDeleted x = unless (x > 0) (couldntDelete ())
        couldntDelete _ = raise "Couldn't delete"
| tlunter/MacFinder | src/MacFinder/Controllers/Delete.hs | mit | 954 | 0 | 16 | 183 | 295 | 159 | 136 | 24 | 1 |
module System.RedProx.Chaos (
) where
| adarqui/redprox-core | src/System/RedProx/Chaos.hs | mit | 38 | 0 | 3 | 5 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
module Text.Hamlet.Parse
( Result (..)
, Content (..)
, Doc (..)
, parseDoc
, HamletSettings (..)
, defaultHamletSettings
, xhtmlHamletSettings
, CloseStyle (..)
, Binding (..)
, NewlineStyle (..)
, specialOrIdent
, DataConstr (..)
, Module (..)
)
where
import Text.Shakespeare.Base
import Control.Applicative ((<$>), Applicative (..))
import Control.Monad
import Control.Arrow
import Data.Char (isUpper)
import Data.Data
import Text.ParserCombinators.Parsec hiding (Line)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Maybe (mapMaybe, fromMaybe, isNothing)
import Language.Haskell.TH.Syntax (Lift (..))
-- | Parse outcome: either an error message or a successful value.
data Result v = Error String | Ok v
    deriving (Show, Eq, Read, Data, Typeable)
instance Monad Result where
    return = Ok
    Error s >>= _ = Error s
    Ok v >>= f = f v
    -- NOTE(review): 'fail' as a 'Monad' method only exists on older
    -- base versions (pre MonadFail split) -- confirm supported GHC range.
    fail = Error
instance Functor Result where
    fmap = liftM
instance Applicative Result where
    pure = return
    (<*>) = ap
-- | One piece of rendered template output.
data Content = ContentRaw String         -- ^ literal text, emitted as-is
             | ContentVar Deref          -- ^ interpolated variable (parsed by contentHash)
             | ContentUrl Bool Deref -- ^ bool: does it include params?
             | ContentEmbed Deref        -- ^ embedded sub-template (contentCaret)
             | ContentMsg Deref          -- ^ i18n message (contentUnder)
             | ContentAttrs Deref        -- ^ spliced attribute list
    deriving (Show, Eq, Read, Data, Typeable)
-- | One logical template line, before indentation-based nesting is
-- resolved. Control lines mirror the $forall/$if/... directives.
data Line = LineForall Deref Binding
          | LineIf Deref
          | LineElseIf Deref
          | LineElse
          | LineWith [(Deref, Binding)]
          | LineMaybe Deref Binding
          | LineNothing
          | LineCase Deref
          | LineOf Binding
          | LineTag
            { _lineTagName :: String
            , _lineAttr :: [(Maybe Deref, String, Maybe [Content])]
            , _lineContent :: [Content]
            , _lineClasses :: [(Maybe Deref, [Content])]
            , _lineAttrs :: [Deref]
            , _lineNoNewline :: Bool
            }
          | LineContent [Content] Bool -- ^ True == avoid newlines
    deriving (Eq, Show, Read)
-- | Split template source into indentation-annotated 'Line's, honouring
-- an optional leading @$newline@ directive which may override the
-- settings' newline style.
parseLines :: HamletSettings -> String -> Result (Maybe NewlineStyle, HamletSettings, [(Int, Line)])
parseLines set s =
    case parse parser s s of
        Left e -> Error $ show e
        Right x -> Ok x
  where
    parser = do
        mnewline <- parseNewline
        -- With no directive, a default style is upgraded to AlwaysNewlines.
        let set' =
                case mnewline of
                    Nothing ->
                        case hamletNewlines set of
                            DefaultNewlineStyle -> set { hamletNewlines = AlwaysNewlines }
                            _ -> set
                    Just n -> set { hamletNewlines = n }
        res <- many (parseLine set')
        return (mnewline, set', res)
    parseNewline =
        (try (many eol' >> spaceTabs >> string "$newline ") >> parseNewline' >>= \nl -> eol' >> return nl) <|>
        return Nothing
    parseNewline' =
        (try (string "always") >> return (Just AlwaysNewlines)) <|>
        (try (string "never") >> return (Just NoNewlines)) <|>
        (try (string "text") >> return (Just NewlinesText))
    eol' = (char '\n' >> return ()) <|> (string "\r\n" >> return ())
-- | Parse a single template line, returning its indentation (number of
-- leading spaces) and the parsed 'Line'. Tabs in indentation are an error.
parseLine :: HamletSettings -> Parser (Int, Line)
parseLine set = do
    ss <- fmap sum $ many ((char ' ' >> return 1) <|>
                           (char '\t' >> fail "Tabs are not allowed in Hamlet indentation"))
    -- Ordered alternatives: directives first, then tags, then plain content.
    x <- doctype <|>
         doctypeDollar <|>
         comment <|>
         ssiInclude <|>
         htmlComment <|>
         doctypeRaw <|>
         backslash <|>
         controlIf <|>
         controlElseIf <|>
         (try (string "$else") >> spaceTabs >> eol >> return LineElse) <|>
         controlMaybe <|>
         (try (string "$nothing") >> spaceTabs >> eol >> return LineNothing) <|>
         controlForall <|>
         controlWith <|>
         controlCase <|>
         controlOf <|>
         angle <|>
         invalidDollar <|>
         (eol' >> return (LineContent [] True)) <|>
         (do
            (cs, avoidNewLines) <- content InContent
            isEof <- (eof >> return True) <|> return False
            -- An empty, unindented line at EOF terminates the template.
            if null cs && ss == 0 && isEof
                then fail "End of Hamlet template"
                else return $ LineContent cs avoidNewLines)
    return (ss, x)
  where
    eol' = (char '\n' >> return ()) <|> (string "\r\n" >> return ())
    eol = eof <|> eol'
    -- "!!!" inserts the configured doctype.
    doctype = do
        try $ string "!!!" >> eol
        return $ LineContent [ContentRaw $ hamletDoctype set ++ "\n"] True
    -- "$doctype NAME" looks the doctype up in the settings.
    doctypeDollar = do
        _ <- try $ string "$doctype "
        name <- many $ noneOf "\r\n"
        eol
        case lookup name $ hamletDoctypeNames set of
            Nothing -> fail $ "Unknown doctype name: " ++ name
            Just val -> return $ LineContent [ContentRaw $ val ++ "\n"] True
    -- "<!..." is passed through verbatim.
    doctypeRaw = do
        x <- try $ string "<!"
        y <- many $ noneOf "\r\n"
        eol
        return $ LineContent [ContentRaw $ concat [x, y, "\n"]] True
    invalidDollar = do
        _ <- char '$'
        fail "Received a command I did not understand. If you wanted a literal $, start the line with a backslash."
    -- "$#" comments are dropped entirely.
    comment = do
        _ <- try $ string "$#"
        _ <- many $ noneOf "\r\n"
        eol
        return $ LineContent [] True
    -- Server-side includes ("<!--#") are kept.
    ssiInclude = do
        x <- try $ string "<!--#"
        y <- many $ noneOf "\r\n"
        eol
        return $ LineContent [ContentRaw $ x ++ y] False
    -- HTML comments themselves are stripped; trailing content is kept raw.
    htmlComment = do
        _ <- try $ string "<!--"
        _ <- manyTill anyChar $ try $ string "-->"
        x <- many nonComments
        eol
        return $ LineContent [ContentRaw $ concat x] False {- FIXME -} -- FIXME handle variables?
    nonComments = (many1 $ noneOf "\r\n<") <|> (do
        _ <- char '<'
        (do
            _ <- try $ string "!--"
            _ <- manyTill anyChar $ try $ string "-->"
            return "") <|> return "<")
    -- Leading backslash: either an explicit blank line or literal content.
    backslash = do
        _ <- char '\\'
        (eol >> return (LineContent [ContentRaw "\n"] True))
            <|> (uncurry LineContent <$> content InContent)
    controlIf = do
        _ <- try $ string "$if"
        spaces
        x <- parseDeref
        _ <- spaceTabs
        eol
        return $ LineIf x
    controlElseIf = do
        _ <- try $ string "$elseif"
        spaces
        x <- parseDeref
        _ <- spaceTabs
        eol
        return $ LineElseIf x
    -- "pattern <- expression", shared by $maybe/$forall/$with.
    binding = do
        y <- identPattern
        spaces
        _ <- string "<-"
        spaces
        x <- parseDeref
        _ <- spaceTabs
        return (x,y)
    bindingSep = char ',' >> spaceTabs
    controlMaybe = do
        _ <- try $ string "$maybe"
        spaces
        (x,y) <- binding
        eol
        return $ LineMaybe x y
    controlForall = do
        _ <- try $ string "$forall"
        spaces
        (x,y) <- binding
        eol
        return $ LineForall x y
    controlWith = do
        _ <- try $ string "$with"
        spaces
        bindings <- (binding `sepBy` bindingSep) `endBy` eol
        return $ LineWith $ concat bindings -- concat because endBy returns a [[(Deref,Ident)]]
    controlCase = do
        _ <- try $ string "$case"
        spaces
        x <- parseDeref
        _ <- spaceTabs
        eol
        return $ LineCase x
    controlOf = do
        _ <- try $ string "$of"
        spaces
        x <- identPattern
        _ <- spaceTabs
        eol
        return $ LineOf x
    -- Parse content pieces until the rule-specific terminator; returns the
    -- merged pieces and whether any piece requested newline avoidance.
    content cr = do
        x <- many $ content' cr
        case cr of
            InQuotes -> void $ char '"'
            NotInQuotes -> return ()
            NotInQuotesAttr -> return ()
            InContent -> eol
        return (cc $ map fst x, any snd x)
      where
        -- Fuse adjacent raw pieces.
        cc [] = []
        cc (ContentRaw a:ContentRaw b:c) = cc $ ContentRaw (a ++ b) : c
        cc (a:b) = a : cc b
    content' cr = contentHash <|> contentAt <|> contentCaret
                              <|> contentUnder
                              <|> contentReg' cr
    contentHash = do
        x <- parseHash
        case x of
            Left str -> return (ContentRaw str, null str)
            Right deref -> return (ContentVar deref, False)
    contentAt = do
        x <- parseAt
        return $ case x of
            Left str -> (ContentRaw str, null str)
            Right (s, y) -> (ContentUrl y s, False)
    contentCaret = do
        x <- parseCaret
        case x of
            Left str -> return (ContentRaw str, null str)
            Right deref -> return (ContentEmbed deref, False)
    contentUnder = do
        x <- parseUnder
        case x of
            Left str -> return (ContentRaw str, null str)
            Right deref -> return (ContentMsg deref, False)
    contentReg' x = (flip (,) False) <$> contentReg x
    -- Which single characters are plain text depends on the context.
    contentReg InContent = (ContentRaw . return) <$> noneOf "#@^\r\n"
    contentReg NotInQuotes = (ContentRaw . return) <$> noneOf "@^#. \t\n\r>"
    contentReg NotInQuotesAttr = (ContentRaw . return) <$> noneOf "@^ \t\n\r>"
    contentReg InQuotes = (ContentRaw . return) <$> noneOf "#@^\"\n\r"
    -- Attribute values may optionally be double-quoted.
    tagAttribValue notInQuotes = do
        cr <- (char '"' >> return InQuotes) <|> return notInQuotes
        fst <$> content cr
    tagIdent = char '#' >> TagIdent <$> tagAttribValue NotInQuotes
    -- ":cond:" prefixes make the following class/attribute conditional.
    tagCond = do
        d <- between (char ':') (char ':') parseDeref
        tagClass (Just d) <|> tagAttrib (Just d)
    tagClass x = do
        clazz <- char '.' >> tagAttribValue NotInQuotes
        let hasHash (ContentRaw s) = any (== '#') s
            hasHash _ = False
        if any hasHash clazz
            then fail $ "Invalid class: " ++ show clazz ++ ". Did you want a space between a class and an ID?"
            else return (TagClass (x, clazz))
    tagAttrib cond = do
        s <- many1 $ noneOf " \t=\r\n><"
        v <- (char '=' >> Just <$> tagAttribValue NotInQuotesAttr) <|> return Nothing
        return $ TagAttrib (cond, s, v)
    -- "*{deref}" splices a whole attribute list.
    tagAttrs = do
        _ <- char '*'
        d <- between (char '{') (char '}') parseDeref
        return $ TagAttribs d
    -- Fold the parsed tag pieces into (name, attrs, classes, attr-splices).
    tag' = foldr tag'' ("div", [], [], [])
    tag'' (TagName s) (_, y, z, as) = (s, y, z, as)
    tag'' (TagIdent s) (x, y, z, as) = (x, (Nothing, "id", Just s) : y, z, as)
    tag'' (TagClass s) (x, y, z, as) = (x, y, s : z, as)
    tag'' (TagAttrib s) (x, y, z, as) = (x, s : y, z, as)
    tag'' (TagAttribs s) (x, y, z, as) = (x, y, z, s : as)
    ident :: Parser Ident
    ident = do
        i <- many1 (alphaNum <|> char '_' <|> char '\'')
        white
        return (Ident i)
       <?> "identifier"
    parens = between (char '(' >> white) (char ')' >> white)
    brackets = between (char '[' >> white) (char ']' >> white)
    braces = between (char '{' >> white) (char '}' >> white)
    comma = char ',' >> white
    atsign = char '@' >> white
    equals = char '=' >> white
    white = skipMany $ char ' '
    wildDots = string ".." >> white
    isVariable (Ident (x:_)) = not (isUpper x)
    isVariable (Ident []) = error "isVariable: bad identifier"
    isConstructor (Ident (x:_)) = isUpper x
    isConstructor (Ident []) = error "isConstructor: bad identifier"
    -- Haskell-like pattern syntax for $of / binding positions.
    identPattern :: Parser Binding
    identPattern = gcon True <|> apat
      where
        apat = choice
            [ varpat
            , gcon False
            , parens tuplepat
            , brackets listpat
            ]
        -- Variable pattern, optionally with an @-as pattern.
        varpat = do
            v <- try $ do v <- ident
                          guard (isVariable v)
                          return v
            option (BindVar v) $ do
                atsign
                b <- apat
                return (BindAs v b)
          <?> "variable"
        -- Constructor pattern; arguments only allowed at the top level.
        gcon :: Bool -> Parser Binding
        gcon allowArgs = do
            c <- try $ do c <- dataConstr
                          return c
            choice
                [ record c
                , fmap (BindConstr c) (guard allowArgs >> many apat)
                , return (BindConstr c [])
                ]
          <?> "constructor"
        -- Possibly-qualified constructor name (Mod.Sub.Con).
        dataConstr = do
            p <- dcPiece
            ps <- many dcPieces
            return $ toDataConstr p ps
        dcPiece = do
            x@(Ident y) <- ident
            guard $ isConstructor x
            return y
        dcPieces = do
            _ <- char '.'
            dcPiece
        toDataConstr x [] = DCUnqualified $ Ident x
        toDataConstr x (y:ys) =
            go (x:) y ys
          where
            go front next [] = DCQualified (Module $ front []) (Ident next)
            go front next (rest:rests) = go (front . (next:)) rest rests
        -- Record pattern, supporting punning and a trailing "..".
        record c = braces $ do
            (fields, wild) <- option ([], False) $ go
            return (BindRecord c fields wild)
          where
            go = (wildDots >> return ([], True))
             <|> (do x <- recordField
                     (xs,wild) <- option ([],False) (comma >> go)
                     return (x:xs,wild))
        recordField = do
            field <- ident
            p <- option (BindVar field) -- support punning
                 (equals >> identPattern)
            return (field,p)
        tuplepat = do
            xs <- identPattern `sepBy` comma
            return $ case xs of
                [x] -> x
                _ -> BindTuple xs
        listpat = BindList <$> identPattern `sepBy` comma
    -- A tag line: "<name attrs...>content"; an empty name means "div".
    angle = do
        _ <- char '<'
        name' <- many $ noneOf " \t.#\r\n!>"
        let name = if null name' then "div" else name'
        xs <- many $ try ((many $ oneOf " \t\r\n") >>
              (tagIdent <|> tagCond <|> tagClass Nothing <|> tagAttrs <|> tagAttrib Nothing))
        _ <- many $ oneOf " \t\r\n"
        _ <- char '>'
        (c, avoidNewLines) <- content InContent
        let (tn, attr, classes, attrsd) = tag' $ TagName name : xs
        if '/' `elem` tn
            then fail "A tag name may not contain a slash. Perhaps you have a closing tag in your HTML."
            else return $ LineTag tn attr c classes attrsd avoidNewLines
-- | One component of a tag header line, as parsed by 'parseLine'.
data TagPiece = TagName String                               -- ^ element name
              | TagIdent [Content]                           -- ^ #id
              | TagClass (Maybe Deref, [Content])            -- ^ (condition, .class)
              | TagAttrib (Maybe Deref, String, Maybe [Content]) -- ^ (condition, key, value)
              | TagAttribs Deref                             -- ^ *{spliced attrs}
    deriving Show
-- | Lexical context for 'parseLine''s content parser; determines both the
-- terminator and which characters count as plain text.
data ContentRule = InQuotes | NotInQuotes | NotInQuotesAttr | InContent

-- | A line together with the lines nested (more deeply indented) under it.
data Nest = Nest Line [Nest]

-- | Group indentation-annotated lines into a forest: a line owns every
-- immediately following line with strictly greater indentation.
nestLines :: [(Int, Line)] -> [Nest]
nestLines [] = []
nestLines ((i, l):rest) =
    let (deeper, rest') = span (\(i', _) -> i' > i) rest
     in Nest l (nestLines deeper) : nestLines rest'
-- | The fully-structured template AST produced by 'nestToDoc'.
data Doc = DocForall Deref Binding [Doc]
         | DocWith [(Deref, Binding)] [Doc]
         | DocCond [(Deref, [Doc])] (Maybe [Doc]) -- ^ branches plus optional else
         | DocMaybe Deref Binding [Doc] (Maybe [Doc]) -- ^ just-body plus optional nothing-body
         | DocCase Deref [(Binding, [Doc])]
         | DocContent Content
    deriving (Show, Eq, Read, Data, Typeable)
-- | Turn the nested line forest into the 'Doc' AST, pairing up
-- $if/$elseif/$else, $maybe/$nothing and $case/$of along the way, and
-- expanding tags into their open/attrs/close content.
nestToDoc :: HamletSettings -> [Nest] -> Result [Doc]
nestToDoc _set [] = Ok []
nestToDoc set (Nest (LineForall d i) inside:rest) = do
    inside' <- nestToDoc set inside
    rest' <- nestToDoc set rest
    Ok $ DocForall d i inside' : rest'
nestToDoc set (Nest (LineWith dis) inside:rest) = do
    inside' <- nestToDoc set inside
    rest' <- nestToDoc set rest
    Ok $ DocWith dis inside' : rest'
nestToDoc set (Nest (LineIf d) inside:rest) = do
    inside' <- nestToDoc set inside
    -- parseConds consumes any following $elseif/$else siblings.
    (ifs, el, rest') <- parseConds set ((:) (d, inside')) rest
    rest'' <- nestToDoc set rest'
    Ok $ DocCond ifs el : rest''
nestToDoc set (Nest (LineMaybe d i) inside:rest) = do
    inside' <- nestToDoc set inside
    -- An immediately following $nothing sibling becomes the else branch.
    (nothing, rest') <-
        case rest of
            Nest LineNothing ninside:x -> do
                ninside' <- nestToDoc set ninside
                return (Just ninside', x)
            _ -> return (Nothing, rest)
    rest'' <- nestToDoc set rest'
    Ok $ DocMaybe d i inside' nothing : rest''
nestToDoc set (Nest (LineCase d) inside:rest) = do
    let getOf (Nest (LineOf x) insideC) = do
            insideC' <- nestToDoc set insideC
            Ok (x, insideC')
        getOf _ = Error "Inside a $case there may only be $of. Use '$of _' for a wildcard."
    cases <- mapM getOf inside
    rest' <- nestToDoc set rest
    Ok $ DocCase d cases : rest'
nestToDoc set (Nest (LineTag tn attrs content classes attrsD avoidNewLine) inside:rest) = do
    let attrFix (x, y, z) = (x, y, [(Nothing, z)])
    -- All class attributes and .class pieces are merged into a single
    -- "class" attribute; see testIncludeClazzes for the conditional case.
    let takeClass (a, "class", b) = Just (a, fromMaybe [] b)
        takeClass _ = Nothing
    let clazzes = classes ++ mapMaybe takeClass attrs
    let notClass (_, x, _) = x /= "class"
    let noclass = filter notClass attrs
    let attrs' =
            case clazzes of
                [] -> map attrFix noclass
                _ -> (testIncludeClazzes clazzes, "class", map (second Just) clazzes)
                     : map attrFix noclass
    -- A tag with content or children always gets a separate closing tag.
    let closeStyle =
            if not (null content) || not (null inside)
                then CloseSeparate
                else hamletCloseStyle set tn
    let end = case closeStyle of
                CloseSeparate ->
                    DocContent $ ContentRaw $ "</" ++ tn ++ ">"
                _ -> DocContent $ ContentRaw ""
        seal = case closeStyle of
                 CloseInside -> DocContent $ ContentRaw "/>"
                 _ -> DocContent $ ContentRaw ">"
        start = DocContent $ ContentRaw $ "<" ++ tn
        attrs'' = concatMap attrToContent attrs'
        newline' = DocContent $ ContentRaw
                 $ case hamletNewlines set of { AlwaysNewlines | not avoidNewLine -> "\n"; _ -> "" }
    inside' <- nestToDoc set inside
    rest' <- nestToDoc set rest
    Ok $ start
       : attrs''
      ++ map (DocContent . ContentAttrs) attrsD
      ++ seal
       : map DocContent content
      ++ inside'
      ++ end
       : newline'
       : rest'
nestToDoc set (Nest (LineContent content avoidNewLine) inside:rest) = do
    inside' <- nestToDoc set inside
    rest' <- nestToDoc set rest
    -- In NewlinesText mode a newline is only emitted between two
    -- consecutive content/tag lines.
    let newline' = DocContent $ ContentRaw
                 $ case hamletNewlines set of { NoNewlines -> ""; _ -> if nextIsContent && not avoidNewLine then "\n" else "" }
        nextIsContent =
            case (inside, rest) of
                ([], Nest LineContent{} _:_) -> True
                ([], Nest LineTag{} _:_) -> True
                _ -> False
    Ok $ map DocContent content ++ newline':inside' ++ rest'
nestToDoc _set (Nest (LineElseIf _) _:_) = Error "Unexpected elseif"
nestToDoc _set (Nest LineElse _:_) = Error "Unexpected else"
nestToDoc _set (Nest LineNothing _:_) = Error "Unexpected nothing"
nestToDoc _set (Nest (LineOf _) _:_) = Error "Unexpected 'of' (did you forget a $case?)"
-- | Simplify a 'Doc' list: recurse into all structures, drop empty raw
-- content, fuse adjacent raw content, and merge consecutive
-- single-branch conditionals that test the same deref.
compressDoc :: [Doc] -> [Doc]
compressDoc [] = []
compressDoc (DocForall d i doc:rest) =
    DocForall d i (compressDoc doc) : compressDoc rest
compressDoc (DocWith dis doc:rest) =
    DocWith dis (compressDoc doc) : compressDoc rest
compressDoc (DocMaybe d i doc mnothing:rest) =
    DocMaybe d i (compressDoc doc) (fmap compressDoc mnothing)
    : compressDoc rest
compressDoc (DocCond [(a, x)] Nothing:DocCond [(b, y)] Nothing:rest)
    | a == b = compressDoc $ DocCond [(a, x ++ y)] Nothing : rest
compressDoc (DocCond x y:rest) =
    DocCond (map (second compressDoc) x) (compressDoc `fmap` y)
    : compressDoc rest
compressDoc (DocCase d cs:rest) =
    DocCase d (map (second compressDoc) cs) : compressDoc rest
compressDoc (DocContent (ContentRaw ""):rest) = compressDoc rest
compressDoc ( DocContent (ContentRaw x)
            : DocContent (ContentRaw y)
            : rest
            ) = compressDoc $ (DocContent $ ContentRaw $ x ++ y) : rest
compressDoc (DocContent x:rest) = DocContent x : compressDoc rest
-- | Parse a complete Hamlet document: lines -> nesting -> 'Doc' AST,
-- compressed. Empty content lines are dropped before nesting.
parseDoc :: HamletSettings -> String -> Result (Maybe NewlineStyle, [Doc])
parseDoc set s = do
    (mnl, set', ls) <- parseLines set s
    let notEmpty (_, LineContent [] _) = False
        notEmpty _ = True
    let ns = nestLines $ filter notEmpty ls
    ds <- nestToDoc set' ns
    return (mnl, compressDoc ds)
-- | Render one attribute into 'Doc's. A 'Just' deref in the first slot
-- wraps the whole attribute in a conditional; the multi-value case is
-- only used for the merged \"class\" attribute.
attrToContent :: (Maybe Deref, String, [(Maybe Deref, Maybe [Content])]) -> [Doc]
attrToContent (Just cond, k, v) =
    [DocCond [(cond, attrToContent (Nothing, k, v))] Nothing]
attrToContent (Nothing, k, []) = [DocContent $ ContentRaw $ ' ' : k]
attrToContent (Nothing, k, [(Nothing, Nothing)]) = [DocContent $ ContentRaw $ ' ' : k]
attrToContent (Nothing, k, [(Nothing, Just v)]) =
    DocContent (ContentRaw (' ' : k ++ "=\""))
    : map DocContent v
   ++ [DocContent $ ContentRaw "\""]
attrToContent (Nothing, k, v) = -- only for class
    DocContent (ContentRaw (' ' : k ++ "=\""))
    : concatMap go (init v)
   ++ go' (last v)
   ++ [DocContent $ ContentRaw "\""]
  where
    -- Each non-final class gets a trailing separator space; the final
    -- one (go') does not.
    go (Nothing, x) = map DocContent (fromMaybe [] x) ++ [DocContent $ ContentRaw " "]
    go (Just b, x) =
        [ DocCond
            [(b, map DocContent (fromMaybe [] x) ++ [DocContent $ ContentRaw " "])]
            Nothing
        ]
    go' (Nothing, x) = maybe [] (map DocContent) x
    go' (Just b, x) =
        [ DocCond
            [(b, maybe [] (map DocContent) x)]
            Nothing
        ]
-- | Settings for parsing of a hamlet document.
data HamletSettings = HamletSettings
    {
      -- | The value to replace a \"!!!\" with. Do not include the trailing
      -- newline.
      hamletDoctype :: String
      -- | Should we add newlines to the output, making it more human-readable?
      -- Useful for client-side debugging but may alter browser page layout.
    , hamletNewlines :: NewlineStyle
      -- | How a tag should be closed. Use this to switch between HTML, XHTML
      -- or even XML output.
    , hamletCloseStyle :: String -> CloseStyle
      -- | Mapping from short names in \"$doctype\" statements to full doctype.
    , hamletDoctypeNames :: [(String, String)]
    }
-- | How newlines are inserted into the rendered output.
data NewlineStyle = NoNewlines -- ^ never add newlines
                  | NewlinesText -- ^ add newlines between consecutive text lines
                  | AlwaysNewlines -- ^ add newlines everywhere
                  | DefaultNewlineStyle -- ^ placeholder; resolved in 'parseLines'
    deriving Show
instance Lift NewlineStyle where
    lift NoNewlines = [|NoNewlines|]
    lift NewlinesText = [|NewlinesText|]
    lift AlwaysNewlines = [|AlwaysNewlines|]
    lift DefaultNewlineStyle = [|DefaultNewlineStyle|]

-- Functions cannot be lifted in general; any close-style function is
-- lifted as the HTML default ('htmlCloseStyle').
instance Lift (String -> CloseStyle) where
    lift _ = [|\s -> htmlCloseStyle s|]

instance Lift HamletSettings where
    lift (HamletSettings a b c d) = [|HamletSettings $(lift a) $(lift b) $(lift c) $(lift d)|]
-- See the html specification for a list of all void elements:
-- https://www.w3.org/TR/html/syntax.html#void-elements
-- | HTML void elements (no closing tag). Note: 'Set.fromAscList'
-- requires this list to stay in ascending order when editing it.
htmlEmptyTags :: Set String
htmlEmptyTags = Set.fromAscList
    [ "area"
    , "base"
    , "basefont" -- not html 5
    , "br"
    , "col"
    , "embed"
    , "frame" -- not html 5
    , "hr"
    , "img"
    , "input"
    , "isindex" -- not html 5
    , "keygen"
    , "link"
    , "meta"
    , "param"
    , "source"
    , "track"
    , "wbr"
    ]
-- | Defaults settings: HTML5 doctype and HTML-style empty tags.
-- | Default settings: HTML5 doctype and HTML-style empty tags.
defaultHamletSettings :: HamletSettings
defaultHamletSettings = HamletSettings "<!DOCTYPE html>" DefaultNewlineStyle htmlCloseStyle doctypeNames
-- | XHTML 1.0 Strict settings: XHTML doctype and self-closing empty tags.
xhtmlHamletSettings :: HamletSettings
xhtmlHamletSettings =
    HamletSettings doctype DefaultNewlineStyle xhtmlCloseStyle doctypeNames
  where
    doctype =
        "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" " ++
        "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"
-- | HTML closing rule: void elements get no closing tag at all.
htmlCloseStyle :: String -> CloseStyle
htmlCloseStyle s =
    if Set.member s htmlEmptyTags
        then NoClose
        else CloseSeparate
-- | XHTML closing rule: void elements are self-closed (\"/>\").
xhtmlCloseStyle :: String -> CloseStyle
xhtmlCloseStyle s =
    if Set.member s htmlEmptyTags
        then CloseInside
        else CloseSeparate
data CloseStyle = NoClose | CloseInside | CloseSeparate
-- | Collect the $elseif branches (and an optional final $else) that
-- follow a $if, using an accumulator function for the branch list;
-- returns the branches, the else body, and the remaining siblings.
parseConds :: HamletSettings
           -> ([(Deref, [Doc])] -> [(Deref, [Doc])])
           -> [Nest]
           -> Result ([(Deref, [Doc])], Maybe [Doc], [Nest])
parseConds set front (Nest LineElse inside:rest) = do
    inside' <- nestToDoc set inside
    Ok (front [], Just inside', rest)
parseConds set front (Nest (LineElseIf d) inside:rest) = do
    inside' <- nestToDoc set inside
    parseConds set (front . (:) (d, inside')) rest
parseConds _ front rest = Ok (front [], Nothing, rest)
-- | Short names accepted by \"$doctype\" and their full doctype strings.
doctypeNames :: [(String, String)]
doctypeNames =
    [ ("5", "<!DOCTYPE html>")
    , ("html", "<!DOCTYPE html>")
    , ("1.1", "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" \"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">")
    , ("strict", "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">")
    ]
-- | Pattern syntax used in $forall/$maybe/$with/$of positions.
data Binding = BindVar Ident
             | BindAs Ident Binding                      -- ^ v\@pat
             | BindConstr DataConstr [Binding]           -- ^ Con p1 p2 ...
             | BindTuple [Binding]
             | BindList [Binding]
             | BindRecord DataConstr [(Ident, Binding)] Bool -- ^ Bool: had a ".." wildcard
    deriving (Eq, Show, Read, Data, Typeable)

-- | A data constructor name, optionally module-qualified.
data DataConstr = DCQualified Module Ident
                | DCUnqualified Ident
    deriving (Eq, Show, Read, Data, Typeable)

-- | A module name as its dot-separated components.
newtype Module = Module [String]
    deriving (Eq, Show, Read, Data, Typeable)
-- | Zero or more spaces and tabs.
spaceTabs :: Parser String
spaceTabs = many $ oneOf " \t"
-- | When using conditional classes, it will often be a single class, e.g.:
--
-- > <div :isHome:.homepage>
--
-- If isHome is False, we do not want any class attribute to be present.
-- However, due to combining multiple classes together, the most obvious
-- implementation would produce a class="". The purpose of this function is to
-- work around that. It does so by checking if all the classes on this tag are
-- optional. If so, it will only include the class attribute if at least one
-- conditional is true.
-- | See the explanatory comment above: if every class is conditional,
-- guard the whole class attribute on the OR of the conditions; otherwise
-- ('Nothing') the attribute is always emitted.
testIncludeClazzes :: [(Maybe Deref, [Content])] -> Maybe Deref
testIncludeClazzes cs
    | any (isNothing . fst) cs = Nothing
    | otherwise = Just $ DerefBranch (DerefIdent specialOrIdent) $ DerefList $ mapMaybe fst cs
-- | This funny hack is to allow us to refer to the 'or' function without
-- requiring the user to have it in scope. See how this function is used in
-- Text.Hamlet.
-- | Magic identifier resolved to 'or' by Text.Hamlet (see comment above).
specialOrIdent :: Ident
specialOrIdent = Ident "__or__hamlet__special"
| psibi/shakespeare | Text/Hamlet/Parse.hs | mit | 25,913 | 0 | 26 | 8,086 | 8,509 | 4,347 | 4,162 | 633 | 28 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.HTMLOptGroupElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.HTMLOptGroupElement
#else
module Graphics.UI.Gtk.WebKit.DOM.HTMLOptGroupElement
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLOptGroupElement
#else
import Graphics.UI.Gtk.WebKit.DOM.HTMLOptGroupElement
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/HTMLOptGroupElement.hs | mit | 480 | 0 | 5 | 39 | 33 | 26 | 7 | 4 | 0 |
-- | A binary tree with a value at every node.
data Tree a = Empty | Node a (Tree a) (Tree a) deriving Show

-- | Map a function over every node value (tree argument comes first).
treeMap :: Tree a -> (a -> b) -> Tree b
treeMap tree f =
    case tree of
        Empty          -> Empty
        Node v lhs rhs -> Node (f v) (treeMap lhs f) (treeMap rhs f)

-- | Build a node labelled @x@ whose two (shared) children are @t@ with
-- every value increased by @y@.
clone :: Num a => Tree a -> a -> a -> Tree a
clone t x y =
    let grown = treeMap t (+ y)
     in Node x grown grown

-- | Infinite list of trees: each element clones the previous one,
-- keeping its root label and bumping all values by one.
cloningTrees :: Num t => [Tree t]
cloningTrees =
    Node 1 Empty Empty : map step cloningTrees
  where
    step t@(Node x _ _) = clone t x 1
| fmi-lab/fp-elective-2017 | exams/02/variant-b/task-2.hs | mit | 437 | 0 | 11 | 109 | 250 | 126 | 124 | 10 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Main where
import Asm as A
import AsmOptimize
import Assembler
import Compiler
import Memory
import Parser
import Pretty ( pretty_ )
import PrettyAsm ( pretty )
import Data.Attoparsec.ByteString ( parseOnly )
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Char8 as C8
import qualified Data.Text.Lazy.IO as TIO
import Foreign
import System.Environment ( getArgs )
import System.Exit ( exitFailure )
-- | Assemble the given program to bytes; on failure, print a description
-- of the assembler error to stdout and terminate the process.
assembleIO asm = case assemble asm of
    Right bs -> return bs
    Left e -> do
        putStr "Assembler error: "
        putStrLn (describe e)
        exitFailure
  where
    describe InvalidOpcode = "invalid opcode"
    describe (UnsupportedOpcode _) = "unsupported opcode"
    describe UnassignedLabel = "unassigned label"
    describe (LabelError _) = "label lookup failed"
-- | Compile a brainfuck source file (first CLI argument) to x86-64 machine
-- code, write pretty-printed artifacts, then execute the optimized code.
main :: IO ()
main = do
    -- NOTE(review): partial pattern — crashes with a match failure when no
    -- argument is supplied; consider printing usage instead. TODO confirm.
    arg1:_ <- getArgs
    file <- C8.readFile arg1
    -- Parse the brainfuck source; abort with the parser's message on error.
    bf <- case parseOnly brainfuck file of
        Left e -> do
            putStrLn "Parse error: "
            putStrLn e
            exitFailure
        Right x -> return x
    -- Round-trip the parsed program back to disk for inspection.
    let prettyBf = pretty_ bf
    writeFile "out.bf" prettyBf
    -- Compile to assembly; the tape pointer arrives in rdi and is moved to rax.
    let asm = asmFunction $ do
            mov rax rdi
            compileFuck bf
    let optimizedAsm = optimizeAsm asm
    -- Dump both the optimized and unoptimized assembly listings.
    TIO.writeFile "opt.asm" (pretty optimizedAsm)
    TIO.writeFile "out.asm" (pretty asm)
    code <- assembleIO asm
    optimizedCode <- assembleIO optimizedAsm
    let b = BS.toStrict code
    let bOpt = BS.toStrict optimizedCode
    let nb = C8.length b
    let nbOpt = C8.length bOpt
    -- Report the size win from optimization.
    putStrLn $ "Assembled " ++ show nb ++ " bytes (unoptimized)."
    putStrLn $ "Assembled " ++ show nbOpt ++ " bytes (optimized)."
    putStrLn $ show (fromIntegral (nb - nbOpt) / fromIntegral nb * 100)
        ++ "% improvement"
    -- Turn the optimized bytes into a callable function and run it on a
    -- freshly allocated 4096-byte tape.
    f <- byteStringFunction bOpt
    mem <- callocBytes 4096
    () <- f mem
    free mem
| djeik/fuckdown2 | src/Main.hs | mit | 2,041 | 0 | 15 | 546 | 572 | 273 | 299 | 63 | 5 |
{-|
Module : Main
Description : Parses command line and dispatches to correct backend
Copyright : (c) Rodrigo Setti, 2017
License : MIT
Maintainer : rodrigosetti@gmail.com
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE UnicodeSyntax #-}
module Main (main) where
import Data.List (intercalate)
import qualified Data.List.NonEmpty as NE
import Data.Maybe (catMaybes, fromMaybe)
import Data.Semigroup ((<>))
import qualified Data.Text.IO as TIO
import MasterPlan.Backend.Graph
import MasterPlan.Data
import qualified MasterPlan.Parser as P
import Options.Applicative
import System.Exit (die)
import System.IO (stdin)
-- |Options produced by the command line parser
data Opts = Opts { inputPath :: Maybe FilePath -- ^ plan file to read (stdin when absent)
                 , outputPath :: Maybe FilePath -- ^ image file to write (defaults to "output.pdf")
                 , rootKey :: ProjectKey -- ^ name of the root project
                 , projFilter :: ProjFilter -- ^ filter to consider
                 , renderParsingError :: Bool -- ^ will render the parsing error instead of printing
                 , parseStrict :: Bool -- ^ every project has to be defined
                 , renderOptions :: RenderOptions } -- ^ options forwarded to the graph backend
type ProjFilter = ProjectExpr → Bool
-- |A filter that keeps every project
noFilter ∷ ProjFilter
noFilter _ = True
-- |A 'ReadM' that accepts exactly the keys of the given association list
readEnum ∷ [(String, a)] → ReadM a
readEnum mapping = maybeReader (\key -> lookup key mapping)
-- |The command line parser
-- Builds an 'Opts' from: optional input and output paths, the root project
-- key (default "root"), an optional progress filter, two switches, and the
-- rendering options (colors, dimensions, hidden properties).
cmdParser ∷ Parser Opts
cmdParser = Opts <$> optional (strArgument ( help "plan file to read from (default from stdin)"
                                          <> metavar "FILENAME" ))
                 <*> optional (strOption ( long "output"
                                        <> short 'o'
                                        <> help "output file name (.png, .tif, .bmp, .jpg and .pdf supported)"
                                        <> metavar "FILENAME" ))
                 <*> strOption ( long "root"
                              <> short 'r'
                              <> help "name of the root project definition"
                              <> value "root"
                              <> showDefault
                              <> metavar "NAME")
                 <*> (filterParser <|> pure noFilter)
                 <*> switch ( long "render-parse-error"
                            <> help "instead of printing parsing errors, render as an image")
                 <*> switch ( long "strict"
                            <> help "strict parsing: every project has to be defined")
                 <*> renderOptionsParser
  where
    renderOptionsParser ∷ Parser RenderOptions
    -- Width/height default to -1 (meaning "unset" for the backend).
    renderOptionsParser = RenderOptions <$> switch ( long "color"
                                                   <> short 'c'
                                                   <> help "color each project by progress")
                                        <*> option auto ( long "width"
                                                        <> short 'w'
                                                        <> help "width of the output image"
                                                        <> value (-1)
                                                        <> metavar "NUMBER")
                                        <*> option auto ( long "height"
                                                        <> help "height of the output image"
                                                        <> value (-1)
                                                        <> metavar "NUMBER")
                                        <*> (invertProps <$> many (option property ( long "hide"
                                                                                   <> help "hide a particular property"
                                                                                   <> metavar (intercalate "|" $ map fst propertyNames))))
    -- All attribute names, paired with their values, for --hide parsing.
    propertyNames = map (\p -> (show p, p)) [minBound :: ProjAttribute ..]
    property = readEnum propertyNames
    -- --hide lists properties to drop; the backend wants those to keep.
    invertProps ∷ [ProjAttribute] → [ProjAttribute]
    invertProps l = filter (`notElem` l) $ map snd propertyNames
-- |Parser for the --progress-below filter (keeps projects with progress < N%)
filterParser ∷ Parser ProjFilter
filterParser = (belowThreshold . Progress) <$> option auto ( long "progress-below"
                            <> help "only display projects which progress is < N%"
                            <> metavar "N" )
  where
    belowThreshold n project = progress project * 100 < n
-- |Program entry point: parse the command line, then run 'masterPlan'.
main ∷ IO ()
main = execParser opts >>= masterPlan
  where
    opts = info (cmdParser <**> helper)
                ( fullDesc
                <> progDesc "See documentation on how to write project plan files"
                <> header "master-plan - project management tool for hackers" )
-- |Prune a project expression, keeping only subtrees accepted by the filter.
filterProj ∷ ProjFilter -> ProjectExpr → Maybe ProjectExpr
filterProj keep expr@(Sum r subs) = filterHelper expr keep subs (Sum r)
filterProj keep expr@(Product r subs) = filterHelper expr keep subs (Product r)
filterProj keep expr@(Sequence r subs) = filterHelper expr keep subs (Sequence r)
filterProj keep expr
  | keep expr = Just expr
  | otherwise = Nothing
-- |Shared pruning logic for composite nodes: if the node itself passes the
-- filter, recursively prune its children and rebuild; a node whose children
-- are all pruned away is dropped entirely.
filterHelper :: ProjectExpr
             -> ProjFilter
             -> NE.NonEmpty ProjectExpr
             -> (NE.NonEmpty ProjectExpr -> ProjectExpr)
             -> Maybe ProjectExpr
filterHelper node keep children rebuild
  | keep node = rebuild <$> surviving
  | otherwise = Nothing
  where
    surviving = NE.nonEmpty (catMaybes (NE.toList (fmap (filterProj keep) children)))
-- |Read the plan (file or stdin), parse it, then filter, prioritize and
-- render the project tree.  Parse errors are either rendered as an image
-- (--render-parse-error) or printed and the program exits non-zero.
masterPlan ∷ Opts → IO ()
masterPlan opts =
  do contents <- maybe (TIO.hGetContents stdin) TIO.readFile $ inputPath opts
     -- Default output file is "output.pdf" when --output is not given.
     -- (The original nested fromMaybe computed exactly this.)
     let outfile = fromMaybe "output.pdf" $ outputPath opts
     case P.runParser (parseStrict opts) (fromMaybe "stdin" $ inputPath opts) contents (rootKey opts) of
       Left e -> if renderParsingError opts
                   then renderText outfile (renderOptions opts) (lines e)
                   else die e
       Right p ->
         do -- A fully filtered-out plan degrades to the default atomic project.
            let p' = fromMaybe defaultAtomic $ prioritize <$> filterProj (projFilter opts) p
            render outfile (renderOptions opts) p'
| rodrigosetti/master-plan | app/Main.hs | mit | 6,328 | 0 | 18 | 2,709 | 1,300 | 662 | 638 | 98 | 3 |
module Melchior.EventSources.Mouse where
import Control.Applicative
import Melchior.Control
import Melchior.Data.String
import Melchior.Dom
import Melchior.Dom.Events
-- | Signal of mouse coordinates over the given element, derived from its
-- mousemove events.  (The redundant lambda @\x -> coords x@ was eta-reduced.)
position :: Element -> Signal (Int, Int)
position el = coords <$> moves
  where moves = createEventedSignal (Of MouseMove) el (MouseEvt MouseMove)
-- | Signal of click events on the given element.
click :: Element -> Signal MouseEvent
click el = createEventedSignal (Of ClickEvt) el (MouseEvt ClickEvt)
foreign import js "Events.coordinates(%1)"
coords :: MouseEvent -> (Int, Int) | kjgorman/melchior | Melchior/EventSources/Mouse.hs | mit | 515 | 1 | 9 | 82 | 155 | 86 | 69 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Data.Type.Nat
( Nat(..)
, type (+)
, Value(..)
) where
import Data.Proxy
-- | Peano naturals.  Promoted to the type level via DataKinds; also used at
-- the value level as the result of 'value'.
data Nat = Z | S Nat
-- | Type-level addition, by recursion on the second addend:
-- @n + Z = n@ and @n + S m = S n + m@ (the successor is shifted onto the
-- first argument each step).
type family (n :: Nat) + (m :: Nat) :: Nat
type instance n + Z = n
type instance n + (S m) = (S n) + m
-- Links the type- and value-level
class Value (n :: Nat) where
  -- | Demote the type-level natural @n@ to its value-level representation.
  value :: Proxy n -> Nat
-- | Base case: type-level 'Z' demotes to value-level 'Z'.
instance Value Z where
  value _ = Z
-- | Inductive case: peel one 'S' and recurse on the predecessor type.
instance Value n => Value (S n) where
  value _ = S $ value (Proxy :: Proxy n)
| nickspinale/lambda-calculi | src/Data/Type/Nat.hs | mit | 830 | 4 | 9 | 176 | 211 | 125 | 86 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module EDDA.Schema.ShipyardV1 where
import EDDA.Types
import EDDA.Schema.Util
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
-- | Extract the "ships" string array from the message payload, if present.
getShips :: Value -> ConfigT (Maybe [Str])
getShips v = return (getStrArray v "ships")
-- | Parse a shipyard/1 message into a 'ShipyardInfo'.  Yields 'Nothing'
-- when any of the required fields is missing.
parseShipyard :: Value -> ConfigT (Maybe MessageInfo)
parseShipyard v = do
    maybeShips <- getShips v
    return $ do
        system <- getStr v "systemName"
        station <- getStr v "stationName"
        ts <- getTimestamp v "timestamp"
        shipList <- maybeShips
        return ShipyardInfo { shipyardInfoSystemName = system,
                              shipyardInfoStationName = station,
                              shipyardInfoTimestamp = ts,
                              shipyardInfoShips = HS.fromList shipList }
| troydm/edda | src/EDDA/Schema/ShipyardV1.hs | mit | 1,160 | 0 | 14 | 468 | 226 | 125 | 101 | 24 | 1 |
{-# Language TupleSections, ViewPatterns #-}
{- |
Module : Language.Egison.Core
Copyright : Satoshi Egi
Licence : MIT
This module provides functions to evaluate various objects.
-}
module Language.Egison.Core
(
-- * Egison code evaluation
evalTopExprs
, evalTopExprsTestOnly
, evalTopExprsNoIO
, evalTopExpr
, evalExpr
, evalExprDeep
, evalRef
, evalRefDeep
, evalWHNF
, applyFunc
-- * Environment
, recursiveBind
-- * Pattern matching
, patternMatch
-- * Collection
, isEmptyCollection
, unconsCollection
, unsnocCollection
-- * Utiltiy functions
, packStringValue
) where
import Prelude hiding (mapM, mappend)
import Control.Arrow
import Control.Applicative
import Control.Monad.Error hiding (mapM)
import Control.Monad.State hiding (mapM, state)
import Control.Monad.Trans.Maybe
import Data.Sequence (Seq, ViewL(..), ViewR(..), (><))
import qualified Data.Sequence as Sq
import Data.Ratio
import Data.Foldable (toList)
import Data.Traversable (mapM)
import Data.IORef
import Data.Maybe
import Data.Array ((!))
import qualified Data.Array as Array
import qualified Data.HashMap.Lazy as HL
import Data.Text (Text)
import qualified Data.Text as T
import Language.Egison.Types
import Language.Egison.Parser
--
-- Evaluator
--
-- | Evaluate a program: definitions are collected (expanding Load/LoadFile
-- inline) and bound mutually-recursively first, then the remaining
-- 'Execute' expressions are run in order.
-- NOTE(review): near-duplicate of 'evalTopExprsTestOnly' and
-- 'evalTopExprsNoIO'; only the kind of expression kept in @rest@ differs.
evalTopExprs :: Env -> [EgisonTopExpr] -> EgisonM Env
evalTopExprs env exprs = do
  (bindings, rest) <- collectDefs exprs [] []
  env <- recursiveBind env bindings
  forM_ rest $ evalTopExpr env
  return env
  where
    collectDefs (expr:exprs) bindings rest =
      case expr of
        Define name expr -> collectDefs exprs ((name, expr) : bindings) rest
        Load file -> do
          exprs' <- loadLibraryFile file
          collectDefs (exprs' ++ exprs) bindings rest
        LoadFile file -> do
          exprs' <- loadFile file
          collectDefs (exprs' ++ exprs) bindings rest
        Execute _ -> collectDefs exprs bindings (expr : rest)
        _ -> collectDefs exprs bindings rest
    collectDefs [] bindings rest = return (bindings, reverse rest)
-- | Like 'evalTopExprs', but only 'Test' expressions are executed after
-- the definitions are bound ('Execute' forms and anything else are dropped).
evalTopExprsTestOnly :: Env -> [EgisonTopExpr] -> EgisonM Env
evalTopExprsTestOnly env exprs = do
  (bindings, rest) <- collectDefs exprs [] []
  env <- recursiveBind env bindings
  forM_ rest $ evalTopExpr env
  return env
  where
    collectDefs (expr:exprs) bindings rest =
      case expr of
        Define name expr -> collectDefs exprs ((name, expr) : bindings) rest
        Load file -> do
          exprs' <- loadLibraryFile file
          collectDefs (exprs' ++ exprs) bindings rest
        LoadFile file -> do
          exprs' <- loadFile file
          collectDefs (exprs' ++ exprs) bindings rest
        Test _ -> collectDefs exprs bindings (expr : rest)
        _ -> collectDefs exprs bindings rest
    collectDefs [] bindings rest = return (bindings, reverse rest)
-- | Like 'evalTopExprs', but 'Load'/'LoadFile' are rejected with an error
-- (no filesystem access); every non-definition expression is kept and run.
evalTopExprsNoIO :: Env -> [EgisonTopExpr] -> EgisonM Env
evalTopExprsNoIO env exprs = do
  (bindings, rest) <- collectDefs exprs [] []
  env <- recursiveBind env bindings
  forM_ rest $ evalTopExpr env
  return env
  where
    collectDefs (expr:exprs) bindings rest =
      case expr of
        Define name expr -> collectDefs exprs ((name, expr) : bindings) rest
        Load _ -> throwError $ strMsg "No IO support"
        LoadFile _ -> throwError $ strMsg "No IO support"
        _ -> collectDefs exprs bindings (expr : rest)
    collectDefs [] bindings rest = return (bindings, reverse rest)
-- | Evaluate one top-level expression, print its textual result (if any)
-- to stdout, and return the possibly-extended environment.
evalTopExpr :: Env -> EgisonTopExpr -> EgisonM Env
evalTopExpr env topExpr = do
  (mOutput, env') <- evalTopExpr' env topExpr
  maybe (return ()) (liftIO . putStrLn) mOutput
  return env'
-- | Evaluate one top-level expression, returning the string to print (if
-- any) together with the resulting environment.  'Define' extends the
-- environment; 'Test' yields its shown value; 'Execute' runs an IO
-- function; 'Load'/'LoadFile' evaluate a whole file's expressions.
evalTopExpr' :: Env -> EgisonTopExpr -> EgisonM (Maybe String, Env)
evalTopExpr' env (Define name expr) = recursiveBind env [(name, expr)] >>= return . ((,) Nothing)
evalTopExpr' env (Test expr) = do
  val <- evalExprDeep env expr
  return (Just (show val), env)
evalTopExpr' env (Execute expr) = do
  io <- evalExpr env expr
  case io of
    Value (IOFunc m) -> m >> return (Nothing, env)
    _ -> throwError $ TypeMismatch "io" io
evalTopExpr' env (Load file) = loadLibraryFile file >>= evalTopExprs env >>= return . ((,) Nothing)
evalTopExpr' env (LoadFile file) = loadFile file >>= evalTopExprs env >>= return . ((,) Nothing)
-- | Evaluate an expression to weak head normal form.  Literals become
-- 'Value's immediately; composite forms become 'Intermediate' structures
-- whose components are unevaluated 'ObjectRef' thunks.
evalExpr :: Env -> EgisonExpr -> EgisonM WHNFData
-- Literals.
evalExpr _ (CharExpr c) = return . Value $ Char c
evalExpr _ (StringExpr s) = return $ Value $ toEgison s
evalExpr _ (BoolExpr b) = return . Value $ Bool b
evalExpr _ (NumberExpr x y) = return . Value $ reduceFraction (Number x y)
evalExpr _ (FloatExpr x y) = return . Value $ Float x y
-- Variable lookup forces the bound reference to WHNF.
evalExpr env (VarExpr name) = refVar env name >>= evalRef
evalExpr _ (InductiveDataExpr name []) = return . Value $ InductiveData name []
evalExpr env (InductiveDataExpr name exprs) =
  Intermediate . IInductiveData name <$> mapM (newObjectRef env) exprs
-- A one-element tuple is transparent.
evalExpr _ (TupleExpr []) = return . Value $ Tuple []
evalExpr env (TupleExpr [expr]) = evalExpr env expr
evalExpr env (TupleExpr exprs) = Intermediate . ITuple <$> mapM (newObjectRef env) exprs
evalExpr _ (CollectionExpr []) = return . Value $ Collection Sq.empty
evalExpr env (CollectionExpr inners) = do
  inners' <- mapM fromInnerExpr inners
  innersSeq <- liftIO $ newIORef $ Sq.fromList inners'
  return $ Intermediate $ ICollection innersSeq
    where
      fromInnerExpr :: InnerExpr -> EgisonM Inner
      fromInnerExpr (ElementExpr expr) = IElement <$> newObjectRef env expr
      fromInnerExpr (SubCollectionExpr expr) = ISubCollection <$> newObjectRef env expr
-- Arrays are 1-indexed.
evalExpr env (ArrayExpr exprs) = do
  refs' <- mapM (newObjectRef env) exprs
  return . Intermediate . IArray $ Array.listArray (1, toInteger (length exprs)) refs'
-- Hashes: all keys are evaluated first and the hash flavor (int/char/str)
-- is chosen by the first key.
-- NOTE(review): a non-IntKey appearing after an IntKey head falls into a
-- partial inner case; also the [] branch maps over an empty key list
-- (harmless, produces an empty int hash).
evalExpr env (HashExpr assocs) = do
  let (keyExprs, exprs) = unzip assocs
  keyWhnfs <- mapM (evalExpr env) keyExprs
  keys <- mapM makeHashKey keyWhnfs
  refs <- mapM (newObjectRef env) exprs
  case keys of
    [] -> do
      let keys' = map (\key -> case key of IntKey i -> i) keys
      return . Intermediate . IIntHash $ HL.fromList $ zip keys' refs
    _ ->
     case head keys of
       IntKey _ -> do
         let keys' = map (\key -> case key of IntKey i -> i) keys
         return . Intermediate . IIntHash $ HL.fromList $ zip keys' refs
       CharKey _ -> do
         let keys' = map (\key -> case key of CharKey c -> c) keys
         return . Intermediate . ICharHash $ HL.fromList $ zip keys' refs
       StrKey _ -> do
         let keys' = map (\key -> case key of StrKey s -> s) keys
         return . Intermediate . IStrHash $ HL.fromList $ zip keys' refs
  where
    makeHashKey :: WHNFData -> EgisonM EgisonHashKey
    makeHashKey (Value val) =
      case val of
        Number _ _ -> fromEgison val >>= (return . IntKey)
        Char c -> return (CharKey c)
        String str -> return (StrKey str)
        _ -> throwError $ TypeMismatch "integer or string" $ Value val
    makeHashKey whnf = throwError $ TypeMismatch "integer or string" $ whnf
-- Indexing: indices are evaluated deeply, then applied via refArray.
evalExpr env (IndexedExpr expr indices) = do
  array <- evalExpr env expr
  indices' <- mapM (evalExprDeep env) indices
  refArray array indices'
evalExpr env (LambdaExpr names expr) = return . Value $ Func env names expr
evalExpr env (PatternFunctionExpr names pattern) = return . Value $ PatternFunc env names pattern
evalExpr env (IfExpr test expr expr') = do
  test <- evalExpr env test >>= fromWHNF
  evalExpr env $ if test then expr else expr'
-- let: bindings are non-recursive; tuple bindings destructure the value.
evalExpr env (LetExpr bindings expr) =
  mapM extractBindings bindings >>= flip evalExpr expr . extendEnv env . concat
 where
  extractBindings :: BindingExpr -> EgisonM [Binding]
  extractBindings ([name], expr) =
    makeBindings [name] . (:[]) <$> newObjectRef env expr
  extractBindings (names, expr) =
    makeBindings names <$> (evalExpr env expr >>= fromTuple)
-- letrec: tuple bindings are desugared into a fresh "#N" variable plus one
-- match-based projection per component, then bound recursively.
evalExpr env (LetRecExpr bindings expr) =
  let bindings' = evalState (concat <$> mapM extractBindings bindings) 0
  in recursiveBind env bindings' >>= flip evalExpr expr
 where
  extractBindings :: BindingExpr -> State Int [(String, EgisonExpr)]
  extractBindings ([name], expr) = return [(name, expr)]
  extractBindings (names, expr) = do
    var <- genVar
    let k = length names
        target = VarExpr var
        matcher = TupleExpr $ replicate k SomethingExpr
        nth n =
          let pattern = TuplePat $ flip map [1..k] $ \i ->
                          if i == n then PatVar "#_" else WildCard
          in MatchExpr target matcher [(pattern, VarExpr "#_")]
    return ((var, expr) : map (second nth) (zip names [1..]))
  genVar :: State Int String
  genVar = modify (1+) >> gets (('#':) . show)
-- do: desugars to nested lets threading the world value through "#1"/"#2".
evalExpr env (DoExpr bindings expr) = return $ Value $ IOFunc $ do
  let body = foldr genLet (ApplyExpr expr $ TupleExpr [VarExpr "#1"]) bindings
  applyFunc (Value $ Func env ["#1"] body) $ Value World
  where
    genLet (names, expr) expr' =
      LetExpr [(["#1", "#2"], ApplyExpr expr $ TupleExpr [VarExpr "#1"])] $
        LetExpr [(names, VarExpr "#2")] expr'
-- io: run the IO function and project the result out of the (world, value)
-- pair.  NOTE(review): the outer case has no non-IOFunc branch, so a
-- non-IO value causes a pattern-match failure rather than a TypeMismatch.
evalExpr env (IoExpr expr) = do
  io <- evalExpr env expr
  case io of
    Value (IOFunc m) -> do
      val <- m >>= evalWHNF
      case val of
        Tuple [_, val'] -> return $ Value val'
        _ -> throwError $ TypeMismatch "io" io
-- match-all: every successful binding contributes one element, produced
-- lazily as a collection.
evalExpr env (MatchAllExpr target matcher (pattern, expr)) = do
  target <- newObjectRef env target
  matcher <- evalExpr env matcher >>= evalMatcherWHNF
  result <- patternMatch env pattern target matcher
  mmap (flip evalExpr expr . extendEnv env) result >>= fromMList
  where
    fromMList :: MList EgisonM WHNFData -> EgisonM WHNFData
    fromMList MNil = return . Value $ Collection Sq.empty
    fromMList (MCons val m) = do
      head <- IElement <$> newEvalutedObjectRef val
      tail <- ISubCollection <$> (liftIO . newIORef . Thunk $ m >>= fromMList)
      seqRef <- liftIO . newIORef $ Sq.fromList [head, tail]
      return . Intermediate $ ICollection $ seqRef
-- match: the first clause whose pattern yields at least one binding wins.
evalExpr env (MatchExpr target matcher clauses) = do
  target <- newObjectRef env target
  matcher <- evalExpr env matcher >>= evalMatcherWHNF
  let tryMatchClause (pattern, expr) cont = do
        result <- patternMatch env pattern target matcher
        case result of
          MCons bindings _ -> evalExpr (extendEnv env bindings) expr
          MNil -> cont
  foldr tryMatchClause (throwError $ strMsg "failed pattern match") clauses
-- seq: force the first expression deeply, then evaluate the second.
evalExpr env (SeqExpr expr1 expr2) = do
  evalExprDeep env expr1
  evalExpr env expr2
-- Application: memoized functions consult/update their memo table keyed by
-- the deeply-evaluated argument tuple; everything else goes to applyFunc.
evalExpr env (ApplyExpr func arg) = do
  func <- evalExpr env func
  arg <- evalExpr env arg
  case func of
    Value (MemoizedFunc ref hashRef env names body) -> do
      indices <- evalWHNF arg
      indices' <- mapM fromEgison $ fromTupleValue indices
      hash <- liftIO $ readIORef hashRef
      case HL.lookup indices' hash of
        Just objRef -> do
          evalRef objRef
        Nothing -> do
          whnf <- applyFunc (Value (Func env names body)) arg
          retRef <- newEvalutedObjectRef whnf
          hash <- liftIO $ readIORef hashRef
          liftIO $ writeIORef hashRef (HL.insert indices' retRef hash)
          writeObjectRef ref (Value (MemoizedFunc ref hashRef env names body))
          return whnf
    _ -> applyFunc func arg
-- memoize: pre-populate memo tables with (function, key, value) triples.
evalExpr env (MemoizeExpr memoizeFrame expr) = do
  mapM (\(x, y, z) -> do x' <- evalExprDeep env x
                         case x' of
                           (MemoizedFunc ref hashRef env' names body) -> do
                             indices <- evalExprDeep env y
                             indices' <- mapM fromEgison $ fromTupleValue indices
                             hash <- liftIO $ readIORef hashRef
                             ret <- evalExprDeep env z
                             retRef <- newEvalutedObjectRef (Value ret)
                             liftIO $ writeIORef hashRef (HL.insert indices' retRef hash)
                             writeObjectRef ref (Value (MemoizedFunc ref hashRef env' names body))
                           _ -> throwError $ TypeMismatch "memoized-function" (Value x'))
       memoizeFrame
  evalExpr env expr
evalExpr env (MatcherBFSExpr info) = return $ Value $ UserMatcher env BFSMode info
evalExpr env (MatcherDFSExpr info) = return $ Value $ UserMatcher env DFSMode info
-- generate-array: multi-dimensional forms are peeled one dimension at a time.
evalExpr env (GenerateArrayExpr (name:[]) (TupleExpr (sizeExpr:[])) expr) =
  generateArray env name sizeExpr expr
evalExpr env (GenerateArrayExpr (name:xs) (TupleExpr (sizeExpr:ys)) expr) =
  generateArray env name sizeExpr (GenerateArrayExpr xs (TupleExpr ys) expr)
evalExpr env (GenerateArrayExpr names size expr) =
  evalExpr env (GenerateArrayExpr names (TupleExpr [size]) expr)
-- array-size: upper bound of the (1-based) array, as a rational.
evalExpr env (ArraySizeExpr expr) =
  evalExpr env expr >>= arraySize
  where
    arraySize :: WHNFData -> EgisonM WHNFData
    arraySize (Intermediate (IArray arr)) = return . Value . toEgison $ (snd (Array.bounds arr)) % 1
    arraySize (Value (Array arr)) = return . Value . toEgison $ (snd (Array.bounds arr)) % 1
    arraySize val = throwError $ TypeMismatch "array" val
evalExpr _ SomethingExpr = return $ Value Something
evalExpr _ UndefinedExpr = return $ Value Undefined
evalExpr _ expr = throwError $ NotImplemented ("evalExpr for " ++ show expr)
-- | Evaluate an expression all the way down to a fully-evaluated value.
evalExprDeep :: Env -> EgisonExpr -> EgisonM EgisonValue
evalExprDeep env expr = do
  whnf <- evalExpr env expr
  evalWHNF whnf
-- | Force a reference to weak head normal form, caching the result so the
-- thunk runs at most once.
evalRef :: ObjectRef -> EgisonM WHNFData
evalRef ref = do
  obj <- liftIO $ readIORef ref
  case obj of
    WHNF whnf -> return whnf
    Thunk action -> do
      whnf <- action
      writeObjectRef ref whnf
      return whnf
-- | Force a reference to a fully-evaluated value, caching the deep result
-- back into the reference.
evalRefDeep :: ObjectRef -> EgisonM EgisonValue
evalRefDeep ref = do
  obj <- liftIO $ readIORef ref
  case obj of
    WHNF (Value val) -> return val
    WHNF whnf -> do
      val <- evalWHNF whnf
      writeObjectRef ref $ Value val
      return val
    Thunk action -> do
      val <- action >>= evalWHNF
      writeObjectRef ref $ Value val
      return val
-- | Force a WHNF value into a fully-evaluated 'EgisonValue' by recursively
-- forcing every reference inside intermediate structures.
evalWHNF :: WHNFData -> EgisonM EgisonValue
evalWHNF (Value val) = return val
evalWHNF (Intermediate (IInductiveData name refs)) =
  InductiveData name <$> mapM evalRefDeep refs
evalWHNF (Intermediate (IArray refs)) = do
  refs' <- mapM evalRefDeep $ Array.elems refs
  return $ Array $ Array.listArray (Array.bounds refs) refs'
evalWHNF (Intermediate (IIntHash refs)) = do
  refs' <- mapM evalRefDeep refs
  return $ IntHash refs'
evalWHNF (Intermediate (ICharHash refs)) = do
  refs' <- mapM evalRefDeep refs
  return $ CharHash refs'
evalWHNF (Intermediate (IStrHash refs)) = do
  refs' <- mapM evalRefDeep refs
  return $ StrHash refs'
-- A one-element tuple is transparent (mirrors evalExpr's TupleExpr rule).
evalWHNF (Intermediate (ITuple [ref])) = evalRefDeep ref
evalWHNF (Intermediate (ITuple refs)) = Tuple <$> mapM evalRefDeep refs
-- Anything else is treated as a collection and fully enumerated.
evalWHNF coll = Collection <$> (fromCollection coll >>= fromMList >>= mapM evalRefDeep . Sq.fromList)
-- | Apply a function value to an already-evaluated argument.
-- Single-parameter closures take the argument whole; multi-parameter
-- closures destructure it as a tuple (arity-checked).  Primitive and IO
-- functions are dispatched directly; anything else is a type error.
applyFunc :: WHNFData -> WHNFData -> EgisonM WHNFData
applyFunc (Value (Func env [name] body)) arg = do
  ref <- newEvalutedObjectRef arg
  evalExpr (extendEnv env $ makeBindings [name] [ref]) body
applyFunc (Value (Func env names body)) arg = do
  refs <- fromTuple arg
  if length names == length refs
    then evalExpr (extendEnv env $ makeBindings names refs) body
    else throwError $ ArgumentsNumWithNames names (length names) (length refs)
applyFunc (Value (PrimitiveFunc func)) arg = func arg
applyFunc (Value (IOFunc m)) arg = do
  case arg of
    Value World -> m
    _ -> throwError $ TypeMismatch "world" arg
applyFunc val _ = throwError $ TypeMismatch "function" val
-- | Build a 1-indexed array of @sizeExpr@ elements; element @i@ is a thunk
-- of @expr@ evaluated with @name@ bound to the number @i@.
generateArray :: Env -> String -> EgisonExpr -> EgisonExpr -> EgisonM WHNFData
generateArray env name sizeExpr expr = do
  size <- evalExpr env sizeExpr >>= fromWHNF >>= return . fromInteger
  elems <- mapM genElem (enumFromTo 1 size)
  return $ Intermediate $ IArray $ Array.listArray (1, size) elems
    where
      genElem :: Integer -> EgisonM ObjectRef
      genElem i = do env' <- bindEnv env name $ toInteger i
                     newObjectRef env' expr
      bindEnv :: Env -> String -> Integer -> EgisonM Env
      bindEnv env name i = do
        ref <- newEvalutedObjectRef (Value (Number (i,0) (1,0)))
        return $ extendEnv env [(name, ref)]
-- | Successively index into an array or hash value.  An out-of-range array
-- index or missing hash key yields 'Undefined' instead of failing; indexing
-- into anything that is neither array nor hash is a type error.
refArray :: WHNFData -> [EgisonValue] -> EgisonM WHNFData
refArray val [] = return val
refArray (Value (Array array)) (index:indices) = do
  i <- (liftM fromInteger . fromEgison) index
  -- Array.inRange replaces the former `if a <= i && i <= b then True else
  -- False` lambda; same bounds check, standard combinator.
  if Array.inRange (Array.bounds array) i
    then refArray (Value (array ! i)) indices
    else return $ Value Undefined
refArray (Intermediate (IArray array)) (index:indices) = do
  i <- (liftM fromInteger . fromEgison) index
  if Array.inRange (Array.bounds array) i
    then let ref = array ! i in
         evalRef ref >>= flip refArray indices
    else return $ Value Undefined
refArray (Value (IntHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just val -> refArray (Value val) indices
    Nothing -> return $ Value Undefined
refArray (Intermediate (IIntHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just ref -> evalRef ref >>= flip refArray indices
    Nothing -> return $ Value Undefined
refArray (Value (CharHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just val -> refArray (Value val) indices
    Nothing -> return $ Value Undefined
refArray (Intermediate (ICharHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just ref -> evalRef ref >>= flip refArray indices
    Nothing -> return $ Value Undefined
refArray (Value (StrHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just val -> refArray (Value val) indices
    Nothing -> return $ Value Undefined
refArray (Intermediate (IStrHash hash)) (index:indices) = do
  key <- fromEgison index
  case HL.lookup key hash of
    Just ref -> evalRef ref >>= flip refArray indices
    Nothing -> return $ Value Undefined
refArray val _ = throwError $ TypeMismatch "array or hash" val
-- | An unevaluated object closing over @env@ and @expr@.
newThunk :: Env -> EgisonExpr -> Object
newThunk env expr = Thunk $ evalExpr env expr
-- | Allocate a fresh reference holding an unevaluated thunk.
newObjectRef :: Env -> EgisonExpr -> EgisonM ObjectRef
newObjectRef env expr = liftIO $ newIORef $ newThunk env expr
-- | Overwrite a reference with an already-evaluated WHNF value.
writeObjectRef :: ObjectRef -> WHNFData -> EgisonM ()
writeObjectRef ref val = liftIO . writeIORef ref $ WHNF val
-- | Allocate a fresh reference holding an already-evaluated WHNF value.
newEvalutedObjectRef :: WHNFData -> EgisonM ObjectRef
newEvalutedObjectRef = liftIO . newIORef . WHNF
-- | Pair up names with the references they are bound to.
makeBindings :: [String] -> [ObjectRef] -> [Binding]
makeBindings = zip
-- | Bind a group of (possibly mutually recursive) definitions.  Every name
-- first gets a fresh placeholder reference; each body is then written as a
-- thunk closing over the extended environment, so the definitions can refer
-- to one another.  'MemoizedLambdaExpr' bodies additionally get an eagerly
-- allocated memo table.
recursiveBind :: Env -> [(String, EgisonExpr)] -> EgisonM Env
recursiveBind env bindings = do
  let (names, exprs) = unzip bindings
  refs <- replicateM (length bindings) $ newObjectRef nullEnv UndefinedExpr
  let env' = extendEnv env $ makeBindings names refs
  zipWithM_ (\ref expr ->
               case expr of
                 MemoizedLambdaExpr names body -> do
                   hashRef <- liftIO $ newIORef HL.empty
                   liftIO . writeIORef ref . WHNF . Value $ MemoizedFunc ref hashRef env' names body
                 _ -> liftIO . writeIORef ref . Thunk $ evalExpr env' expr)
             refs exprs
  return env'
--
-- Pattern Match
--
-- | Entry point of pattern matching: match @pattern@ against @target@ under
-- @matcher@, producing a (possibly infinite) stream of binding sets.
patternMatch :: Env -> EgisonPattern -> ObjectRef -> Matcher -> EgisonM (MList EgisonM Match)
patternMatch env pattern target matcher = processMStates [msingleton $ MState env [] [] [MAtom pattern target matcher]]
-- | Drive a collection of matching-state streams to completion, emitting
-- each completed match as soon as it appears (lazily via 'mappend').
processMStates :: [MList EgisonM MatchingState] -> EgisonM (MList EgisonM Match)
processMStates [] = return MNil
processMStates streams = do
  (matches, streams') <- mapM processMStates' streams >>= extractMatches . concat
  mappend (fromList matches) $ processMStates streams'
-- | Advance a single stream by one step, using the search strategy (DFS or
-- BFS) requested by the matcher of the head state's next matching atom.
processMStates' :: MList EgisonM MatchingState -> EgisonM [MList EgisonM MatchingState]
processMStates' MNil = return []
processMStates' stream@(MCons state _) =
  case pmMode (getMatcher (topMAtom state)) of
    DFSMode -> processMStatesDFS stream
    BFSMode -> processMStatesBFS stream
-- | Extract the accumulated bindings of a state whose matching trees are
-- all resolved; 'Nothing' while any atom remains outstanding.
gatherBindings :: MatchingState -> Maybe [Binding]
gatherBindings (MState _ _ bindings []) = return bindings
gatherBindings (MState _ _ bindings trees) = isResolved trees >> return bindings
  where isResolved :: [MatchingTree] -> Maybe ()
        isResolved [] = return ()
        isResolved (MAtom _ _ _ : _) = Nothing
        isResolved (MNode _ state : rest) = gatherBindings state >> isResolved rest
-- | Partition pending streams into completed matches (head state fully
-- resolved, per 'gatherBindings') and streams that still need processing,
-- preserving their order.
extractMatches :: [MList EgisonM MatchingState] -> EgisonM ([Match], [MList EgisonM MatchingState])
extractMatches = extractMatches' ([], [])
 where
  extractMatches' :: ([Match], [MList EgisonM MatchingState]) -> [MList EgisonM MatchingState] -> EgisonM ([Match], [MList EgisonM MatchingState])
  extractMatches' (xs, ys) [] = return (xs, ys)
  -- View pattern: a head state with complete bindings yields a match and
  -- the forced tail stream is kept for further processing.
  extractMatches' (xs, ys) ((MCons (gatherBindings -> Just bindings) states):rest) = do
    states' <- states
    extractMatches' (xs ++ [bindings], ys ++ [states']) rest
  extractMatches' (xs, ys) (stream:rest) = extractMatches' (xs, ys ++ [stream]) rest
-- | Depth-first step: the states derived from the head state are explored
-- before the rest of the current stream (prepended via 'mappend').
processMStatesDFS :: MList EgisonM MatchingState -> EgisonM [MList EgisonM MatchingState]
-- Defensive base case: an exhausted stream produces no streams.  (The only
-- visible caller, processMStates', never passes MNil, but the function was
-- previously partial.)
processMStatesDFS MNil = return []
processMStatesDFS (MCons state stream) = do
  stream' <- processMState state
  newStream <- mappend stream' stream
  return [newStream]
-- | Breadth-first step: the derived states and the remainder of the current
-- stream become two separate streams, processed in alternation.
processMStatesBFS :: MList EgisonM MatchingState -> EgisonM [MList EgisonM MatchingState]
-- Defensive base case, mirroring processMStatesDFS (previously partial).
processMStatesBFS MNil = return []
processMStatesBFS (MCons state stream) = do
  newStream <- processMState state
  newStream' <- stream
  return [newStream, newStream']
-- | The next matching atom to process, descending through nested nodes.
-- NOTE(review): partial — no equation for a state with an empty tree list;
-- callers appear to guarantee non-emptiness (processMState' rejects empty
-- states explicitly). TODO confirm.
topMAtom :: MatchingState -> MatchingTree
topMAtom (MState _ _ _ (mAtom@(MAtom _ _ _):_)) = mAtom
topMAtom (MState _ _ _ ((MNode _ mstate):_)) = topMAtom mstate
-- | The matcher of a matching atom.
-- NOTE(review): partial — only defined for 'MAtom'; the only visible caller
-- feeds it the result of 'topMAtom', which always returns an 'MAtom'.
getMatcher :: MatchingTree -> Matcher
getMatcher (MAtom _ _ matcher) = matcher
-- | Process one matching state.  Not-patterns are handled here: the inner
-- pattern is matched to exhaustion, and the state succeeds exactly when
-- that inner match produces no results.  All other states are delegated
-- to 'processMState''.
processMState :: MatchingState -> EgisonM (MList EgisonM MatchingState)
processMState state = do
  if isNotPat state
    then do
      let (state1, state2) = splitMState state
      result <- processMStates [msingleton state1]
      case result of
        MNil -> return $ msingleton state2
        _ -> return MNil
    else processMState' state
  where
    isNotPat :: MatchingState -> Bool
    isNotPat state = case topMAtom state of
      MAtom (NotPat _) _ _ -> True
      _ -> False
    -- Split into (state matching only the negated pattern, continuation
    -- state without it), descending through nested nodes as needed.
    -- NOTE(review): partial — relies on isNotPat having located a NotPat
    -- at the head of some (possibly nested) tree list.
    splitMState :: MatchingState -> (MatchingState, MatchingState)
    splitMState (MState env loops bindings ((MAtom (NotPat pattern) target matcher) : trees)) =
      (MState env loops bindings [MAtom pattern target matcher], MState env loops bindings trees)
    splitMState (MState env loops bindings ((MNode penv state') : trees)) =
      let (state1, state2) = splitMState state'
      in (MState env loops bindings [MNode penv state1], MState env loops bindings (MNode penv state2 : trees))
processMState' :: MatchingState -> EgisonM (MList EgisonM MatchingState)
processMState' (MState _ _ _ []) = throwError $ EgisonBug "should not reach here (empty matching-state)"
processMState' (MState _ _ _ ((MNode _ (MState _ _ _ [])):_)) = throwError $ EgisonBug "should not reach here (empty matching-node)"
processMState' (MState env loops bindings (MNode penv (MState env' loops' bindings' ((MAtom (VarPat name) target matcher):trees')):trees)) = do
case lookup name penv of
Just pattern ->
case trees' of
[] -> return $ msingleton $ MState env loops bindings ((MAtom pattern target matcher):trees)
_ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target matcher):(MNode penv (MState env' loops' bindings' trees')):trees)
Nothing -> throwError $ UnboundVariable name
processMState' (MState env loops bindings (MNode penv (MState env' loops' bindings' ((MAtom (IndexedPat (VarPat name) indices) target matcher):trees')):trees)) = do
case lookup name penv of
Just pattern -> do
let env'' = extendEnvForNonLinearPatterns env' bindings loops'
indices' <- mapM (evalExpr env'' >=> liftM fromInteger . fromWHNF) indices
let pattern' = IndexedPat pattern $ map (\i -> NumberExpr (i,0) (1,0)) indices'
case trees' of
[] -> return $ msingleton $ MState env loops bindings ((MAtom pattern' target matcher):trees)
_ -> return $ msingleton $ MState env loops bindings ((MAtom pattern' target matcher):(MNode penv (MState env' loops' bindings' trees')):trees)
Nothing -> throwError $ UnboundVariable name
processMState' (MState env loops bindings ((MNode penv state):trees)) = do
processMState' state >>= mmap (\state' -> case state' of
MState _ _ _ [] -> return $ MState env loops bindings trees
_ -> (return . MState env loops bindings . (: trees) . MNode penv) state')
processMState' (MState env loops bindings ((MAtom pattern target matcher):trees)) = do
let env' = extendEnvForNonLinearPatterns env bindings loops
case pattern of
NotPat _ -> throwError $ EgisonBug "should not reach here (not pattern)"
VarPat _ -> throwError $ strMsg "cannot use variable except in pattern function"
LetPat bindings' pattern' ->
let extractBindings ([name], expr) =
makeBindings [name] . (:[]) <$> newObjectRef env' expr
extractBindings (names, expr) =
makeBindings names <$> (evalExpr env' expr >>= fromTuple)
in
liftM concat (mapM extractBindings bindings')
>>= (\b -> return $ msingleton $ MState env loops (b ++ bindings) ((MAtom pattern' target matcher):trees))
PredPat predicate -> do
func <- evalExpr env' predicate
arg <- evalRef target
result <- applyFunc func arg >>= fromWHNF
if result then return $ msingleton $ (MState env loops bindings trees)
else return MNil
ApplyPat func args -> do
func' <- evalExpr env' func
case func' of
Value (PatternFunc env'' names expr) ->
let penv = zip names args
in return $ msingleton $ MState env loops bindings (MNode penv (MState env'' [] [] [MAtom expr target matcher]) : trees)
_ -> throwError $ TypeMismatch "pattern constructor" func'
LoopPat name (LoopRange start ends endPat) pat pat' -> do
startNum <- evalExpr env' start >>= fromWHNF
startNumRef <- newEvalutedObjectRef $ Value $ Number ((startNum - 1),0) (1,0)
ends' <- evalExpr env' ends
if isPrimitiveValue ends'
then do
endsRef <- newEvalutedObjectRef ends'
inners <- liftIO $ newIORef $ Sq.fromList [IElement endsRef]
endsRef' <- liftIO $ newIORef (WHNF (Intermediate (ICollection inners)))
return $ msingleton $ MState env ((LoopPatContext (name, startNumRef) endsRef' endPat pat pat'):loops) bindings ((MAtom ContPat target matcher):trees)
else do
endsRef <- newEvalutedObjectRef ends'
return $ msingleton $ MState env ((LoopPatContext (name, startNumRef) endsRef endPat pat pat'):loops) bindings ((MAtom ContPat target matcher):trees)
ContPat ->
case loops of
[] -> throwError $ strMsg "cannot use cont pattern except in loop pattern"
LoopPatContext (name, startNumRef) endsRef endPat pat pat' : loops' -> do
startNum <- evalRef startNumRef >>= fromWHNF
nextNumRef <- newEvalutedObjectRef $ Value $ Number ((startNum + 1),0) (1,0)
ends <- evalRef endsRef
b <- isEmptyCollection ends
if b
then return MNil
else do
(carEndsRef, cdrEndsRef) <- fromJust <$> runMaybeT (unconsCollection ends)
carEndsNum <- evalRef carEndsRef >>= fromWHNF
if startNum > carEndsNum
then return MNil
else if startNum == carEndsNum
then return $ fromList [MState env loops' bindings ((MAtom endPat startNumRef Something):(MAtom pat' target matcher):trees),
MState env ((LoopPatContext (name, nextNumRef) cdrEndsRef endPat pat pat'):loops') bindings ((MAtom pat target matcher):trees)]
else return $ fromList [MState env ((LoopPatContext (name, nextNumRef) endsRef endPat pat pat'):loops') bindings ((MAtom pat target matcher):trees)]
AndPat patterns ->
let trees' = map (\pat -> MAtom pat target matcher) patterns ++ trees
in return $ msingleton $ MState env loops bindings trees'
OrPat patterns ->
return $ fromList $ flip map patterns $ \pat ->
MState env loops bindings (MAtom pat target matcher : trees)
_ ->
case matcher of
UserMatcher _ _ _ -> do
(patterns, targetss, matchers) <- inductiveMatch env' pattern target matcher
mfor targetss $ \ref -> do
targets <- evalRef ref >>= fromTuple
let trees' = zipWith3 MAtom patterns targets matchers ++ trees
return $ MState env loops bindings trees'
Tuple matchers -> do
case pattern of
ValuePat _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees)
WildCard -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees)
PatVar _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees)
IndexedPat _ _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees)
TuplePat patterns -> do
targets <- evalRef target >>= fromTuple
if not (length patterns == length targets) then throwError $ ArgumentsNum (length patterns) (length targets) else return ()
if not (length patterns == length matchers) then throwError $ ArgumentsNum (length patterns) (length matchers) else return ()
let trees' = zipWith3 MAtom patterns targets matchers ++ trees
return $ msingleton $ MState env loops bindings trees'
_ -> throwError $ strMsg $ "should not reach here. matcher: " ++ show matcher ++ ", pattern: " ++ show pattern
Something ->
case pattern of
ValuePat valExpr -> do
val <- evalExprDeep env' valExpr
tgtVal <- evalRefDeep target
if val == tgtVal
then return $ msingleton $ MState env loops bindings trees
else return MNil
WildCard -> return $ msingleton $ MState env loops bindings trees
PatVar name -> return $ msingleton $ MState env loops ((name, target):bindings) trees
IndexedPat (PatVar name) indices -> do
indices <- mapM (evalExpr env' >=> liftM fromInteger . fromWHNF) indices
case lookup name bindings of
Just ref -> do
obj <- evalRef ref >>= updateHash indices >>= newEvalutedObjectRef
return $ msingleton $ MState env loops (subst name obj bindings) trees
Nothing -> do
obj <- updateHash indices (Intermediate . IIntHash $ HL.empty) >>= newEvalutedObjectRef
return $ msingleton $ MState env loops ((name,obj):bindings) trees
where
updateHash :: [Integer] -> WHNFData -> EgisonM WHNFData
updateHash [index] (Intermediate (IIntHash hash)) = do
return . Intermediate . IIntHash $ HL.insert index target hash
updateHash (index:indices) (Intermediate (IIntHash hash)) = do
val <- maybe (return $ Intermediate $ IIntHash HL.empty) evalRef $ HL.lookup index hash
ref <- updateHash indices val >>= newEvalutedObjectRef
return . Intermediate . IIntHash $ HL.insert index ref hash
updateHash indices (Value (IntHash hash)) = do
keys <- return $ HL.keys hash
vals <- mapM (newEvalutedObjectRef . Value) $ HL.elems hash
updateHash indices (Intermediate $ IIntHash $ HL.fromList $ zip keys vals)
updateHash _ v = throwError $ strMsg $ "expected hash value: " ++ show v
subst :: (Eq a) => a -> b -> [(a, b)] -> [(a, b)]
subst k nv ((k', v'):xs) | k == k' = (k', nv):(subst k nv xs)
| otherwise = (k', v'):(subst k nv xs)
subst _ _ [] = []
IndexedPat pattern indices -> throwError $ strMsg ("invalid indexed-pattern: " ++ show pattern)
TuplePat patterns -> do
targets <- evalRef target >>= fromTuple
if not (length patterns == length targets) then throwError $ ArgumentsNum (length patterns) (length targets) else return ()
let trees' = zipWith3 MAtom patterns targets (take (length patterns) (repeat Something)) ++ trees
return $ msingleton $ MState env loops bindings trees'
_ -> throwError $ strMsg "something can only match with a pattern variable"
_ -> throwError $ EgisonBug $ "should not reach here. matcher: " ++ show matcher ++ ", pattern: " ++ show pattern
-- | Destructure a pattern/target pair through a user-defined matcher.
-- Each matcher clause pairs a primitive pattern-pattern with a list of
-- primitive-data clauses: the first clause whose pattern-pattern matches
-- @pattern@ and whose data clause matches @target@ determines the
-- sub-patterns, candidate target tuples and next matchers.
inductiveMatch :: Env -> EgisonPattern -> ObjectRef -> Matcher ->
                  EgisonM ([EgisonPattern], MList EgisonM ObjectRef, [Matcher])
inductiveMatch env pattern target (UserMatcher matcherEnv _ clauses) =
  foldr tryPPMatchClause failPPPatternMatch clauses
 where
  tryPPMatchClause (ppPat, matchersExpr, pdClauses) next = do
    ppResult <- runMaybeT $ primitivePatPatternMatch env ppPat pattern
    case ppResult of
      Nothing -> next
      Just (patterns, bindings) -> do
        -- The data clauses see the bindings captured by the pattern-pattern.
        targetss <- foldr (tryPDMatchClause bindings) failPDPatternMatch pdClauses
        matchers <- fromTupleValue <$> (evalExpr matcherEnv matchersExpr >>= evalMatcherWHNF)
        return (patterns, targetss, matchers)
  tryPDMatchClause bindings (pdPat, bodyExpr) next = do
    pdResult <- runMaybeT $ primitiveDataPatternMatch pdPat target
    case pdResult of
      Nothing -> next
      Just bindings' -> do
        let env' = extendEnv matcherEnv $ bindings ++ bindings'
        evalExpr env' bodyExpr >>= fromCollection
  failPPPatternMatch = throwError $ strMsg "failed primitive pattern pattern match"
  failPDPatternMatch = throwError $ strMsg "failed primitive data pattern match"
-- | Match a matcher clause's primitive pattern-pattern against a concrete
-- pattern.  Yields the sub-patterns captured by pattern-variable holes and
-- the bindings produced by value-pattern holes; fails in 'MatchM' otherwise.
primitivePatPatternMatch :: Env -> PrimitivePatPattern -> EgisonPattern ->
                            MatchM ([EgisonPattern], [Binding])
primitivePatPatternMatch env ppPat pat =
  case (ppPat, pat) of
    (PPWildCard, _) -> return ([], [])
    (PPPatVar, _) -> return ([pat], [])
    (PPValuePat name, ValuePat expr) -> do
      -- Value-pattern holes bind the (unevaluated) expression by reference.
      ref <- lift $ newObjectRef env expr
      return ([], [(name, ref)])
    (PPInductivePat name subPats, InductivePat name' exprs)
      | name == name' ->
          (concat *** concat) . unzip <$>
            zipWithM (primitivePatPatternMatch env) subPats exprs
    -- Mismatched constructors (or a failed name guard) fall through here.
    _ -> matchFail
-- | Match a primitive data pattern against the value behind an object
-- reference, producing the variable bindings on success.
primitiveDataPatternMatch :: PrimitiveDataPattern -> ObjectRef -> MatchM [Binding]
primitiveDataPatternMatch PDWildCard _ = return []
primitiveDataPatternMatch (PDPatVar name) ref = return [(name, ref)]
primitiveDataPatternMatch (PDInductivePat name subPats) ref = do
  whnf <- lift $ evalRef ref
  case whnf of
    Intermediate (IInductiveData name' refs)
      | name == name' -> matchFields refs
    Value (InductiveData name' vals)
      | name == name' -> do
          -- Promote evaluated fields to references so matching is uniform.
          refs <- lift $ mapM (newEvalutedObjectRef . Value) vals
          matchFields refs
    _ -> matchFail
  where
    matchFields refs = concat <$> zipWithM primitiveDataPatternMatch subPats refs
primitiveDataPatternMatch PDEmptyPat ref = do
  whnf <- lift $ evalRef ref
  empty <- lift $ isEmptyCollection whnf
  if empty then return [] else matchFail
primitiveDataPatternMatch (PDConsPat headPat tailPat) ref = do
  whnf <- lift $ evalRef ref
  (hd, tl) <- unconsCollection whnf
  (++) <$> primitiveDataPatternMatch headPat hd
       <*> primitiveDataPatternMatch tailPat tl
primitiveDataPatternMatch (PDSnocPat initPat lastPat) ref = do
  whnf <- lift $ evalRef ref
  (ini, lst) <- unsnocCollection whnf
  (++) <$> primitiveDataPatternMatch initPat ini
       <*> primitiveDataPatternMatch lastPat lst
primitiveDataPatternMatch (PDConstantPat expr) ref = do
  -- A non-primitive target is a match failure, not a type error.
  whnf <- lift $ evalRef ref
  tgt <- either (const matchFail) return $ extractPrimitiveValue whnf
  expected <- lift $ evalExprDeep nullEnv expr
  if expected == tgt then return [] else matchFail
-- | Expose the top level of a collection as a sequence of 'Inner' items;
-- anything that is not a collection is a type error.
expandCollection :: WHNFData -> EgisonM (Seq Inner)
expandCollection (Value (Collection vals)) =
  mapM (\v -> IElement <$> newEvalutedObjectRef (Value v)) vals
expandCollection (Intermediate (ICollection innersRef)) =
  liftIO (readIORef innersRef)
expandCollection val = throwError (TypeMismatch "collection" val)
-- | Decide whether a collection is empty.  When an intermediate collection
-- starts with a sub-collection, that head is expanded and the flattened
-- sequence is written back into the IORef (memoising the expansion) before
-- retrying.  Non-collection values are reported as empty, as before.
isEmptyCollection :: WHNFData -> EgisonM Bool
isEmptyCollection (Value (Collection col)) = return (Sq.null col)
isEmptyCollection whole@(Intermediate (ICollection cellRef)) = do
  items <- liftIO $ readIORef cellRef
  case Sq.viewl items of
    EmptyL -> return True
    (ISubCollection subRef) :< rest -> do
      expanded <- evalRef subRef >>= expandCollection
      -- Cache the flattened head so the expansion happens only once.
      liftIO $ writeIORef cellRef (expanded >< rest)
      isEmptyCollection whole
    _ -> return False
isEmptyCollection _ = return False
-- | Split a collection into its first element and the rest, failing in
-- 'MatchM' when it is empty or not a collection.  A leading sub-collection
-- of an intermediate collection is expanded (and cached) before splitting.
unconsCollection :: WHNFData -> MatchM (ObjectRef, ObjectRef)
unconsCollection (Value (Collection col)) =
  case Sq.viewl col of
    EmptyL -> matchFail
    v :< vs ->
      lift $ (,) <$> newEvalutedObjectRef (Value v)
                 <*> newEvalutedObjectRef (Value (Collection vs))
unconsCollection whole@(Intermediate (ICollection cellRef)) = do
  items <- liftIO $ readIORef cellRef
  case Sq.viewl items of
    EmptyL -> matchFail
    (IElement headRef) :< rest -> do
      restRef <- liftIO $ newIORef rest
      lift $ (\tl -> (headRef, tl)) <$> newEvalutedObjectRef (Intermediate (ICollection restRef))
    (ISubCollection subRef) :< rest -> do
      expanded <- lift $ evalRef subRef >>= expandCollection
      -- Write the flattened form back so the work is not repeated.
      liftIO $ writeIORef cellRef (expanded >< rest)
      unconsCollection whole
unconsCollection _ = matchFail
-- | Mirror image of 'unconsCollection': split a collection into its prefix
-- and last element, expanding (and caching) a trailing sub-collection of an
-- intermediate collection when necessary.
unsnocCollection :: WHNFData -> MatchM (ObjectRef, ObjectRef)
unsnocCollection (Value (Collection col)) =
  case Sq.viewr col of
    EmptyR -> matchFail
    vs :> v ->
      lift $ (,) <$> newEvalutedObjectRef (Value (Collection vs))
                 <*> newEvalutedObjectRef (Value v)
unsnocCollection whole@(Intermediate (ICollection cellRef)) = do
  items <- liftIO $ readIORef cellRef
  case Sq.viewr items of
    EmptyR -> matchFail
    rest :> (IElement lastRef) -> do
      restRef <- liftIO $ newIORef rest
      lift $ (\ini -> (ini, lastRef)) <$> newEvalutedObjectRef (Intermediate (ICollection restRef))
    rest :> (ISubCollection subRef) -> do
      expanded <- lift $ evalRef subRef >>= expandCollection
      -- Cache the flattened tail, then retry the split.
      liftIO $ writeIORef cellRef (rest >< expanded)
      unsnocCollection whole
unsnocCollection _ = matchFail
-- | Extend an environment with the current pattern bindings plus the index
-- binding of every enclosing loop-pattern context, so non-linear patterns
-- can refer to variables bound earlier in the match.
extendEnvForNonLinearPatterns :: Env -> [Binding] -> [LoopPatContext] -> Env
extendEnvForNonLinearPatterns env bindings loops =
  extendEnv env (bindings ++ loopBindings)
  where
    loopBindings = [ binding | LoopPatContext binding _ _ _ _ <- loops ]
-- | Coerce a WHNF value to a 'Matcher', recursively flattening tuples of
-- matchers; anything else is a type error.
evalMatcherWHNF :: WHNFData -> EgisonM Matcher
evalMatcherWHNF (Value m@Something) = return m
evalMatcherWHNF (Value m@(UserMatcher _ _ _)) = return m
evalMatcherWHNF (Value (Tuple ms)) = Tuple <$> mapM (evalMatcherWHNF . Value) ms
evalMatcherWHNF (Intermediate (ITuple refs)) =
  -- Force each component, then require each to be a matcher.
  Tuple <$> (mapM evalRef refs >>= mapM evalMatcherWHNF)
evalMatcherWHNF whnf = throwError $ TypeMismatch "matcher" whnf
--
-- Util
--
-- | View a WHNF value as a list of component references; a non-tuple is
-- treated as a 1-tuple of itself.
fromTuple :: WHNFData -> EgisonM [ObjectRef]
fromTuple (Intermediate (ITuple refs)) = return refs
fromTuple (Value (Tuple vals)) = mapM (newEvalutedObjectRef . Value) vals
fromTuple whnf = do
  ref <- newEvalutedObjectRef whnf
  return [ref]
-- | View a value as a list of components; a non-tuple is a 1-tuple.
fromTupleValue :: EgisonValue -> [EgisonValue]
fromTupleValue value =
  case value of
    Tuple vals -> vals
    _          -> [value]
-- | Convert a collection to a lazy 'MList' of element references, pulling
-- elements off the front one at a time (so infinite intermediate
-- collections still work).  Non-collections are a type error.
fromCollection :: WHNFData -> EgisonM (MList EgisonM ObjectRef)
fromCollection (Value (Collection vals))
  | Sq.null vals = return MNil
  | otherwise    = fromSeq <$> mapM (newEvalutedObjectRef . Value) vals
fromCollection whnf@(Intermediate (ICollection _)) = do
  empty <- isEmptyCollection whnf
  if empty
    then return MNil
    else do
      -- Safe: the emptiness check above guarantees uncons succeeds.
      (hd, tl) <- fromJust <$> runMaybeT (unconsCollection whnf)
      tlWhnf <- evalRef tl
      return $ MCons hd (fromCollection tlWhnf)
fromCollection whnf = throwError $ TypeMismatch "collection" whnf
--
-- String
--
-- | Pack an Egison collection of characters (or a 1-tuple wrapping one)
-- into a 'Text'; any non-character element is a type error.
packStringValue :: EgisonValue -> EgisonM Text
packStringValue (Collection cs) = T.pack <$> mapM asChar (toList cs)
  where
    asChar (Char c) = return c
    asChar v        = throwError $ TypeMismatch "char" (Value v)
packStringValue (Tuple [val]) = packStringValue val
packStringValue val = throwError $ TypeMismatch "string" (Value val)
--
-- Util
--
-- | Key type for Egison hash tables: integer, character or string keys.
data EgisonHashKey =
  IntKey Integer
  | CharKey Char
  | StrKey Text
-- | Extract a primitive (char, bool, number or float) value from WHNF;
-- everything else is reported as a 'TypeMismatch'.
extractPrimitiveValue :: WHNFData -> Either EgisonError EgisonValue
extractPrimitiveValue whnf =
  case whnf of
    Value v@(Char _)     -> return v
    Value v@(Bool _)     -> return v
    Value v@(Number _ _) -> return v
    Value v@(Float _ _)  -> return v
    _ -> throwError $ TypeMismatch "primitive value" whnf
-- | Is this WHNF an already-evaluated primitive (char, bool, number, float)?
isPrimitiveValue :: WHNFData -> Bool
isPrimitiveValue (Value v) =
  case v of
    Char _     -> True
    Bool _     -> True
    Number _ _ -> True
    Float _ _  -> True
    _          -> False
isPrimitiveValue _ = False
| beni55/egison | hs-src/Language/Egison/Core.hs | mit | 41,566 | 0 | 28 | 9,847 | 14,759 | 7,192 | 7,567 | -1 | -1 |
{-# htermination intersectFM_C :: (b1 -> b2 -> b3) -> FiniteMap Float b1 -> FiniteMap Float b2 -> FiniteMap Float b3 #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_intersectFM_C_6.hs | mit | 138 | 0 | 3 | 24 | 5 | 3 | 2 | 1 | 0 |
--The sum of the squares of the first ten natural numbers is,
--12 + 22 + ... + 102 = 385
--The square of the sum of the first ten natural numbers is,
--(1 + 2 + ... + 10)2 = 552 = 3025
--Hence the difference between the sum of the squares of the first ten natural numbers and the square of the sum is 3025 − 385 = 2640.
--Find the difference between the sum of the squares of the first one hundred natural numbers and the square of the sum
-- | Sum of squares 1^2 + 2^2 + ... + n^2 via the closed form n(n+1)(2n+1)/6.
sumOfSqs :: Int -> Int
sumOfSqs n = numerator `quot` 6
  where
    numerator = n * (n + 1) * (2 * n + 1)
-- | Square of the sum 1 + 2 + ... + n via the closed form (n(n+1)/2)^2.
sqSums :: Int -> Int
sqSums n = triangle * triangle
  where
    triangle = n * (n + 1) `quot` 2
-- | Difference between the square of the sum and the sum of the squares of
-- the first n natural numbers (Project Euler problem 6).
sqDiff :: Int -> Int
sqDiff n = squareOfSum - sumOfSquares
  where
    -- The two closed forms are inlined so this definition stands alone.
    squareOfSum  = (n * (n + 1) `quot` 2) ^ 2
    sumOfSquares = n * (n + 1) * (2 * n + 1) `quot` 6
| sravan-s/euler | euler-0006/sumSquareDiff.hs | mit | 642 | 0 | 11 | 153 | 137 | 75 | 62 | 6 | 1 |
{-
(**) Decode a run-length encoded list.
Given a run-length code list generated as specified in problem 11. Construct its uncompressed version.
Example in Haskell:
P12> decodeModified
[Multiple 4 'a',Single 'b',Multiple 2 'c',
Multiple 2 'a',Single 'd',Multiple 4 'e']
"aaaabccaadeeee"
-}
-- | A run-length encoded element: either a repeated run or a lone item.
data Item a = Multiple Int a | Single a deriving (Show)

-- | Expand a run-length encoded list to its uncompressed form (problem 12).
decodeModified :: [Item a] -> [a]
decodeModified items = [ x | item <- items, x <- expandItem item ]
  where
    expandItem (Single x)     = [x]
    expandItem (Multiple n x) = replicate n x
| gaoce/haskell_99 | 12.hs | mit | 534 | 0 | 9 | 129 | 92 | 49 | 43 | 5 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module JSaddleHello ( main ) where
import Data.Monoid ((<>))
import Control.Monad (forever)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (takeMVar, putMVar, newEmptyMVar)
import Control.Lens ((^.))
import Language.Javascript.JSaddle
(jsg, jsg3, js, js1, jss, fun, valToNumber, syncPoint,
nextAnimationFrame, runJSM, askJSM, global)
-- | Build a small interactive page: a greeting, a click-position logger,
-- and an exit button whose colour is animated until it is clicked.
main = do
    doc <- jsg "document"
    doc ^. js "body" ^. jss "innerHTML" "<h1>Kia ora (Hi)</h1>"
    -- Create a haskell function call back for the onclick event
    doc ^. jss "onclick" (fun $ \ _ _ [e] -> do
        x <- e ^. js "clientX" >>= valToNumber
        y <- e ^. js "clientY" >>= valToNumber
        newParagraph <- doc ^. js1 "createElement" "p"
        newParagraph ^. js1 "appendChild" (
            doc ^. js1 "createTextNode" ("Click " ++ show (x, y)))
        doc ^. js "body" ^. js1 "appendChild" newParagraph
        return ())
    -- Make an exit button
    exitMVar <- liftIO newEmptyMVar
    exit <- doc ^. js1 "createElement" "span"
    exit ^. js1 "appendChild" (
        doc ^. js1 "createTextNode" "Click here to exit")
    doc ^. js "body" ^. js1 "appendChild" exit
    exit ^. jss "onclick" (fun $ \ _ _ _ -> liftIO $ putMVar exitMVar ())
    -- Force all the lazy evaluation to be executed
    syncPoint
    -- Animate the color of the exit button: sin(3t) is mapped to 0..255 and
    -- rendered as two hex digits in the blue channel.
    ctx <- askJSM
    liftIO . forkIO . forever $
        (`runJSM` ctx) . nextAnimationFrame $ \ t -> do
            let n = floor ((sin (3 * t) + 1) * 128)
                (h1, h2) = n `divMod` 16
                hexDigits = ['0'..'9'] <> ['A'..'F']
            exit ^. js "style" ^. jss "color" ("#0000" <> [hexDigits !! h1, hexDigits !! h2])
            return ()
    -- In GHC compiled version the WebSocket connection will end when this
    -- thread ends. So we will wait until the user clicks exit.
    liftIO $ takeMVar exitMVar
    doc ^. js "body" ^. jss "innerHTML" "<h1>Ka kite ano (See you later)</h1>"
    return ()
| ghcjs/jsaddle-hello | src/JSaddleHello.hs | mit | 2,076 | 0 | 22 | 555 | 631 | 327 | 304 | 40 | 1 |
{-# LANGUAGE FlexibleContexts, NoImplicitPrelude, DeriveTraversable, NamedFieldPuns, DerivingVia, DeriveGeneric #-}
module Conflict
( Conflict(..), Sides(..), LineNo
, setEachBody, setStrings
, pretty, prettyLines
, parse
) where
import Control.Monad.State (MonadState, state, evalStateT)
import Control.Monad.Writer (runWriter, tell)
import Data.Functor.Identity (Identity(..))
import Data.Maybe (fromMaybe)
import Generic.Data (Generically1(..))
import GHC.Generics (Generic1)
import Prelude.Compat
type LineNo = Int
-- | One value per side of a three-way merge: local (A), base, and remote (B).
-- The 'Applicative' derived via 'Generically1' combines the three sides
-- point-wise, which 'prettyLines' relies on.
data Sides a = Sides
    { sideA :: a
    , sideBase :: a
    , sideB :: a
    } deriving (Functor, Foldable, Traversable, Show, Eq, Ord, Generic1)
    deriving Applicative via Generically1 Sides
-- | One merge conflict as it appears in a file: the marker lines (with their
-- line numbers) and the body lines of each side.
data Conflict = Conflict
    { cMarkers   :: Sides (LineNo, String) -- The markers at the beginning of sections
    , cMarkerEnd :: (LineNo, String)       -- The ">>>>>>>...." marker at the end of the conflict
    , cBodies    :: Sides [String]
    } deriving (Show)
-- | Van Laarhoven traversal focusing a conflict's bodies; the setters below
-- are built on top of it.
bodies :: Applicative f => (Sides [String] -> f (Sides [String])) -> Conflict -> f Conflict
bodies f c@Conflict{cBodies} = (\x -> c{cBodies = x}) <$> f cBodies
-- Setters built on top of the 'bodies' traversal:

-- | Rewrite all three bodies at once.
setBodies :: (Sides [String] -> Sides [String]) -> Conflict -> Conflict
setBodies f conflict = runIdentity (bodies (Identity . f) conflict)

-- | Rewrite each side's body independently.
setEachBody :: ([String] -> [String]) -> Conflict -> Conflict
setEachBody f = setBodies (fmap f)

-- | Rewrite every line of every body.
setStrings :: (String -> String) -> Conflict -> Conflict
setStrings f = setEachBody (map f)
-- | Render a conflict back to its source lines: for each side its marker
-- line followed by its body (A, base, B in declaration order), then the
-- closing ">>>>>>>" marker.
prettyLines :: Conflict -> [String]
prettyLines conflict =
    sectionOf sideA ++ sectionOf sideBase ++ sectionOf sideB
    ++ [snd (cMarkerEnd conflict)]
    where
        sectionOf side =
            snd (side (cMarkers conflict)) : side (cBodies conflict)

-- | Render a conflict as one newline-terminated string.
pretty :: Conflict -> String
pretty conflict = unlines (prettyLines conflict)
-- | Consume and return the numbered lines before the next marker line.
-- A marker line starts with @count@ copies of @c@ (7 when unspecified);
-- when an explicit count is given, the run must not be followed by another
-- @c@, so an 8-character run does not match a 7-character marker.
breakUpToMarker ::
    MonadState [(LineNo, String)] m =>
    Char -> Maybe Int -> m [(LineNo, String)]
breakUpToMarker c mCount = state (break isMarker)
    where
        count = fromMaybe 7 mCount
        marker = replicate count c
        isMarker (_, line) =
            let (pre, post) = splitAt count line
            in pre == marker && boundaryOk post
        boundaryOk post =
            case (mCount, post) of
                (Just _, nextChar : _) -> nextChar /= c
                _ -> True
-- | Pop the first remaining line, if any, from the parser state.
readHead :: MonadState [a] m => m (Maybe a)
readHead =
    state $ \remaining ->
        case remaining of
            []       -> (Nothing, [])
            (y : ys) -> (Just y, ys)
-- | Read the lines before the next marker, plus the marker line itself
-- ('Nothing' when the input ends without one).
tryReadUpToMarker ::
    MonadState [(LineNo, String)] m =>
    Char -> Maybe Int -> m ([(LineNo, String)], Maybe (LineNo, String))
tryReadUpToMarker c mCount = do
    beforeMarker <- breakUpToMarker c mCount
    markerLine <- readHead
    pure (beforeMarker, markerLine)
-- | Like 'tryReadUpToMarker' but the marker must exist: calls 'error' with
-- the first few remaining lines when the input ends without one.
readUpToMarker ::
    MonadState [(LineNo, String)] m =>
    Char -> Maybe Int -> m ([(LineNo, String)], (LineNo, String))
readUpToMarker c mCount = do
    (ls, mMarker) <- tryReadUpToMarker c mCount
    case mMarker of
        Just marker -> pure (ls, marker)
        Nothing ->
            error $ concat
                [ "Parse error: failed reading up to marker: "
                , show c, ", got:"
                , concatMap (\(l,s) -> "\n" ++ show l ++ "\t" ++ s) $ take 5 ls
                ]
-- | Parse one conflict, given its already-consumed "<<<<<<<" marker line.
-- The length of the '<' run in that marker fixes the expected length of the
-- '|', '=' and '>' markers that follow.
parseConflict :: MonadState [(LineNo, String)] m => (LineNo, String) -> m Conflict
parseConflict openMarker = do
    (bodyA, markerBase) <- readUpToMarker '|' expectedLen
    (bodyBase, markerB) <- readUpToMarker '=' expectedLen
    (bodyB, markerEnd) <- readUpToMarker '>' expectedLen
    pure Conflict
        { cMarkers = Sides openMarker markerBase markerB
        , cMarkerEnd = markerEnd
        , cBodies = fmap snd <$> Sides bodyA bodyBase bodyB
        }
    where
        expectedLen = Just (length (takeWhile (== '<') (snd openMarker)))
-- | Split numbered lines into plain text ('Left') and parsed conflicts
-- ('Right'), in their original order, using a Writer as the accumulator.
parseFromNumberedLines :: [(LineNo, String)] -> [Either String Conflict]
parseFromNumberedLines = snd . runWriter . evalStateT go
    where
        go = do
            (plainLines, mOpenMarker) <- tryReadUpToMarker '<' Nothing
            tell $ map (Left . snd) plainLines
            case mOpenMarker of
                Nothing -> pure ()
                Just openMarker -> do
                    conflict <- parseConflict openMarker
                    tell [Right conflict]
                    go
-- | Parse file contents into interleaved plain lines and conflicts,
-- numbering lines from 1.
parse :: String -> [Either String Conflict]
parse contents = parseFromNumberedLines (zip [1 ..] (lines contents))
| Peaker/git-mediate | src/Conflict.hs | gpl-2.0 | 4,233 | 0 | 20 | 1,195 | 1,422 | 775 | 647 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Sites.AmyaChronicles
( amyaChronicles
) where
import Network.HTTP.Types.URI (decodePathSegments)
import Data.Maybe (maybeToList, listToMaybe)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.ByteString.UTF8 as US
-- Taggy
import Control.Lens (only,(^?), folded, _Just, (^..), (^.))
import Text.Taggy.Lens hiding (name)
-- Parser
import Text.Parsec
import Data.Functor.Identity (Identity)
import Control.Applicative (liftA)
-- Local imports
import Types
import Sites.Util (toPipeline)
--
-- Amya Chronicles - Testing Taggy parsing
--
-- | Site definition for the Amya Chronicles webcomic, seeded at the first
-- archive page; pages are always re-fetched ('Always', no cache reuse).
amyaChronicles = Comic
    { comicName = "Amya Chronicles"
    , seedPage = "http://www.amyachronicles.com/archives/comic/09292009"
    , seedCache = Always
    , pageParse = toPipeline amyaChroniclesPageParse
    , cookies = []
    }
-- | Parse one archive page: queue the page image (tagged via its title) and
-- the next-page link, if any.
amyaChroniclesPageParse :: ReplyType -> IO [FetchType]
amyaChroniclesPageParse (WebpageReply pg) = do
    -- NOTE(review): the replacement pairs below look like HTML-entity
    -- unescaping whose left-hand sides may have been mangled in transit --
    -- verify against the upstream source.
    let text = foldl (\c (a, b) -> TL.replace a b c) (TLE.decodeUtf8With lenientDecode pg)
            [ (">>", ">>")
            , ("<<", "^lt;<")
            , ("< ", "< ")
            -- , (" >", " >") - Breaks the image match
            ]
    -- Next Page Link
    -- TODO: make it so it can match on only 'comic-nav-next'
    -- TODO: make it so that it only matches "class" attr
    let next = text ^? html . allAttributed (folded . only "comic-nav-base comic-nav-next") . attr "href" . _Just
    -- Page Name
    let name = text ^. html . allAttributed (folded . only "post-title") . contents
    print name
    -- Search for double page image link
    -- TODO: make it actually verify that its a link to
    --  "THIS IS A DOUBLE PAGE SPREAD, CLICK HERE FOR FULL IMAGE!"
    let img = case (listToMaybe $ filter (T.isSuffixOf "jpg") (text ^.. html . allNamed (only "strong") . elements . attr "href" . _Just)) of
            Nothing -> text ^. html . allAttributed (folded . only "comic") . allNamed (only "img") . attr "src" . _Just
            Just x -> x
    -- Parse the title and create the tag; an unparsable title means we only
    -- follow the next-page link.
    case parseTitle name img of
        Left _ -> return $ map (\url -> Webpage (T.unpack url) Always) $ maybeToList next
        Right x -> return $ [Image (T.unpack img) x] ++ (map (\url -> Webpage (T.unpack url) Always) $ maybeToList next)
-- | Run the 'titles' classifier on a page title; the image URL supplies the
-- eventual file name of the tag.
parseTitle name url = runParser (titles url) () "" name
-- | Classify a page title.  Recognised shapes, tried in order:
-- "chp.pg" (main story), "chp.pg – chp'.pg'" (a double-page spread, also
-- filed under the first chapter), "<word> <n>" (a named short story);
-- anything else falls through to artwork.
titles :: T.Text -> ParsecT T.Text u Identity ComicTag
titles url = choice
    [ try (do
        chp <- numParse
        _ <- char '.'
        _ <- numParse -- pg
        eof
        return $ mainStory chp url)
    , try (do
        chp <- numParse
        _ <- char '.'
        _ <- numParse -- pg
        _ <- space
        _ <- char '–'
        _ <- space
        _ <- numParse -- chp'
        _ <- char '.'
        _ <- numParse -- pg'
        eof
        return $ mainStory chp url)
    , try (do
        name <- wordParse
        _ <- space
        _ <- numParse
        eof
        return $ shortStory name url)
    , (return $ artStory url)
    ]
-- | Parse one or more digits as an 'Integer'.
numParse :: ParsecT T.Text u Identity Integer
numParse = read <$> many1 digit

-- | Parse one or more letters as a 'T.Text'.
wordParse :: ParsecT T.Text u Identity T.Text
wordParse = T.pack <$> many1 letter
-- Comic tag builders; the last URL path segment becomes the file name.
-- | A page of the main story, filed under its chapter number.
mainStory chp url = ComicTag "amya_chronicles" Nothing Nothing (Just $ UnitTag [StandAlone $ Digit chp Nothing Nothing Nothing] Nothing) (Just $ last $ decodePathSegments $ US.fromString $ T.unpack url)
-- | A page of a named short story.
shortStory storyName url = ComicTag "amya_chronicles" (Just storyName) Nothing Nothing (Just $ last $ decodePathSegments $ US.fromString $ T.unpack url)
-- | A stand-alone artwork page.
artStory url = ComicTag "amya_chronicles" (Just "artwork") Nothing Nothing (Just $ last $ decodePathSegments $ US.fromString $ T.unpack url)
| pharaun/hComicFetcher | src/Sites/AmyaChronicles.hs | gpl-3.0 | 3,869 | 0 | 21 | 970 | 1,178 | 617 | 561 | -1 | -1 |
module Main where
import GrammarOfZones
import DistanceTables
import R
import Piece
import Board
import qualified Data.Vector as V
import Control.Monad
main :: IO ()
main = do
  {--
  let s_color = White
  let s_rank = Bishop
  let start = (3,2)
  let destination = (4,5)
  let subject = makeChessPiece s_color s_rank start
  let target = makeChessPiece Black Pawn destination
  let adversary1 = makeChessPiece Black King (7,7)
  let adversary2 = makeChessPiece Black Knight (2,7)
  let ally1 = makeChessPiece White King (7,1)
  let ally2 = makeChessPiece White Pawn (6,3)
  let ally3 = makeChessPiece White Pawn (5,5)
  let pieces = [subject, target, adversary1, adversary2, ally1, ally2, ally3]
  let obsticals = map location $ filter (\x -> x /= subject && x /= target) pieces
  let mainTrajectory = [(3,2),(5,4),(4,5)]
  print mainTrajectory
  let zone = generateChessZoneM2 pieces subject target mainTrajectory
  zone >>= return.zoneToString >>= putStrLn
  --}
  -- Demo position: two pawns, two kings and a "Target" square per side.
  let pieces = [ (makeChessPiece Black Pawn (1,5)),
                 (makeChessPiece Black Target (6,8)),
                 (makeChessPiece White Target (1,1)),
                 (makeChessPiece White Pawn (6,6)),
                 (makeChessPiece White King (1,8)),
                 (makeChessPiece Black King (8,6))]
  --}
  {--
  let pieces = [ (makeChessPiece White Bishop (3,2)),
                 (makeChessPiece Black Pawn (4,5)),
                 (makeChessPiece Black King (7,7)),
                 (makeChessPiece Black Knight (2,7)),
                 (makeChessPiece White King (7,1)),
                 (makeChessPiece White Pawn (6,3)),
                 (makeChessPiece White Pawn (5,5))]
  --}
  -- Build one zone per (attacker, target) pair, in both directions.
  let teamA = filter (\x -> White == color x) pieces
  let teamB = filter (\x -> Black == color x) pieces
  let teamAZones = [ generateChessZoneM3 pieces mainPiece targetPiece (generationHelper mainPiece targetPiece pieces) | mainPiece <- teamA , targetPiece <- teamB ]
  let teamBZones = [ generateChessZoneM3 pieces mainPiece targetPiece (generationHelper mainPiece targetPiece pieces) | mainPiece <- teamB , targetPiece <- teamA ]
  -- Keep only zone entries whose third component exceeds 1 -- presumably
  -- the "significant" squares; confirm against DistanceTables.
  let teamAZonesSignificant = map (liftM $ filter (\(_,_,x) -> x > 1)) teamAZones
  let teamBZonesSignificant = map (liftM $ filter (\(_,_,x) -> x > 1)) teamBZones
  let allZones = teamAZonesSignificant ++ teamBZonesSignificant
  let allTheNames = [ (mainPiece, targetPiece) | mainPiece <- teamA , targetPiece <- teamB] ++ [ (mainPiece, targetPiece) | mainPiece <- teamB , targetPiece <- teamA]
  printAllTheZones' allTheNames allZones
  -- printAllTheZones allZones
  print "bye"
-- | Compute the first trajectory for @mainPiece@ towards @targetPiece@,
-- treating every other piece on the board as an obstacle.  Falls back to
-- the piece's own location when no trajectory exists, so the result is
-- always non-empty.
generationHelper :: Piece -> Piece -> [Piece] -> [Location]
generationHelper mainPiece targetPiece allPieces =
  -- Pattern matching replaces the original '== []' emptiness test (which
  -- needed an 'Eq' constraint) and the partial 'head'.
  case bJT 1 mainPiece (location targetPiece) obstacles of
    []               -> [location mainPiece]
    (trajectory : _) -> trajectory
  where
    -- Every piece other than the subject and the target blocks movement.
    obstacles = map location $ filter (\p -> p /= mainPiece && p /= targetPiece) allPieces
-- | Run each zone-producing action and print its rendering, separating
-- entries (and terminating the list) with " --- " markers.
printAllTheZones :: (Show a, Show a1) => [IO [(a, [Location], a1)]] -> IO ()
printAllTheZones [] = putStrLn " --- "
printAllTheZones (z:zs) = do
  -- 'action >>= return . f' is just 'fmap'; bind and render directly.
  zone <- z
  putStrLn (zoneToString zone)
  putStrLn " --- "
  printAllTheZones zs
-- | Like 'printAllTheZones' but prints a label before each zone.  Stops at
-- the end of the shorter list; the original crashed with a pattern-match
-- failure when the labels outnumbered the zones.
printAllTheZones' :: (Show a1, Show a2, Show a) => [a] -> [IO [(a1, [Location], a2)]] -> IO ()
printAllTheZones' [] _ = putStrLn " --- "
printAllTheZones' _ [] = putStrLn " --- "
printAllTheZones' (n:ns) (z:zs) = do
  print n
  zone <- z
  putStrLn (zoneToString zone)
  putStrLn " --- "
  printAllTheZones' ns zs
| joshuaunderwood7/HaskeLinGeom | GrammarOfZonesMain.hs | gpl-3.0 | 3,539 | 0 | 18 | 865 | 863 | 455 | 408 | 37 | 2 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Users.ModelSpec where
import Control.Monad (void)
import Control.Monad.Trans (liftIO)
import System.Random
import Test.Hspec
import Test.Hspec.Fn
import Common
import Ctxt
import Users.Model
-- | hspec-fn test-data factory for 'User': 'fields' generates a random,
-- collision-free 'NewUser'; 'save' persists it and reads the stored row
-- back so tests receive the database's view of the user.
instance Factory Ctxt User (HspecFn NewUser) where
  fields = do rand <- liftIO randomIO
              let username = "user" <> tshow rand
              let email = username <> "@test.com"
              let pass = "pass" <> tshow rand
              return $ NewUser username email pass
  save fNewUser = do newUser <- fNewUser
                     Just user <-
                       eval (\ctxt -> do
                                void $ createUser ctxt newUser
                                getUserByUsername ctxt (newUserUsername newUser))
                     return user
-- | Database-facing tests for the user model, run via hspec-fn so each case
-- executes against a fresh application context.
spec :: Spec
spec = fnTests $ do
  describe "getUsers" $ do
    it "should get all the users" $ do
      newUser <- create id
      users <- eval getUsers
      (newUser `elem` users) `shouldEqual` True
  describe "createUser" $ do
    it "should create a new user" $ do
      void $ eval (\ctxt -> createUser ctxt
                             (NewUser "new" "new@new.com" "pass"))
      [user] <- eval getUsers
      userEmail user `shouldEqual` "new@new.com"
  describe "getUserByUsername" $ do
    it "should get the user with that username" $ do
      newUser <- create id
      let username = userUsername newUser
      Just user <- eval (`getUserByUsername` username)
      userEmail user `shouldEqual` userEmail newUser
  describe "getUserByEmail" $ do
    it "should get the user with that email" $ do
      newUser <- create id
      let email = userEmail newUser
      Just user <- eval (`getUserByEmail` email)
      userUsername user `shouldEqual` userUsername newUser
  describe "authenticateUser" $ do
    it "should get Just the user with that email and password" $ do
      void $ eval (\ctxt -> createUser ctxt
                             (NewUser "new" "new@new.com" "pass"))
      Just user <- eval (\ctxt -> authenticateUser ctxt "new" "pass")
      userUsername user `shouldEqual` "new"
    it "should return Nothing if the email is wrong" $ do
      void $ eval (\ctxt -> createUser ctxt
                             (NewUser "new" "new@new.com" "pass"))
      eval (\ctxt -> authenticateUser ctxt "eew" "pass")
        >>= shouldEqual Nothing
    it "should return Nothing if the password is wrong" $ do
      void $ eval (\ctxt -> createUser ctxt
                             (NewUser "new" "new@new.com" "pass"))
      eval (\ctxt -> authenticateUser ctxt "new" "random")
        >>= shouldEqual Nothing
| emhoracek/smooch | app/tests/Users/ModelSpec.hs | gpl-3.0 | 2,933 | 0 | 20 | 951 | 757 | 360 | 397 | 68 | 1 |
module Paths_wizzard (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- Cabal-generated path helpers: recover from IO errors (e.g. an unset
-- environment variable) by falling back to the configure-time constants.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
-- | Package version recorded at configure time.
version :: Version
version = Version {versionBranch = [0,1,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
-- Absolute sandbox install paths baked in when the package was configured.
bindir     = "/home/fayong/prog/Haskell/wizzard/.cabal-sandbox/bin"
libdir     = "/home/fayong/prog/Haskell/wizzard/.cabal-sandbox/lib/x86_64-linux-ghc-7.6.3/wizzard-0.1.0.0"
datadir    = "/home/fayong/prog/Haskell/wizzard/.cabal-sandbox/share/x86_64-linux-ghc-7.6.3/wizzard-0.1.0.0"
libexecdir = "/home/fayong/prog/Haskell/wizzard/.cabal-sandbox/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
-- Each getter honours a "wizzard_<dir>" environment override, then falls
-- back to the constant above.
getBinDir = catchIO (getEnv "wizzard_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "wizzard_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "wizzard_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "wizzard_libexecdir") (\_ -> return libexecdir)
-- | Resolve a data file name against the (possibly overridden) data
-- directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (\dir -> dir ++ "/" ++ name) getDataDir
| eniac314/wizzard | dist/dist-sandbox-de3c8ba2/build/autogen/Paths_wizzard.hs | gpl-3.0 | 1,299 | 0 | 10 | 167 | 332 | 190 | 142 | 26 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Refraction.Discover.Types
( adFinder
, tao
, onionLengthWithoutTLD
, Location
, Nonce
) where
import Data.ByteString (ByteString)
import Data.Text
import Data.Word (Word64)
import Network.Refraction.BitcoinUtils
-- TODO(hudon): make this fee dynamic (set by user in config?)
-- | Fee attached to an advertisement transaction, in satoshis.
tao = 10000 :: SatoshiValue
-- | Marker string used to recognize advertisements.
--TODO(hudon): identifier could be unique per pool
adFinder = "RFRCTN14" :: Text
-- | Length of an onion address with the @.onion@ TLD stripped.
onionLengthWithoutTLD = 16 :: Int
-- | A peer location -- presumably an onion address, given
-- 'onionLengthWithoutTLD' above; TODO confirm.
type Location = ByteString
-- | Random nonce value exchanged during discovery.
type Nonce = Word64
| hudon/refraction-hs | src/Network/Refraction/Discover/Types.hs | gpl-3.0 | 561 | 0 | 5 | 101 | 96 | 63 | 33 | 16 | 1 |
{-
Copyright (C) 2015-2016 Ramakrishnan Muthukrishnan <ram@rkrishnan.org>
This file is part of FuncTorrent.
FuncTorrent is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
FuncTorrent is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with FuncTorrent; if not, see <http://www.gnu.org/licenses/>
-}
-- | Tests for MagnetURI module
{-# LANGUAGE OverloadedStrings #-}
module MagneturiTests (tests) where
import FuncTorrent.MagnetURI
import Test.Hspec
-- | Hspec suite for 'parseMagneturi': each case feeds a literal magnet URI
-- and pins the exact parsed 'Magnetinfo' (info hash, tracker list,
-- percent-decoded display name, no length).
tests :: IO ()
tests = hspec $ do
    describe "MagnetURI starts with magnet:? .. " $ do
      it "Valid MagnetURI" $ do
        -- Minimal URI: only the xt (info hash) component is present.
        parseMagneturi "magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a"
          `shouldBe` (Right (Magnetinfo {infoHash = "urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a", trackerlist = [], name = "", xlen = Nothing}))
      it "Valid MagnetURI with tracker list" $ do
        -- dn and repeated tr components; expected values show the tr URLs
        -- and the dn brackets percent-decoded.
        parseMagneturi "magnet:?xt=urn:btih:1f8a4ee3c3f57e81f8f0b4e658177201fc2a3118&dn=Honey+Bee+2+%5B2017%5D+Malayalam+DVDRiP+x264+AAC+700MB+ZippyMovieZ+E&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Fzer0day.ch%3A1337&tr=udp%3A%2F%2Fopen.demonii.com%3A1337&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Fexodus.desync.com%3A6969"
          `shouldBe` (Right (Magnetinfo {infoHash = "urn:btih:1f8a4ee3c3f57e81f8f0b4e658177201fc2a3118", trackerlist = [ "udp://tracker.leechers-paradise.org:6969", "udp://zer0day.ch:1337", "udp://open.demonii.com:1337", "udp://tracker.coppersurfer.tk:6969", "udp://exodus.desync.com:6969"], name = "Honey+Bee+2+[2017]+Malayalam+DVDRiP+x264+AAC+700MB+ZippyMovieZ+E", xlen = Nothing}))
      it "Valid MagnetURI with tracker list 2" $ do
        parseMagneturi "magnet:?xt=urn:btih:88c491dbbcdb0bb1ad142fabddd1436c09f17e7e&dn=CIA+Comrade+in+America+%282017%29+Malayalam+DESI+SCR+x264+AAC+700MB+&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Fzer0day.ch%3A1337&tr=udp%3A%2F%2Fopen.demonii.com%3A1337&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Fexodus.desync.com%3A6969"
          `shouldBe` (Right (Magnetinfo {infoHash = "urn:btih:88c491dbbcdb0bb1ad142fabddd1436c09f17e7e", trackerlist = ["udp://tracker.leechers-paradise.org:6969","udp://zer0day.ch:1337","udp://open.demonii.com:1337","udp://tracker.coppersurfer.tk:6969","udp://exodus.desync.com:6969"], name = "CIA+Comrade+in+America+(2017)+Malayalam+DESI+SCR+x264+AAC+700MB+", xlen = Nothing }))
| vu3rdd/functorrent | test/MagneturiTests.hs | gpl-3.0 | 2,813 | 0 | 17 | 286 | 254 | 146 | 108 | 16 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.MEnv
(
MEnv',
module MEnv,
module MEnv.Tick,
module MEnv.Keys,
module MEnv.Screen,
module MEnv.Sound,
module MEnv.Foreign,
module MEnv.Players,
module MEnv.System,
module MEnv.Resource,
module Game.GameData,
) where
import MEnv
import MEnv.Tick
import MEnv.Keys
import MEnv.Screen
import MEnv.Sound
import MEnv.Foreign
import MEnv.Players
import MEnv.System
import MEnv.Resource
import Game.GameData
-- | The game's monad environment: 'MEnv' specialised to 'GameData'.
type MEnv' =
    MEnv GameData
| karamellpelle/grid | source/Game/MEnv.hs | gpl-3.0 | 1,252 | 0 | 5 | 251 | 145 | 99 | 46 | 25 | 0 |
module Filter.TreeDeduction (makeTreeDeduction) where
import Text.Pandoc
import Filter.Util (splitIt, intoChunks,formatChunk, unlines')
import Data.Map (fromList, toList, unions)
import Prelude
-- | Pandoc filter entry point: turn a code block tagged @TreeDeduction@
-- into a list of exercise widgets (one per chunk), and a block tagged
-- @TreePlayground@ into a single playground widget.  Anything else is
-- passed through unchanged.
makeTreeDeduction :: Block -> Block
makeTreeDeduction blk@(CodeBlock (_, classes, extra) contents)
    | "TreeDeduction" `elem` classes =
        Div ("", [], []) (map (activate classes extra) (intoChunks contents))
    | "TreePlayground" `elem` classes =
        Div ("", [], []) [toPlayground classes extra contents]
    | otherwise = blk
makeTreeDeduction other = other
-- Build one exercise widget from a chunk.  The chosen @system@ option is
-- driven by the block's classes; all other options come from the block's
-- explicit attributes plus the fixed ones below.
activate cls extra chunk
    | "propNK" `elem` cls = template (opts [("system","propNK")])
    | "propNJ" `elem` cls = template (opts [("system","propNJ")])
    | "openLogicNK" `elem` cls = template (opts [("system","openLogicNK")])
    | otherwise = template (opts [])
    where -- first word of a line (exercise number)
          numof = takeWhile (/= ' ')
          -- NOTE(review): partial pattern -- crashes on an empty chunk;
          -- presumably intoChunks never yields one.  TODO confirm.
          (h:t) = formatChunk chunk
          -- everything after the first word (the goal proposition)
          propof = dropWhile (== ' ') . dropWhile (/= ' ')
          -- ad-hoc options override fixed ones, which override 'extra'?
          -- NOTE(review): Data.Map.unions is left-biased, so 'extra' wins
          -- over 'fixed' -- confirm this precedence is intended.
          opts adhoc = unions [fromList extra, fromList fixed, fromList adhoc]
          fixed = [ ("goal", propof h)
                  , ("submission", "saveAs:" ++ numof h)
                  , ("type", "treedeductionchecker")
                  ]
          -- Emit the exercise Div: a label span plus the widget Div whose
          -- options become data-carnap-* attributes.
          template opts = Div ("",["exercise"],[])
                        [ Plain
                            [Span ("",[],[])
                                [Str (numof h)]
                            ]
                        , Div ("",[],map (\(x,y) -> ("data-carnap-" ++ x,y)) $ toList opts)
                            [Plain [Str (unlines' t)]]
                        ]
-- | Build a playground widget: like 'activate' but with no goal to check
-- and a fixed \"Playground\" label covering the whole block contents.
-- (Fix: removed the unused local binding @numof@, dead code copied over
-- from 'activate'.)
toPlayground cls extra contents
    | "propNK" `elem` cls = template (opts [("system","propNK")])
    | "propNJ" `elem` cls = template (opts [("system","propNJ")])
    | "openLogicNK" `elem` cls = template (opts [("system","openLogicNK")])
    | otherwise = template (opts [])
    where -- Merge ad-hoc options with the block's explicit attributes;
          -- Data.Map.unions is left-biased, so 'extra' takes precedence.
          opts adhoc = unions [fromList extra, fromList fixed, fromList adhoc]
          fixed = [ ("type", "treedeductionchecker") ]
          -- Emit the exercise Div: a "Playground" label plus the widget Div
          -- whose options become data-carnap-* attributes.
          template opts = Div ("",["exercise"],[])
                        [ Plain
                            [Span ("",[],[])
                                [Str "Playground"]
                            ]
                        , Div ("",[],map (\(x,y) -> ("data-carnap-" ++ x,y)) $ toList opts)
                            [Plain [Str (unlines' $ formatChunk contents)]]
                        ]
| opentower/carnap | Carnap-Server/Filter/TreeDeduction.hs | gpl-3.0 | 2,504 | 0 | 16 | 910 | 910 | 493 | 417 | 43 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer.Accounts.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing account. This method supports patch semantics.
--
-- /See:/ <https://developers.google.com/ad-exchange/buyer-rest Ad Exchange Buyer API Reference> for @adexchangebuyer.accounts.patch@.
module Network.Google.Resource.AdExchangeBuyer.Accounts.Patch
(
-- * REST Resource
AccountsPatchResource
-- * Creating a Request
, accountsPatch
, AccountsPatch
-- * Request Lenses
, apPayload
, apConfirmUnsafeAccountChange
, apId
) where
import Network.Google.AdExchangeBuyer.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer.accounts.patch@ method which the
-- 'AccountsPatch' request conforms to.
-- Maps to: PATCH \/adexchangebuyer\/v1.4\/accounts\/{id}
type AccountsPatchResource =
     "adexchangebuyer" :>
       "v1.4" :>
         "accounts" :>
           Capture "id" (Textual Int32) :>
             QueryParam "confirmUnsafeAccountChange" Bool :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] Account :> Patch '[JSON] Account
-- | Updates an existing account. This method supports patch semantics.
--
-- Request record: one field per path\/query\/body component of
-- 'AccountsPatchResource'.
--
-- /See:/ 'accountsPatch' smart constructor.
data AccountsPatch = AccountsPatch'
    { _apPayload :: !Account
    , _apConfirmUnsafeAccountChange :: !(Maybe Bool)
    , _apId :: !(Textual Int32)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Build an 'AccountsPatch' request from the required fields only; the
-- optional confirmation flag starts as 'Nothing'.
--
-- Adjust other fields with these lenses:
--
-- * 'apPayload'
--
-- * 'apConfirmUnsafeAccountChange'
--
-- * 'apId'
accountsPatch
    :: Account -- ^ 'apPayload'
    -> Int32 -- ^ 'apId'
    -> AccountsPatch
accountsPatch payload accountId =
    AccountsPatch'
    { _apPayload = payload
    , _apConfirmUnsafeAccountChange = Nothing
    , _apId = _Coerce # accountId
    }
-- Lenses over the 'AccountsPatch' request fields.
-- | Multipart request metadata.
apPayload :: Lens' AccountsPatch Account
apPayload
  = lens _apPayload (\ s a -> s{_apPayload = a})
-- | Confirmation for erasing bidder and cookie matching urls.
apConfirmUnsafeAccountChange :: Lens' AccountsPatch (Maybe Bool)
apConfirmUnsafeAccountChange
  = lens _apConfirmUnsafeAccountChange
      (\ s a -> s{_apConfirmUnsafeAccountChange = a})
-- | The account id
-- (composed with '_Coerce' to unwrap the 'Textual' newtype for callers)
apId :: Lens' AccountsPatch Int32
apId = lens _apId (\ s a -> s{_apId = a}) . _Coerce
-- Wire the record to the servant-style resource: arguments are supplied in
-- the same order the path\/query components appear in 'AccountsPatchResource'.
instance GoogleRequest AccountsPatch where
        type Rs AccountsPatch = Account
        type Scopes AccountsPatch =
             '["https://www.googleapis.com/auth/adexchange.buyer"]
        requestClient AccountsPatch'{..}
          = go _apId _apConfirmUnsafeAccountChange
              (Just AltJSON)
              _apPayload
              adExchangeBuyerService
          where go
                  = buildClient (Proxy :: Proxy AccountsPatchResource)
                      mempty
| rueshyna/gogol | gogol-adexchange-buyer/gen/Network/Google/Resource/AdExchangeBuyer/Accounts/Patch.hs | mpl-2.0 | 3,633 | 0 | 14 | 849 | 482 | 285 | 197 | 71 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Disks.Resize
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Resizes the specified persistent disk. You can only increase the size of
-- the disk.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.disks.resize@.
module Network.Google.Resource.Compute.Disks.Resize
(
-- * REST Resource
DisksResizeResource
-- * Creating a Request
, disksResize
, DisksResize
-- * Request Lenses
, drRequestId
, drProject
, drDisk
, drZone
, drPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.disks.resize@ method which the
-- 'DisksResize' request conforms to.
-- Maps to: POST \/compute\/v1\/projects\/{project}\/zones\/{zone}\/disks\/{disk}\/resize
type DisksResizeResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "disks" :>
                   Capture "disk" Text :>
                     "resize" :>
                       QueryParam "requestId" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] DisksResizeRequest :>
                             Post '[JSON] Operation
-- | Resizes the specified persistent disk. You can only increase the size of
-- the disk.
--
-- Request record: one field per path\/query\/body component of
-- 'DisksResizeResource'.
--
-- /See:/ 'disksResize' smart constructor.
data DisksResize =
  DisksResize'
    { _drRequestId :: !(Maybe Text)
    , _drProject :: !Text
    , _drDisk :: !Text
    , _drZone :: !Text
    , _drPayload :: !DisksResizeRequest
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Build a 'DisksResize' request from the required fields only; the
-- optional request ID starts as 'Nothing'.
--
-- Adjust other fields with these lenses:
--
-- * 'drRequestId'
--
-- * 'drProject'
--
-- * 'drDisk'
--
-- * 'drZone'
--
-- * 'drPayload'
disksResize
    :: Text -- ^ 'drProject'
    -> Text -- ^ 'drDisk'
    -> Text -- ^ 'drZone'
    -> DisksResizeRequest -- ^ 'drPayload'
    -> DisksResize
disksResize project disk zone payload =
  DisksResize'
    { _drRequestId = Nothing
    , _drProject = project
    , _drDisk = disk
    , _drZone = zone
    , _drPayload = payload
    }
-- Lenses over the 'DisksResize' request fields.
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
drRequestId :: Lens' DisksResize (Maybe Text)
drRequestId
  = lens _drRequestId (\ s a -> s{_drRequestId = a})
-- | Project ID for this request.
drProject :: Lens' DisksResize Text
drProject
  = lens _drProject (\ s a -> s{_drProject = a})
-- | The name of the persistent disk.
drDisk :: Lens' DisksResize Text
drDisk = lens _drDisk (\ s a -> s{_drDisk = a})
-- | The name of the zone for this request.
drZone :: Lens' DisksResize Text
drZone = lens _drZone (\ s a -> s{_drZone = a})
-- | Multipart request metadata.
drPayload :: Lens' DisksResize DisksResizeRequest
drPayload
  = lens _drPayload (\ s a -> s{_drPayload = a})
-- Wire the record to the servant-style resource: arguments are supplied in
-- the same order the path\/query components appear in 'DisksResizeResource'.
instance GoogleRequest DisksResize where
        type Rs DisksResize = Operation
        type Scopes DisksResize =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient DisksResize'{..}
          = go _drProject _drZone _drDisk _drRequestId
              (Just AltJSON)
              _drPayload
              computeService
          where go
                  = buildClient (Proxy :: Proxy DisksResizeResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Disks/Resize.hs | mpl-2.0 | 4,823 | 0 | 19 | 1,207 | 638 | 380 | 258 | 95 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.SetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the access control policy on the specified resource. Replaces any
-- existing policy. Can return \`NOT_FOUND\`, \`INVALID_ARGUMENT\`, and
-- \`PERMISSION_DENIED\` errors.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.consentStores.setIamPolicy@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.SetIAMPolicy
(
-- * REST Resource
ProjectsLocationsDataSetsConsentStoresSetIAMPolicyResource
-- * Creating a Request
, projectsLocationsDataSetsConsentStoresSetIAMPolicy
, ProjectsLocationsDataSetsConsentStoresSetIAMPolicy
-- * Request Lenses
, pldscssipXgafv
, pldscssipUploadProtocol
, pldscssipAccessToken
, pldscssipUploadType
, pldscssipPayload
, pldscssipResource
, pldscssipCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.consentStores.setIamPolicy@ method which the
-- 'ProjectsLocationsDataSetsConsentStoresSetIAMPolicy' request conforms to.
-- Maps to: POST \/v1\/{resource}:setIamPolicy
type ProjectsLocationsDataSetsConsentStoresSetIAMPolicyResource
     =
     "v1" :>
       CaptureMode "resource" "setIamPolicy" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] SetIAMPolicyRequest :>
                       Post '[JSON] Policy
-- | Sets the access control policy on the specified resource. Replaces any
-- existing policy. Can return \`NOT_FOUND\`, \`INVALID_ARGUMENT\`, and
-- \`PERMISSION_DENIED\` errors.
--
-- Request record: one field per path\/query\/body component of the
-- corresponding resource type.
--
-- /See:/ 'projectsLocationsDataSetsConsentStoresSetIAMPolicy' smart constructor.
data ProjectsLocationsDataSetsConsentStoresSetIAMPolicy =
  ProjectsLocationsDataSetsConsentStoresSetIAMPolicy'
    { _pldscssipXgafv :: !(Maybe Xgafv)
    , _pldscssipUploadProtocol :: !(Maybe Text)
    , _pldscssipAccessToken :: !(Maybe Text)
    , _pldscssipUploadType :: !(Maybe Text)
    , _pldscssipPayload :: !SetIAMPolicyRequest
    , _pldscssipResource :: !Text
    , _pldscssipCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Build a setIamPolicy request from the required fields only; every
-- optional query parameter starts as 'Nothing'.
--
-- Adjust other fields with these lenses:
--
-- * 'pldscssipXgafv'
--
-- * 'pldscssipUploadProtocol'
--
-- * 'pldscssipAccessToken'
--
-- * 'pldscssipUploadType'
--
-- * 'pldscssipPayload'
--
-- * 'pldscssipResource'
--
-- * 'pldscssipCallback'
projectsLocationsDataSetsConsentStoresSetIAMPolicy
    :: SetIAMPolicyRequest -- ^ 'pldscssipPayload'
    -> Text -- ^ 'pldscssipResource'
    -> ProjectsLocationsDataSetsConsentStoresSetIAMPolicy
projectsLocationsDataSetsConsentStoresSetIAMPolicy payload resource =
  ProjectsLocationsDataSetsConsentStoresSetIAMPolicy'
    { _pldscssipXgafv = Nothing
    , _pldscssipUploadProtocol = Nothing
    , _pldscssipAccessToken = Nothing
    , _pldscssipUploadType = Nothing
    , _pldscssipPayload = payload
    , _pldscssipResource = resource
    , _pldscssipCallback = Nothing
    }
-- Lenses over the setIamPolicy request fields.
-- | V1 error format.
pldscssipXgafv :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy (Maybe Xgafv)
pldscssipXgafv
  = lens _pldscssipXgafv
      (\ s a -> s{_pldscssipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldscssipUploadProtocol :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy (Maybe Text)
pldscssipUploadProtocol
  = lens _pldscssipUploadProtocol
      (\ s a -> s{_pldscssipUploadProtocol = a})
-- | OAuth access token.
pldscssipAccessToken :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy (Maybe Text)
pldscssipAccessToken
  = lens _pldscssipAccessToken
      (\ s a -> s{_pldscssipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldscssipUploadType :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy (Maybe Text)
pldscssipUploadType
  = lens _pldscssipUploadType
      (\ s a -> s{_pldscssipUploadType = a})
-- | Multipart request metadata.
pldscssipPayload :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy SetIAMPolicyRequest
pldscssipPayload
  = lens _pldscssipPayload
      (\ s a -> s{_pldscssipPayload = a})
-- | REQUIRED: The resource for which the policy is being specified. See the
-- operation documentation for the appropriate value for this field.
pldscssipResource :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy Text
pldscssipResource
  = lens _pldscssipResource
      (\ s a -> s{_pldscssipResource = a})
-- | JSONP
pldscssipCallback :: Lens' ProjectsLocationsDataSetsConsentStoresSetIAMPolicy (Maybe Text)
pldscssipCallback
  = lens _pldscssipCallback
      (\ s a -> s{_pldscssipCallback = a})
-- Wire the record to the servant-style resource: arguments are supplied in
-- the same order the path\/query components appear in the resource type.
instance GoogleRequest
           ProjectsLocationsDataSetsConsentStoresSetIAMPolicy
         where
        type Rs
               ProjectsLocationsDataSetsConsentStoresSetIAMPolicy
             = Policy
        type Scopes
               ProjectsLocationsDataSetsConsentStoresSetIAMPolicy
             = '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient
          ProjectsLocationsDataSetsConsentStoresSetIAMPolicy'{..}
          = go _pldscssipResource _pldscssipXgafv
              _pldscssipUploadProtocol
              _pldscssipAccessToken
              _pldscssipUploadType
              _pldscssipCallback
              (Just AltJSON)
              _pldscssipPayload
              healthcareService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsDataSetsConsentStoresSetIAMPolicyResource)
                      mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/ConsentStores/SetIAMPolicy.hs | mpl-2.0 | 6,857 | 0 | 16 | 1,368 | 784 | 460 | 324 | 126 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module : Properties.Series
-- Copyright : (c) 2012 Brendan Hay <brendan@soundcloud.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan@soundcloud.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Properties.Series (
seriesProperties
) where
import Blaze.ByteString.Builder
import Control.Applicative ((<$>))
import Data.Maybe
import Numbers.Types
import Numbers.Whisper.Series
import Properties.Generators
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit
import Test.QuickCheck
-- | The full test tree for the time-series module: QuickCheck property
-- groups for create\/update\/fetch\/invariants plus worked HUnit examples.
-- (Fix: removed a duplicated @testProperty \"input step used by create\"@
-- entry that registered the same property twice.)
seriesProperties :: Test.Framework.Test
seriesProperties = testGroup "time series"
    [ testGroup "create" [
        testProperty "input resolution used by create" prop_input_resolution_used_by_create
      , testProperty "input step used by create" prop_input_step_used_by_create
      , testProperty "end is less than or equal to create time" prop_end_less_than_or_equal_to_create_time
      , testProperty "end is within step of create time" prop_end_within_step_of_create_time
      , testProperty "last value equals create value" prop_last_value_equals_create_value
      , testProperty "total values equals create value" prop_total_values_equals_create_value
      ]
    , testGroup "update" [
        testProperty "resolution preserved by update" prop_resolution_preserved_by_update
      , testProperty "step preserved by update" prop_step_preserved_by_update
      , testProperty "old values ignored by update" prop_old_values_ignored_by_update
      , testProperty "later times move end along" prop_later_times_move_end_along_in_update
      , testProperty "new ends are less than or equal to update time" prop_new_end_less_than_or_equal_to_update_time
      , testProperty "new ends are within step of create time" prop_new_end_within_step_of_create_time
      , testProperty "for new ends last value equals update value" prop_new_end_last_value_equals_update_value
      , testProperty "update between start and end adds value" prop_update_between_start_and_end_adds_value
      ]
    , testGroup "fetch" [
        testProperty "fetching from start to end is series identity" prop_fetch_start_to_end_is_series_identity
      , testProperty "fetching preserves the resolution" prop_fetch_preserves_resolution
      , testProperty "fetching preserves the step" prop_fetch_preserves_step
      , testProperty "fetched values are less than or equal to original" prop_fetch_values_less_than_or_equal_to_original
      ]
    , testGroup "series" [
        testProperty "end is divisible by step" prop_end_divisible_by_step
      , testProperty "start - end diff equals resolution * step" prop_start_end_equals_resolution_times_step
      , testProperty "values length equals resolution" prop_values_length_equals_resolution
      , testProperty "orders values by their insertion time" prop_ordered_by_insertion_time
      ]
    , testGroup "examples" [
        testCase "a worked example of a create" test_example_create
      , testCase "a worked example of an update" test_example_update
      , testCase "a worked example of a fetch" test_example_fetch
      ]
    ]
-- | The resolution handed to 'create' is the resolution of the resulting
-- series.  (Consistency: use @RecordWildCards@ like every sibling property
-- instead of an explicit binder.)
prop_input_resolution_used_by_create :: SeriesCreate -> Bool
prop_input_resolution_used_by_create SeriesCreate{..} =
    createInputRes == createOutputRes
-- Properties over 'SeriesCreate': creation must honour its inputs.
prop_input_step_used_by_create :: SeriesCreate -> Bool
prop_input_step_used_by_create SeriesCreate{..} =
    createInputStep == createOutputStep
-- The series end never overshoots the creation timestamp...
prop_end_less_than_or_equal_to_create_time :: SeriesCreate -> Bool
prop_end_less_than_or_equal_to_create_time SeriesCreate{..} =
    fromIntegral createOutputEnd <= (fromIntegral createInputTime :: Int)
-- ...and never lags it by a full step or more.
prop_end_within_step_of_create_time :: SeriesCreate -> Bool
prop_end_within_step_of_create_time SeriesCreate{..} =
    fromIntegral createOutputEnd > (fromIntegral createInputTime - createInputStep :: Int)
-- The created value lands in the newest bucket.
-- NOTE(review): fromJust/last are safe only because create always fills
-- the final bucket -- guaranteed by the implementation under test.
prop_last_value_equals_create_value :: SeriesCreate -> Bool
prop_last_value_equals_create_value SeriesCreate{..} =
    createInputVal == fromJust (last createOutputValues)
-- ...and it is the only value present.
prop_total_values_equals_create_value :: SeriesCreate -> Bool
prop_total_values_equals_create_value SeriesCreate{..} =
    createInputVal == sum (catMaybes createOutputValues)
-- Properties over 'SeriesUpdate': updates preserve series shape.
prop_resolution_preserved_by_update :: SeriesUpdate -> Bool
prop_resolution_preserved_by_update SeriesUpdate{..} =
    updateInputRes == updateOutputRes
prop_step_preserved_by_update :: SeriesUpdate -> Bool
prop_step_preserved_by_update SeriesUpdate{..} =
    updateInputStep == updateOutputStep
-- An update older than the window is a no-op.
prop_old_values_ignored_by_update :: SeriesUpdate -> Property
prop_old_values_ignored_by_update su@SeriesUpdate{..} =
    isUpdateBeforeStart su ==> updateInputSeries == updateOutputSeries
-- An update newer than the window advances the end...
prop_later_times_move_end_along_in_update :: SeriesUpdate -> Property
prop_later_times_move_end_along_in_update su@SeriesUpdate{..} =
    isUpdateAfterEnd su ==> updateInputEnd < updateOutputEnd
-- ...to at most the update timestamp...
prop_new_end_less_than_or_equal_to_update_time :: SeriesUpdate -> Property
prop_new_end_less_than_or_equal_to_update_time su@SeriesUpdate{..} =
    isUpdateAfterEnd su ==> fromIntegral updateOutputEnd <= (fromIntegral updateInputTime :: Int)
-- ...and to within one step of it.
prop_new_end_within_step_of_create_time :: SeriesUpdate -> Property
prop_new_end_within_step_of_create_time su@SeriesUpdate{..} =
    isUpdateAfterEnd su ==> fromIntegral updateOutputEnd > (fromIntegral updateInputTime - updateInputStep :: Int)
prop_new_end_last_value_equals_update_value :: SeriesUpdate -> Property
prop_new_end_last_value_equals_update_value su@SeriesUpdate{..} =
    isUpdateAfterEnd su ==> updateInputVal == fromJust (last updateOutputValues)
-- In-window updates accumulate into the totals (prettyClose: float sums).
prop_update_between_start_and_end_adds_value :: SeriesUpdate -> Property
prop_update_between_start_and_end_adds_value su@SeriesUpdate{..} =
    isUpdateBetweenStartAndEnd su ==> prettyClose (sum (catMaybes updateInputValues) + updateInputVal) (sum (catMaybes updateOutputValues))
-- Properties over fetch and general series invariants.
-- Fetching a series' own exact window changes nothing.
prop_fetch_start_to_end_is_series_identity :: Series -> Bool
prop_fetch_start_to_end_is_series_identity series =
    series == fetch (Time . fromIntegral $ start series) (Time . fromIntegral $ end series) series
prop_fetch_preserves_resolution :: SeriesFetch -> Bool
prop_fetch_preserves_resolution SeriesFetch{..} =
    fetchInputRes == fetchOutputRes
prop_fetch_preserves_step :: SeriesFetch -> Bool
prop_fetch_preserves_step SeriesFetch{..} =
    fetchInputStep == fetchOutputStep
-- Fetching can only drop values, never invent them.
prop_fetch_values_less_than_or_equal_to_original :: SeriesFetch -> Bool
prop_fetch_values_less_than_or_equal_to_original SeriesFetch{..} =
    sum (catMaybes fetchOutputValues) <= sum (catMaybes fetchInputValues)
-- Structural invariants every series must satisfy.
prop_end_divisible_by_step :: Series -> Bool
prop_end_divisible_by_step series =
    0 == fromIntegral (end series) `mod` step series
prop_start_end_equals_resolution_times_step :: Series -> Bool
prop_start_end_equals_resolution_times_step series =
    fromIntegral (end series - start series) == (resolution series * step series)
prop_values_length_equals_resolution :: Series -> Bool
prop_values_length_equals_resolution series =
    resolution series == length (values series)
-- Feeding one fresh value per bucket (each one step past the current end)
-- must reproduce the inputs in insertion order.
-- NOTE(review): lazy foldl is acceptable here only because the list length
-- is bounded by the series resolution.
prop_ordered_by_insertion_time :: Series -> Property
prop_ordered_by_insertion_time series =
    forAll (vector $ resolution series) $ \xs ->
    (map Just xs) == values (foldl upd series xs)
  where
    upd s v = update (incr s) v s
    incr s = fromIntegral (end s) + fromIntegral (step s)
-- | Worked example: create at t=50000 with resolution 5, step 10 -- the
-- window is the 5 buckets ending at 50000 and only the last holds a value.
test_example_create :: Assertion
test_example_create = do
    let series = create 5 10 (Time 50000) 3.4
    assertEqual "resolution" 5 (resolution series)
    assertEqual "step" 10 (step series)
    assertEqual "end" (I 50000) (end series)
    assertEqual "start" (I 49950) (start series)
    assertEqual "values"
        [Nothing, Nothing, Nothing, Nothing, Just 3.4]
        (values series)
    assertEqual "build"
        "49950,50000,10|None,None,None,None,3.4"
        (toByteString $ build series)
-- | Worked example: an update one step later slides the window forward by
-- one bucket while keeping the earlier value.
test_example_update :: Assertion
test_example_update = do
    let series = update 50010 4.5 $ create 5 10 (Time 50000) 3.4
    assertEqual "resolution" 5 (resolution series)
    assertEqual "step" 10 (step series)
    assertEqual "end" (I 50010) (end series)
    assertEqual "start" (I 49960) (start series)
    assertEqual "values"
        [Nothing, Nothing, Nothing, Just 3.4, Just 4.5]
        (values series)
    assertEqual "build"
        "49960,50010,10|None,None,None,3.4,4.5"
        (toByteString $ build series)
-- | Worked example: fetching a range beyond the current end re-aligns the
-- window to the requested range, shifting the stored values.
test_example_fetch :: Assertion
test_example_fetch = do
    let series = fetch (Time 49950) (Time 50020)
               . update 50010 4.5 $ create 5 10 (Time 50000) 3.4
    assertEqual "resolution" 5 (resolution series)
    assertEqual "step" 10 (step series)
    assertEqual "end" (I 50020) (end series)
    assertEqual "start" (I 49970) (start series)
    assertEqual "values"
        [Nothing, Nothing, Just 3.4, Just 4.5, Nothing]
        (values series)
    assertEqual "build"
        "49970,50020,10|None,None,3.4,4.5,None"
        (toByteString $ build series)
-- | Test fixture pairing the inputs handed to 'create' with the observable
-- outputs of the series it produced, so each property can compare them.
data SeriesCreate = SeriesCreate {
      createInputRes :: Resolution
    , createInputStep :: Step
    , createInputTime :: Time
    , createInputVal :: Double
    , createOutputSeries :: Series
    , createOutputRes :: Resolution
    , createOutputStep :: Step
    , createOutputStart :: Interval
    , createOutputEnd :: Interval
    , createOutputValues :: [Maybe Double]
    } deriving Show
-- | Generate a random 'create' call (bounded resolution/step, non-negative
-- value) and snapshot the resulting series' properties.
instance Arbitrary SeriesCreate where
    arbitrary = do
        r <- choose (1, maxResolution)
        s <- choose (1, 1000)
        t <- arbitrary
        NonNegative v <- arbitrary
        let series = create r s t v
        return SeriesCreate {
              createInputRes = r
            , createInputStep = s
            , createInputTime = t
            , createInputVal = v
            , createOutputSeries = series
            , createOutputRes = resolution series
            , createOutputStep = step series
            , createOutputStart = start series
            , createOutputEnd = end series
            , createOutputValues = values series
            }
-- | Test fixture pairing an 'update' call's inputs (time, value, and the
-- pre-update series' snapshot) with the post-update series' snapshot.
data SeriesUpdate = SeriesUpdate {
      updateInputTime :: Time
    , updateInputVal :: Double
    , updateInputSeries :: Series
    , updateInputRes :: Resolution
    , updateInputStep :: Step
    , updateInputStart :: Interval
    , updateInputEnd :: Interval
    , updateInputValues :: [Maybe Double]
    , updateOutputSeries :: Series
    , updateOutputRes :: Resolution
    , updateOutputStep :: Step
    , updateOutputStart :: Interval
    , updateOutputEnd :: Interval
    , updateOutputValues :: [Maybe Double]
    } deriving Show
-- | True when the update's timestamp falls before the series' first live
-- bucket, i.e. the update should have been discarded.
isUpdateBeforeStart :: SeriesUpdate -> Bool
isUpdateBeforeStart SeriesUpdate{..} = t < lowerBound
  where
    t = fromIntegral updateInputTime :: Int
    lowerBound = fromIntegral updateInputStart + updateInputStep
-- | True when the update's timestamp lands at least one step past the
-- series' end, i.e. the window must slide forward.
isUpdateAfterEnd :: SeriesUpdate -> Bool
isUpdateAfterEnd SeriesUpdate{..} = t >= upperBound
  where
    t = fromIntegral updateInputTime :: Int
    upperBound = fromIntegral updateInputEnd + updateInputStep
-- | True when the update lands inside the series' current window.
isUpdateBetweenStartAndEnd :: SeriesUpdate -> Bool
isUpdateBetweenStartAndEnd su =
    not (isUpdateBeforeStart su || isUpdateAfterEnd su)
-- | Generate a random series, apply one random update to it, and snapshot
-- both the before and after states for the properties to inspect.
instance Arbitrary SeriesUpdate where
    arbitrary = do
        s <- arbitrary
        NonNegative t <- arbitrary
        NonNegative v <- arbitrary
        let series = update t v s
        return SeriesUpdate {
              updateInputTime = t
            , updateInputVal = v
            , updateInputSeries = s
            , updateInputRes = resolution s
            , updateInputStep = step s
            , updateInputStart = start s
            , updateInputEnd = end s
            , updateInputValues = values s
            , updateOutputSeries = series
            , updateOutputRes = resolution s
            , updateOutputStep = step series
            , updateOutputStart = start series
            , updateOutputEnd = end series
            , updateOutputValues = values series
            }
-- | Test fixture pairing a 'fetch' call's inputs (range and pre-fetch
-- series' snapshot) with the post-fetch series' snapshot.
data SeriesFetch = SeriesFetch {
      fetchInputFrom :: Time
    , fetchInputTo :: Time
    , fetchInputSeries :: Series
    , fetchInputRes :: Resolution
    , fetchInputStep :: Step
    , fetchInputStart :: Interval
    , fetchInputEnd :: Interval
    , fetchInputValues :: [Maybe Double]
    , fetchOutputSeries :: Series
    , fetchOutputRes :: Resolution
    , fetchOutputStep :: Step
    , fetchOutputStart :: Interval
    , fetchOutputEnd :: Interval
    , fetchOutputValues :: [Maybe Double]
    } deriving Show
-- | Generate a random series and a random (non-negative) fetch range, and
-- snapshot the series before and after the fetch.
instance Arbitrary SeriesFetch where
    arbitrary = do
        s <- arbitrary
        NonNegative f <- arbitrary
        NonNegative t <- arbitrary
        let series = fetch f t s
        return SeriesFetch {
              fetchInputFrom = f
            , fetchInputTo = t
            , fetchInputSeries = s
            , fetchInputRes = resolution s
            , fetchInputStep = step s
            , fetchInputStart = start s
            , fetchInputEnd = end s
            , fetchInputValues = values s
            , fetchOutputSeries = series
            , fetchOutputRes = resolution s
            , fetchOutputStep = step series
            , fetchOutputStart = start series
            , fetchOutputEnd = end series
            , fetchOutputValues = values series
            }
instance Arbitrary Series where
    -- Start from a fresh output series and replay a random sequence of
    -- interleaved fetches (Left) and updates (Right) over it.
    arbitrary = do
        initial <- createOutputSeries <$> arbitrary
        actions <- arbitrary
        return (foldl apply initial actions)
      where
        apply :: Series
              -> Either (NonNegative Time, NonNegative Time)
                        (NonNegative Time, NonNegative Double)
              -> Series
        apply acc (Left  (NonNegative f, NonNegative t)) = fetch f t acc
        apply acc (Right (NonNegative t, NonNegative v)) = update t v acc
| brendanhay/numbersd | test/Properties/Series.hs | mpl-2.0 | 13,481 | 0 | 14 | 2,394 | 2,937 | 1,528 | 1,409 | -1 | -1 |
module Git.Command.Cvsimport (run) where
-- | Stub for the @cvsimport@ sub-command: accepts the argument list
-- and currently performs no work.  The argument is deliberately
-- ignored (underscore-named to silence the unused-binding warning).
run :: [String] -> IO ()
run _args = return ()
{-
Created : 2013 Dec 23 (Mon) 23:15:11 by carr.
Last Modified : 2014 Jul 30 (Wed) 09:07:30 by Harold Carr.
TODO:
- use Shelly to
- start up fuseki
- run test program
- CLI with flags instead of order (and to enable optional arguments)
- optional: dbAddress; multiple users, groups, permissions in one CLI command
- factor RDF utilities
- use Lens
- threepenny-gui
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Interview where
import Control.Monad (unless)
import Data.RDF.Types (LValue (..), Node (..))
import Data.String (IsString)
import Data.String.Utils (replace)
import Data.Text as T (Text, pack)
import Database.HSparql.Connection
import Database.HSparql.QueryGenerator
import System.Environment (getArgs)
------------------------------------------------------------------------------
-- CLI
-- | Entry point: seed the triple store if necessary, then dispatch on
-- the command-line arguments.
main :: IO ()
main = do
    initializeDB
    getArgs >>= act
-- | Dispatch one CLI command.  Each equation matches an exact argument
-- list (idiomatic list patterns instead of @(x : [])@); anything that
-- does not match -- unknown command or wrong arity -- falls through to
-- the catch-all, which echoes the raw arguments.
act :: [String] -> IO ()
act ["new-user", uEmail]               = newUserCLI uEmail
act ["add-user-to-group", uEmail, gName] = addUserToGroupCLI uEmail gName
act ["rm-user-from-group", uEmail, gName] = putStrLn $ "TODO: rmUserFromGroup " ++ uEmail ++ " " ++ gName
act ["list-user-groups", uEmail]       = putStrLn $ "TODO: list-user-groups " ++ uEmail
act ["delete-user", uEmail]            = putStrLn $ "TODO: delete-user " ++ uEmail
act ["list-users"]                     = listUsersCLI
act ["new-group", gName]               = newGroupCLI gName
act ["add-permission-to-group", gName, permission, resource] = addPermissionToGroupCLI gName permission resource
act ["rm-permission-from-group", gName, permission, resource] = putStrLn $ "TODO: rmPermissionFromGroup " ++ gName ++ " " ++ permission ++ " " ++ resource
-- BUGFIX: this message previously said "list-user-groups" (copy/paste).
act ["list-group-permissions", gName]  = putStrLn $ "TODO: list-group-permissions " ++ gName
act ["delete-group", gName]            = putStrLn $ "TODO: delete-group " ++ gName
act ["list-groups"]                    = listGroupsCLI
act x                                  = putStrLn $ "unknown: " ++ show x
-- | Create a user and echo the store's success flag.
newUserCLI :: String -> IO ()
newUserCLI uEmail = newUser uEmail >>= print

-- | Print the raw bindings for every known user.
listUsersCLI :: IO ()
listUsersCLI = listUsers >>= print

-- | Create a group and echo the store's success flag.
newGroupCLI :: String -> IO ()
newGroupCLI gName = newGroup gName >>= print

-- | Add a user (by e-mail) to a group and echo the success flag.
addUserToGroupCLI :: String -> String -> IO ()
addUserToGroupCLI uEmail gName = addUserToGroup uEmail gName >>= print

-- | Grant a permission on a resource to a group and echo the flag.
addPermissionToGroupCLI :: String -> String -> String -> IO ()
addPermissionToGroupCLI gName permission resource =
    addPermissionToGroup gName permission resource >>= print
-- | Print a banner followed by the name of every group, one per line.
listGroupsCLI :: IO ()
listGroupsCLI = do
    r <- listGroups
    banner "group"
    case r of
        Just g  -> mapM_ format g
        Nothing -> putStrLn "no groups"
  where
    -- Each result row is expected to be exactly [groupId, groupName];
    -- print the bound name.  NOTE(review): both the two-element list
    -- pattern and the 'Bound' lambda are partial -- a row of any other
    -- shape, or an unbound variable, crashes here.  TODO confirm the
    -- shape guaranteed by 'queryVTTVTV'.
    format (_:n:[]) = (\(Bound v) -> print $ getValue v) n
-- | Extract the text of a plain literal node.  The group query binds
-- plain literals (the group names), so any other node shape indicates
-- a programming error; fail loudly with a descriptive message instead
-- of an opaque pattern-match failure.
getValue :: Node -> Text
getValue (Data.RDF.Types.LNode (Data.RDF.Types.PlainL x)) = x
getValue _ = error "getValue: expected a plain literal node"
-- | Print a horizontal separator line followed by the given heading.
banner :: String -> IO ()
banner heading =
    putStrLn "--------------------------------------------------"
        >> putStrLn heading
------------------------------------------------------------------------------
-- persistent operations
-- Note: if these were high-frequency/volume operations then would batch calls where possible
-- | Insert a node typed as 'user' and record its e-mail address.
-- Returns the success flag of the final (e-mail) triple insertion;
-- the flag of the type triple is discarded, as before.
newUser :: String -> IO Bool
newUser uEmail =
    update uid isA user >> updateL uid emailAddress uEmail
  where
    uid = ohcP (emailAddressToId uEmail)
-- | Query all subjects typed as 'user'.
listUsers :: IO (Maybe [[BindingValue]])
listUsers =
    queryVTT var isA user
-- | Insert a node typed as 'group' and record its display name.
-- Returns the success flag of the final (name) triple insertion.
newGroup :: String -> IO Bool
newGroup gName =
    update uid isA group >> updateL uid groupName gName
  where
    uid = ohcP gName
-- | Assert that the user identified by an e-mail address is a member
-- of the named group.
addUserToGroup :: String -> String -> IO Bool
addUserToGroup uEmail gName =
    update (ohcP (emailAddressToId uEmail)) memberOf (ohcP gName)
-- | Grant a permission ("read"/"write", mapped by 'pToP') on the given
-- resource to the named group.
addPermissionToGroup :: String -> String -> String -> IO Bool
addPermissionToGroup gName permission resource =
    updateL (ohcP gName) (pToP permission) resource
-- | Query every group subject together with its name; rows come back
-- as [subject, name] bindings.
listGroups :: IO (Maybe [[BindingValue]])
listGroups =
    queryVTTVTV isA group groupName
------------------------------------------------------------------------------
-- DB initialization
-- | True when the marker triple written by 'initializeDB' is present,
-- i.e. the store has already been seeded.
isDBAlreadyPopulated :: IO Bool
isDBAlreadyPopulated = sendAsk probe
  where
    probe :: Query AskQuery
    probe = do
        _      <- openHcOrgPrefix
        marker <- initialized
        q      <- askTriple marker marker marker
        return AskQuery { queryAsk = [q] }
-- | Seed the store with the example users, groups and permissions,
-- but only once: a marker triple is written at the end and checked on
-- every start-up.  Success flags of the individual inserts are
-- deliberately ignored.
initializeDB :: IO ()
initializeDB = do
    dbAlreadyPopulated <- isDBAlreadyPopulated
    unless dbAlreadyPopulated initializeDB'
  where
    haskellCurryEmail = "haskell.curry@projectdelta.com"
    mathGroup         = "mathGroup"
    mathEmail         = "math@projectdelta.com"
    scheduling        = "https://scheduling.office.projectdelta.com/"
    initializeDB' = do
        -- make Haskell Curry a user and a member of the math group
        newUser haskellCurryEmail
        newGroup mathGroup
        addUserToGroup haskellCurryEmail mathGroup
        -- math group
        --   can read and send emails to the math@projectdelta.com mailing list
        addPermissionToGroup mathGroup readP mathEmail
        addPermissionToGroup mathGroup writeP mathEmail
        --   can reserve conference rooms via https://scheduling.office.projectdelta.com/
        addPermissionToGroup mathGroup readP scheduling
        addPermissionToGroup mathGroup writeP scheduling
        --   has read-only access to financial reports at https://reports.finance.projectdelta.com/
        addPermissionToGroup mathGroup readP "https://reports.finance.projectdelta.com/"
        -- set the initialized marker so seeding is skipped next time
        update initialized initialized initialized
        return ()
------------------------------------------------------------------------------
-- rdf utilities
-- Note: cannot do top-level types because `TermLike` is not exported.
-- update :: (TermLike a, TermLike b, TermLike c) => Query a -> Query b -> Query c -> IO Bool
-- | Insert one triple built from three term sub-queries.  (No
-- top-level signature: `TermLike` is not exported by hsparql.)
update s p o = sendUpdate q
  where
    q = do
        _     <- openHcOrgPrefix
        subj  <- s
        pred' <- p
        obj   <- o
        stmt  <- updateTriple subj pred' obj
        return UpdateQuery { queryUpdate = [stmt] }
-- updateL :: (TermLike a, TermLike b) => Query a -> Query b -> String -> IO Bool
-- | Insert one triple whose object is a plain string literal.
updateL s p o = sendUpdate q
  where
    q = do
        _     <- openHcOrgPrefix
        subj  <- s
        pred' <- p
        stmt  <- updateTriple subj pred' (T.pack o)
        return UpdateQuery { queryUpdate = [stmt] }
-- queryVTT :: (TermLike b, TermLike c) => Query Variable -> Query b -> Query c -> IO (Maybe [[BindingValue]])
-- | Select all subjects matching a (?s, p, o) triple pattern; only the
-- subject variable is projected.
queryVTT s p o = sendQuery q
  where
    q = do
        _     <- openHcOrgPrefix
        subj  <- s
        pred' <- p
        obj   <- o
        triple subj pred' obj
        return SelectQuery { queryVars = [subj] }
-- queryVTTVTV :: (TermLike a, TermLike b, TermLike c) => Query a -> Query b -> Query c -> IO (Maybe [[BindingValue]])
-- | Select subjects matching (?s, p1, o1) together with the object of
-- a second pattern (?s, p2, ?o2); projects [?s, ?o2].  The binding
-- order of the sub-queries matches the original definition.
queryVTTVTV p1 o1 p2 = sendQuery q
  where
    q = do
        _    <- openHcOrgPrefix
        subj <- var
        p1'  <- p1
        o1'  <- o1
        p2'  <- p2
        obj2 <- var
        triple subj p1' o1'
        triple subj p2' obj2
        return SelectQuery { queryVars = [subj, obj2] }
-- | Run an update against the store's update endpoint.
sendUpdate :: Query UpdateQuery -> IO Bool
sendUpdate = updateQuery dbUpdateAddress
-- | Run a select against the store's query endpoint.
sendQuery :: Query SelectQuery -> IO (Maybe [[BindingValue]])
sendQuery = selectQuery dbQueryAddress
-- | Run an ask against the store's query endpoint.
sendAsk :: Query AskQuery -> IO Bool
sendAsk = askQuery dbQueryAddress
------------------------------------------------------------------------------
-- misc utilities
-- | Make an e-mail address identifier-safe by replacing every "@"
-- with "AT" (e.g. "a@b" becomes "aATb").
emailAddressToId :: (Eq a, IsString [a]) => [a] -> [a]
emailAddressToId = replace "@" "AT"
-- pToP :: String -> Query IRIRef
-- | Map a CLI permission keyword to its IRI term.  Unknown keywords
-- currently fall through to a raw term under the local prefix.
pToP x | x == readP  = readPermission
       | x == writeP = writePermission
       | otherwise   = ohcP x -- TODO: restrict
------------------------------------------------------------------------------
-- constants
-- | Fuseki endpoint addresses and the permission keywords understood
-- by the CLI.
dbAddress, dbQueryAddress, dbUpdateAddress, readP, writeP :: String
dbAddress       = "http://localhost:3030/ds/"
dbQueryAddress  = dbAddress ++ "query"
dbUpdateAddress = dbAddress ++ "update"
readP           = "read"
writeP          = "write"
-- IRIRef not exported, hence no top-level signature.
-- ohcP :: String -> Query IRIRef
-- | Qualify a local name under the "openHcOrg" prefix.
ohcP x = fmap (.:. T.pack x) openHcOrgPrefix
-- Predicate and class terms used throughout the store.
-- NOTE(review): 'hasPermissions' appears unused in this module.
emailAddress    = ohcP "emailAddress"
group           = ohcP "group"
groupName       = ohcP "groupName"
hasPermissions  = ohcP "hasPermissions"
initialized     = ohcP "initialized"
isA             = ohcP "isA"
memberOf        = ohcP "memberOf"
readPermission  = ohcP "readPermission"
user            = ohcP "user"
writePermission = ohcP "writePermission"
-- openHcOrgPrefix :: Query Prefix
-- | The single RDF prefix under which all local terms live.
openHcOrgPrefix = prefix "openHcOrg" (iriRef "http://openhc.org/")
------------------------------------------------------------------------------
-- Experiments
-- | Fetch every triple in the store (debugging aid).
qAll :: IO (Maybe [[BindingValue]])
qAll = sendQuery q
  where
    q = do
        subj  <- var
        pred' <- var
        obj   <- var
        triple subj pred' obj
        return SelectQuery { queryVars = [subj, pred', obj] }
-- | Experiment: select against a fixed book1/title triple.
-- NOTE(review): the projected variable @x@ is never constrained by the
-- triple pattern -- presumably intentional scratch code; confirm.
qBook1 :: IO (Maybe [[BindingValue]])
qBook1 = sendQuery q
  where
    q = do
        dc <- prefix "dc" (iriRef "http://purl.org/dc/elements/1.1/")
        ex <- prefix "ex" (iriRef "http://example/")
        x  <- var
        triple (ex .:. "book1") (dc .:. "title") (T.pack "A new book")
        return SelectQuery { queryVars = [x] }
-- | Experiment: ask whether example book1 carries the expected title.
aBook1 :: IO Bool
aBook1 = sendAsk probeQuery
  where
    probeQuery = do
        dc    <- prefix "dc" (iriRef "http://purl.org/dc/elements/1.1/")
        ex    <- prefix "ex" (iriRef "http://example/")
        probe <- askTriple (ex .:. "book1") (dc .:. "title") (T.pack "A new book")
        return AskQuery { queryAsk = [probe] }
-- PREFIX dc: <http://purl.org/dc/elements/1.1/> INSERT DATA { <http://example/book1> dc:title "A new book" ; dc:creator "A.N.Other" . }
-- | Experiment: insert three example titles for book1 in one update.
uhb :: IO Bool
uhb = sendUpdate u
  where
    u = do
        dc  <- prefix "dc" (iriRef "http://purl.org/dc/elements/1.1/")
        ex  <- prefix "ex" (iriRef "http://example/")
        ut1 <- updateTriple (ex .:. "book1") (dc .:. "title") (T.pack "A HASKELL book")
        ut2 <- updateTriple (ex .:. "book1") (dc .:. "title") (T.pack "Another HASKELL book")
        ut3 <- updateTriple (ex .:. "book1") (dc .:. "title") (T.pack "Yet another HASKELL book")
        return UpdateQuery { queryUpdate = [ut1,ut2,ut3] }
-- End of file.
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/interview/zalora/src/Interview.hs | unlicense | 10,914 | 0 | 13 | 3,019 | 2,653 | 1,338 | 1,315 | 201 | 2 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TypeOperators, StandaloneDeriving,
FlexibleContexts #-}
-- Workaround
{-# LANGUAGE CPP, UndecidableInstances #-}
module Main where
import Flow
import Flow.Vector
import Flow.Kernel
import Flow.Builder
import Control.Monad
import Data.Typeable
-- ----------------------------------------------------------------------------
-- --- Functional ---
-- ----------------------------------------------------------------------------
-- Data tags
-- Empty data declarations used purely as type-level tags for 'Flow'
-- values; there are no runtime values of these types.
data Tag -- ^ Initialisation (e.g. FFT plans)
data Vis -- ^ Visibilities (File name to OSKAR / raw visibilities / binned ...)
data UVGrid -- ^ UV grid
data Image -- ^ Image
data GCFs -- ^ A set of GCFs
data CleanResult -- ^ Result of cleaning (e.g. model + residual)

-- Standalone deriving because empty data types cannot carry a
-- deriving clause directly.
deriving instance Typeable Tag
deriving instance Typeable Vis
deriving instance Typeable UVGrid
deriving instance Typeable Image
deriving instance Typeable GCFs
deriving instance Typeable CleanResult
-- Abstract kernel signatures.
--
-- TODO: The string we use here is somewhat important for keeping them
-- apart - it would be more elegant if we could enforce them to be
-- unique in some other way.
-- Abstract kernel nodes: each is just a named 'flow'; concrete kernels
-- are bound to these names later by the strategy.  The strings must be
-- unique, as they identify the nodes.

-- | A fresh, empty UV grid.
createGrid :: Flow UVGrid
createGrid = flow "create grid"
-- | Grid visibilities (with GCFs) onto a UV grid.
grid :: Flow Vis -> Flow GCFs -> Flow UVGrid -> Flow UVGrid
grid = flow "grid"
-- | Predict visibilities from a UV grid.
degrid :: Flow UVGrid -> Flow GCFs -> Flow Vis -> Flow Vis
degrid = flow "degrid"
-- | Inverse DFT: UV grid to image.
idft :: Flow UVGrid -> Flow Image
idft = flow "idft"
-- | Forward DFT: image to UV grid.
dft :: Flow Image -> Flow UVGrid
dft = flow "dft"
-- | Gridding convolution functions derived from the visibilities.
gcf :: Flow Vis -> Flow GCFs
gcf = flow "gcf"
-- | Initial (empty) residual image.
initRes :: Flow Image
initRes = flow "residual init"
-- | Visibilities prepared for PSF gridding.
psfVis :: Flow Vis -> Flow Vis
psfVis = flow "prepare vis for PSF"
-- | Deconvolution of a dirty image against a PSF.
clean :: Flow Image -> Flow Image -> Flow CleanResult
clean = flow "clean"
-- | Model component of a clean result.
cleanModel :: Flow CleanResult -> Flow Image
cleanModel = flow "clean/model"
-- | Residual component of a clean result.
cleanResidual :: Flow CleanResult -> Flow Image
cleanResidual = flow "clean/residual"
-- | Summation of per-region images.
imageSum :: Flow Image -> Flow Image
imageSum = flow "image sum"
-- | Compound gridder actor: grid the visibilities onto a fresh UV
-- grid, then transform into the image plane.
gridder :: Flow Vis -> Flow GCFs -> Flow Image
gridder vis gcfs = idft $ grid vis gcfs createGrid

-- | Compound degridder actor: predict visibilities from an image.
degridder :: Flow Image -> Flow Vis -> Flow GCFs -> Flow Vis
degridder img vis gcfs = degrid (dft img) gcfs vis

-- | Compound PSF gridder actor: image the PSF-prepared visibilities.
psfGrid :: Flow Vis -> Flow GCFs -> Flow Image
psfGrid vis gcfs = gridder (psfVis vis) gcfs

-- | Compound cleaning actor, returning (residual, model).
cleaner :: Flow Image -> Flow Image -> (Flow Image, Flow Image)
cleaner dirty psf =
    let result = clean dirty psf
    in  (cleanResidual result, cleanModel result)
-- | Compound major loop iteration actor: image the current
-- visibilities, clean against the PSF, and degrid the model to obtain
-- the visibilities for the next iteration.  The previous residual and
-- the iteration counter are ignored.
majorIter :: Flow GCFs -> Flow Image
          -> (Flow Image, Flow Vis) -> Int
          -> (Flow Image, Flow Vis)
majorIter gcfs psf (_res, vis) _i = (res', vis')
  where img = gridder vis gcfs
        (res', mod') = cleaner img psf
        vis' = degridder mod' vis gcfs

-- | Compound major loop actor: iterate 'majorIter' n times, starting
-- from the initial residual.  (foldl here only builds the abstract
-- flow graph; n is small, so laziness is not a concern.)
majorLoop :: Int -> Flow Vis -> (Flow Image, Flow Vis)
majorLoop n vis
  = foldl (majorIter gcfs psf) (initRes, vis) [1..n]
 where gcfs = gcf vis
       psf = psfGrid vis gcfs

-- | Residual image of the full major loop, summed across regions.
majorLoopSum :: Int -> Flow Vis -> Flow Image
majorLoopSum n vis = imageSum $ fst $ majorLoop n vis
-- ----------------------------------------------------------------------------
-- --- Kernels ---
-- ----------------------------------------------------------------------------
-- | Run configuration for the imaging strategy.
data Config = Config
  { cfgInput :: [(FilePath, Int)]  -- ^ Input visibility files and repeat counts
  , cfgOutput :: FilePath          -- ^ Where to write the result image
  , cfgMajorLoops :: Int           -- ^ Number of major loop iterations
  , cfgGrid :: GridPar             -- ^ Gridding parameters
  }
-- | Gridding parameters (placeholder; no fields yet).
data GridPar = GridPar
-- Make data representations. Sadly, this can not be *quite* done with
-- deriving yet (#8165). In the meantime, we use preprocessor
-- CPP macro deriving a 'DataRepr' instance for a newtype by delegating
-- every method to the wrapped representation.  (No comments may appear
-- between the continuation lines below.)
#define DATAREPR_INSTANCE(NewRepr, Repr) \
instance DataRepr NewRepr where \
  type ReprType NewRepr = ReprType (Repr); \
  reprNop (NewRepr r) = reprNop r; \
  reprAccess (NewRepr r) = reprAccess r; \
  reprCompatible (NewRepr r1) (NewRepr r2) = reprCompatible r1 r2
-- | Image data, represented as a plain vector.
newtype ImageRepr = ImageRepr (VectorRepr () Image)
  deriving (Typeable, Show)
DATAREPR_INSTANCE(ImageRepr, VectorRepr () Image)
-- | UV-grid data, represented as a plain vector.
newtype UVGridRepr = UVGridRepr (VectorRepr () UVGrid)
  deriving (Typeable, Show)
DATAREPR_INSTANCE(UVGridRepr, VectorRepr () UVGrid)
-- By default images and grids are always consumed by the caller, as
-- they are large objects with lots of write operations, and we don't
-- want to duplicate them.
imgRepr :: ImageRepr
imgRepr = ImageRepr $ VectorRepr WriteAccess
uvgRepr :: UVGridRepr
uvgRepr = UVGridRepr $ VectorRepr WriteAccess
-- | Plan representation is used by many kernels; plans are shared, so
-- read access suffices.
planRepr :: VectorRepr () Tag
planRepr = VectorRepr ReadAccess
-- | Visibilities as read from disk, before sorting.
newtype RawVisRepr = RawVisRepr (VectorRepr () Vis)
  deriving (Typeable, Show)
DATAREPR_INSTANCE(RawVisRepr, VectorRepr () Vis)
-- | Visibilities after sorting/binning.
newtype SortedVisRepr = SortedVisRepr (VectorRepr () Vis)
  deriving (Typeable, Show)
DATAREPR_INSTANCE(SortedVisRepr, VectorRepr () Vis)
-- Visibilities generally remain constant, hence read access.
rawVisRepr :: RawVisRepr
rawVisRepr = RawVisRepr $ VectorRepr ReadAccess
visRepr :: SortedVisRepr
visRepr = SortedVisRepr $ VectorRepr ReadAccess
-- GCFs too
gcfsRepr :: VectorRepr () GCFs
gcfsRepr = VectorRepr ReadAccess
-- Placeholder kernels: every kernel below is a stub built on 'dummy',
-- which just prints its name and yields a null vector.  The repr lists
-- document the intended input/output representations.

-- | Stub kernel: print the given name, return no data.
dummy :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs)
      => String -> rs -> r -> ReprKernFun (ReprType r) rs
dummy name rs r = mappingKernel name rs r code
  where code _ _ = putStrLn name >> return nullVector
-- | Stand-in for a kernel that would be implemented in Halide.
halideWrapper :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs)
              => String -> rs -> r -> ReprKernFun (ReprType r) rs
halideWrapper _ = dummy "halide"
-- | Stand-in for a kernel that would be implemented in C.
cWrapper :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs)
         => String -> rs -> r -> ReprKernFun (ReprType r) rs
cWrapper _ = dummy "c"
-- | Read raw visibilities for the data sets selected by the domain.
oskarReader :: Typeable d => Domain d -> [(FilePath, Int)] -> Kernel Vis
oskarReader d _ = dummy "oskar" Z (RegionRepr d rawVisRepr)
-- | Sort raw visibilities into gridding order.
sorter :: Flow Vis -> Kernel Vis
sorter = dummy "sorter" (rawVisRepr :. Z) visRepr
-- | Overwrite visibility values with ones (PSF preparation).
setOnes :: Flow Vis -> Kernel Vis
setOnes = dummy "ones" (visRepr :. Z) visRepr
-- | Compute gridding convolution functions.
gcfKernel :: GridPar -> Flow Tag -> Flow Vis -> Kernel GCFs
gcfKernel _ = halideWrapper "gcfs" (planRepr :. visRepr :. Z) gcfsRepr
-- | Create FFT plans.
fftCreatePlans :: GridPar -> Kernel Tag
fftCreatePlans _ = dummy "fftPlans" Z planRepr
-- | Forward FFT: image to UV grid.
fftKern :: GridPar -> Flow Tag -> Flow Image -> Kernel UVGrid
fftKern _ = dummy "fftKern" (planRepr :. imgRepr :. Z) uvgRepr
-- | Inverse FFT: UV grid to image.
ifftKern :: GridPar -> Flow Tag -> Flow UVGrid -> Kernel Image
ifftKern _ = dummy "ifftKern" (planRepr :. uvgRepr :. Z) imgRepr
-- | Produce an empty UV grid.
gridInit :: GridPar -> Kernel UVGrid
gridInit _ = dummy "gridInit" Z uvgRepr
-- | Gridding kernel.
gridKernel :: GridPar -> Flow Vis -> Flow GCFs -> Flow UVGrid -> Kernel UVGrid
gridKernel _ = dummy "gridKernel" (visRepr :. gcfsRepr :. uvgRepr :. Z) uvgRepr
-- | Gridding kernel specialised for the PSF (covers grid . psfVis).
psfGridKernel :: GridPar -> Flow Vis -> Flow GCFs -> Flow UVGrid -> Kernel UVGrid
psfGridKernel _ = dummy "psfGridKernel" (visRepr :. gcfsRepr :. uvgRepr :. Z) uvgRepr
-- | Degridding (prediction) kernel.
degridKernel :: GridPar -> Flow UVGrid -> Flow GCFs -> Flow Vis -> Kernel Vis
degridKernel _ = dummy "degridKernel" (uvgRepr :. gcfsRepr :. visRepr :. Z) visRepr
-- | Clean results are written once by the clean kernel.
cleanResRepr :: VectorRepr () CleanResult
cleanResRepr = VectorRepr WriteAccess
-- | Deconvolution kernel.
cleanKernel :: Flow Image -> Flow Image -> Kernel CleanResult
cleanKernel = halideWrapper "clean" (imgRepr :. imgRepr :. Z) cleanResRepr
-- | Extract the model image from a clean result.
splitModel :: Flow CleanResult -> Kernel Image
splitModel = dummy "splitModel" (cleanResRepr :. Z) imgRepr
-- | Extract the residual image from a clean result.
splitResidual :: Flow CleanResult -> Kernel Image
splitResidual = dummy "splitResidual" (cleanResRepr :. Z) imgRepr
-- | Sum per-region images into one.
imageSumKernel :: Typeable d => Domain d -> Flow Image -> Kernel Image
imageSumKernel dom = dummy "image summation" (RegionRepr dom imgRepr :. Z) imgRepr
-- | Write the final image to disk; produces no further data (NoRepr).
imageWriter :: FilePath -> Flow Image -> Kernel Image
imageWriter _ = dummy "image writer" (imgRepr :. Z) NoRepr
-- ----------------------------------------------------------------------------
-- --- Strategy ---
-- ----------------------------------------------------------------------------
-- | Implement the imaging data flow for one data-set region: sort the
-- visibilities, set up FFT plans and GCFs, bind concrete kernels for
-- all abstract flow nodes (restricted to the given domain), then run
-- the configured number of major loops.
scatterImaging :: Typeable d => Config -> Domain d -> Flow Tag -> Flow Vis
               -> Strategy ()
scatterImaging cfg dh tag vis =
 implementing (fst $ majorLoop (cfgMajorLoops cfg) vis) $ do

  -- Sort visibility data.  'addDom' restricts every kernel bound below
  -- to the region domain we were handed.
  let addDom :: IsKernelDef kf => kf -> kf
      addDom = regionKernel dh
  rebind vis $ addDom sorter

  -- Initialise FFTs
  let gpar = cfgGrid cfg
  bind tag $ addDom $ fftCreatePlans gpar

  -- Generate GCF
  let gcfs = gcf vis
  bind gcfs $ addDom $ gcfKernel gpar tag vis

  -- Make rules: whenever the planner needs one of these abstract
  -- nodes, it instantiates the corresponding kernel.
  bindRule idft (addDom $ ifftKern gpar tag)
  bindRule dft (addDom $ fftKern gpar tag)
  bindRule createGrid (addDom $ gridInit gpar)
  bindRule grid (addDom $ gridKernel gpar)
  bindRule degrid (addDom $ degridKernel gpar)
  bindRule clean (addDom cleanKernel)
  bindRule cleanResidual (addDom splitResidual)
  bindRule cleanModel (addDom splitModel)

  -- PSF. Note that we bind a kernel here that implements *two*
  -- abstract kernel nodes!
  --let psfg = grid (psfVis vis) gcfs createGrid
  --bind psfg (psfGridKernel gpar vis gcfs createGrid)
  bindRule (grid . psfVis) (addDom $ psfGridKernel gpar)
  calculate $ psfGrid vis gcfs

  -- Loop over all but the last major cycle
  forM_ [1..cfgMajorLoops cfg-1] $ \i -> do

    -- Force grid calculation - we do not want to share this between
    -- loop iterations!
    calculate createGrid

    -- Generate new visibilities
    calculate $ snd $ majorLoop i vis

  -- Calculate residual of last loop iteration
  calculate createGrid
  calculate $ fst $ majorLoop (cfgMajorLoops cfg) vis
-- | Strategy implementing the imaging loop for a number of data sets:
-- the input is split across data sets (in parallel) and runs per data
-- set (sequentially), imaged per region, then summed and written out.
scatterImagingMain :: Config -> Strategy ()
scatterImagingMain cfg = do

  -- Make data set domain
  let dataSets = length (cfgInput cfg)
      dataSetRepeats = sum $ map snd $ cfgInput cfg
  dom <- makeRangeDomain 0 dataSetRepeats

  -- Create data flow for visibilities, build abstract data flow to do
  -- configured number of major loops over this input data
  tag <- uniq (flow "tag")
  let vis = flow "vis" tag

  -- Split by datasets
  let result = majorLoopSum (cfgMajorLoops cfg) vis
  ds <- split dom dataSets
  distribute ds ParSchedule $ void $ do

    -- Split by number of runs.
    -- TODO: Number of runs should depend on data set!
    rep <- split ds 3
    distribute rep SeqSchedule $ void $ do

      -- Read in visibilities. The domain handle passed in tells the
      -- kernel which of the datasets to load.
      bind vis $ oskarReader rep $ cfgInput cfg

      -- Implement this data flow
      scatterImaging cfg rep tag vis
      -- calculate $ fst $ majorLoop (cfgMajorLoops cfg) vis

    -- Sum up local images (TODO: accumulate?)
    bindRule imageSum (imageSumKernel rep)
    calculate result

  -- Sum and write out the result
  rebind result $ imageWriter (cfgOutput cfg)
-- | Simplified strategy: one data set, one domain, no distribution.
scatterSimple :: Config -> Strategy ()
scatterSimple cfg = do

  -- Create data flow for visibilities, build abstract data flow to do
  -- configured number of major loops over this input data
  vis <- uniq (flow "vis")
  tag <- uniq (flow "tag")

  -- Read in visibilities. The domain handle passed in tells the
  -- kernel which of the datasets to load.
  dom <- makeRangeDomain 0 1
  bind vis $ oskarReader dom $ cfgInput cfg

  -- Implement this data flow
  scatterImaging cfg dom tag vis

  -- Sum and write out the result
  bindRule imageSum (imageSumKernel dom)
  let result = majorLoopSum (cfgMajorLoops cfg) vis
  rebind result (imageWriter (cfgOutput cfg))
-- | Example strategy: three input files, three repeats each, one major
-- loop, writing to "output.img".
testStrat :: Strategy ()
testStrat = scatterImagingMain $ Config
    [("input.vis", 3),
     ("input2.vis", 3),
     ("input3.vis", 3)]
    "output.img"
    1
    GridPar
-- | Print the planned strategy steps, then execute the strategy.
main :: IO ()
main = do
    dumpSteps testStrat
    execStrategy testStrat
| SKA-ScienceDataProcessor/RC | MS5/programs/imaging.hs | apache-2.0 | 11,853 | 0 | 15 | 2,414 | 3,174 | 1,574 | 1,600 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Session where
import Control.Applicative
import Control.Monad.Reader
-- lens
import Control.Lens
-- happstack framework
import Data.Acid
import Data.SafeCopy
import Happstack.Server
import Happstack.Server.ClientSession
import Text.I18n
-- local
import State
import State.Helper
import State.Users
-- | The server monad: client-session handling stacked on top of the
-- acid-state server transformer.
type ServerT a = ClientSessionT SessionData AcidServerT a

-- | Run a 'ServerT' action given the acid-state handle and the session
-- (cookie) configuration, yielding a plain Happstack 'ServerPart'.
runServerT
  :: AcidState BlogState
  -> SessionConf
  -> ServerT a
  -> ServerPart a
runServerT acid sconf srvt =
  runReaderT (withClientSessionT sconf srvt) acid
--
-- Type definition
--
-- | Per-client session payload stored in the session cookie.
data SessionData = SessionData
  { _sessionUser   :: Maybe UserID  -- ^ Logged-in user, if any
  , _sessionLocale :: Maybe String  -- ^ Preferred locale name, if any
  }

-- Generate lenses and SafeCopy serialization for the session payload.
makeLenses ''SessionData
deriveSafeCopy 0 'base ''SessionData
-- | A fresh session carries no user and no locale preference.
instance ClientSession SessionData where
  emptySession = SessionData
    { _sessionUser   = Nothing
    , _sessionLocale = Nothing
    }
--
-- Requests
--
-- | Read the logged-in user's id from the session, if any.
getUserID :: ServerT (Maybe UserID)
getUserID = liftSessionStateT (use sessionUser)

-- | Store (or clear, with 'Nothing') the logged-in user's id.
setUserID :: Maybe UserID -> ServerT ()
setUserID mid = liftSessionStateT (sessionUser .= mid)

-- | Resolve the session's user id to the full user record, if any.
getSessionUser :: ServerT (Maybe User)
getSessionUser =
    getUserID >>= maybe (return Nothing) (runQuery . GetUserById)

-- | The locale preference stored in the session, if any.
getSessionLocale :: ServerT (Maybe Locale)
getSessionLocale = liftSessionStateT (fmap Locale <$> use sessionLocale)

-- | Persist (or clear, with 'Nothing') the locale preference.
setSessionLocale :: Maybe Locale -> ServerT ()
setSessionLocale mloc = liftSessionStateT (sessionLocale .= fmap unloc mloc)
  where
    unloc (Locale s) = s
| mcmaniac/blog.nils.cc | src/Session.hs | apache-2.0 | 1,519 | 0 | 9 | 260 | 406 | 216 | 190 | 45 | 1 |
{-# LANGUAGE ExistentialQuantification, OverloadedStrings #-}
module HStyle.Rule
( Rule (..)
, Options (..)
, FileState (..)
, FileM
, runFileM
, runRule
) where
import Control.Monad (forM_, unless)
import Control.Monad.Reader (ReaderT, ask, runReaderT)
import Control.Monad.State (State, get, put, runState)
import Control.Monad.Writer (WriterT, runWriterT, tell)
import Data.Text (Text)
import qualified Data.Text as T
import HStyle.Block
import HStyle.Checker
import HStyle.Fixer
import HStyle.Parse
import HStyle.Selector
-- | Compose the elements of a rule. Use ExistentialQuantification so the
-- internal state of a rule (the selector's payload type @a@) cannot be
-- touched from the outside; it only flows from selector to checker/fixer.
data Rule = forall a. Rule (Selector a) (Checker a) (Fixer a)
-- | Options for checking files
data Options = Options
    { -- | Attempt to fix files
      optionsFix :: Bool
    , -- | Be quiet
      optionsQuiet :: Bool
    } deriving (Show)
-- | Mutable state threaded through the checking of one file.
data FileState = FileState
    { -- | File we're fixing
      filePath :: FilePath
    , -- | The module in the file
      fileModule :: Module
    , -- | A block holding the file contents
      fileBlock :: Block
    , -- | Flag indicating whether or not the in-memory representation differs
      -- from the file on disk
      fileUpdated :: Bool
    , -- | Flag indicating that all checks were OK
      fileOk :: Bool
    } deriving (Show)
-- | We prefer to keep the file checking out of the IO monad: reader
-- for options, writer for log lines, state for the file being fixed.
type FileM = ReaderT Options (WriterT [Text] (State FileState))

-- | Unwrap the full monad stack, returning the action's result, the
-- final file state, and any log lines written along the way.
runFileM :: FileM a -> Options -> FileState -> (a, FileState, [Text])
runFileM fm options fs =
    let ((x, logLines), fs') = runState (runWriterT (runReaderT fm options)) fs
    in  (x, fs', logLines)
-- | Write one line of output to the writer log.
putLn :: Text -> FileM ()
putLn line = tell [line]
-- | Represents fixing status
data Fix
    = DontFix     -- ^ User doesn't want to fix it
    | CouldntFix  -- ^ Our library is unable to fix it
    | Fixed       -- ^ Fixed, result
    deriving (Eq, Show)
-- | Apply one rule to the current file state: select candidate blocks,
-- then check them one by one.
runRule :: Rule -> FileM ()
runRule rule@(Rule selector checker fixer) = do
    fs <- get
    check $ selector (fileModule fs) (fileBlock fs)
  where
    -- Check the files one by one. However, note that if we fixed a file, we
    -- need to re-run the current rule, because e.g. line number might have
    -- changed, so our selections will no longer be valid.
    check [] = return ()
    check ((x, r) : selections) = do
        fix <- checkBlock checker fixer x r
        case fix of
            Fixed -> runRule rule
            _     -> check selections
-- | Run one checker (and, if requested, the fixer) over a selection,
-- report problems, apply the fix to the in-memory file state, and
-- return how the fix attempt went.
checkBlock :: Checker a -> Fixer a -> a -> Range -> FileM Fix
checkBlock checker fixer x range = do
    -- Query monad states
    fs      <- get
    options <- ask

    -- Determine problems, and attempt to fix (lazily: the fixer only
    -- runs when needFix forces the case scrutinee)
    let block    = fileBlock fs
        problems = checker x block range
        needFix  = optionsFix options && not (null problems)
        (fix, block') = case (needFix, fixer x block range) of
            (False, _)      -> (DontFix, block)
            (True, Nothing) -> (CouldntFix, block)
            (True, Just ls) -> (Fixed, updateRange range ls block)

    -- Output our results for this check
    forM_ problems $ \(line, problem) -> do
        -- let line = absoluteLineNumber i block
        putLn $ T.pack (filePath fs) `T.append` ":" `T.append`
            T.pack (show line) `T.append` ": " `T.append` problem
        unless (optionsQuiet options) $ do
            case fix of
                DontFix    -> putLn "    Found:"
                CouldntFix -> putLn "    Couldn't fix:"
                Fixed      -> putLn "    Fixed:"
            putLn $ prettyRange 4 block range

    -- If we fixed anything, re-parse the module. Parsing here should really,
    -- not fail, because if it does, we made the code unparseable with our own
    -- fix...
    let (module', _) = case fix of
            Fixed -> either error id $
                parseModule (Just $ filePath fs) (toText block')
            _     -> (fileModule fs, block')

    -- Save updated file state
    put fs
        { fileModule  = module'
        , fileBlock   = block'
        , fileUpdated = fileUpdated fs || fix == Fixed
        , fileOk      = fileOk fs && null problems
        }

    -- Return fix resolution
    return fix
| jaspervdj/hstyle | src/HStyle/Rule.hs | bsd-3-clause | 4,448 | 0 | 18 | 1,364 | 1,081 | 595 | 486 | 85 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import System.Environment (lookupEnv)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import qualified Data.ByteString.Base64 as BS
import qualified Data.ByteString.Char8 as BS
import HerokuBuild
-- | Parse CLI options and dispatch: start a build, query a build's
-- status, or promote a succeeded build's slug to another app.
main :: IO ()
main = withOptions $ \(Options app cmd) -> do
    k <- getApiKey

    let builds = "/apps/" ++ app ++ "/builds/"

    case cmd of
        Start u v -> p buildId =<< postHeroku k builds (newBuild u v)
        Status b  -> p status  =<< getHeroku k (builds ++ b)
        Release b a -> do
            mb <- getHeroku k (builds ++ b)
            case mb of
                -- Only a build in 'Success' state carries a releasable slug.
                Just (Build _ Success _ s) -> do
                    postHeroku' k ("/apps/" ++ a ++ "/releases") s
                    putStrLn "Success"
                _ -> err "API error or build not found or not succeeded"
  where
    -- Print one field of the build, or die if the API reply did not parse.
    p :: Show a => (Build -> a) -> Maybe Build -> IO ()
    p acc = maybe (err "failed to parse API response") (print . acc)
-- | Read @HEROKU_API_KEY@ from the environment and turn it into the
-- Basic-auth header value Heroku expects (base64 of @\":\" ++ key@);
-- abort with an error if the variable is unset.
getApiKey :: IO ApiKey
getApiKey =
    maybe missing (return . encode) =<< lookupEnv "HEROKU_API_KEY"
  where
    missing = err "HEROKU_API_KEY environment variable not set"

    encode :: String -> ApiKey
    encode k = "Basic " `BS.append` BS.encode (":" `BS.append` BS.pack k)
-- | Print a prefixed error message to stderr and terminate the program
-- with a failing exit code.  Polymorphic result so it can stand in for
-- any IO action.
err :: String -> IO a
err msg = hPutStrLn stderr ("Error: " ++ msg) >> exitFailure
| pbrisbin/heroku-build | main.hs | bsd-3-clause | 1,455 | 0 | 22 | 436 | 494 | 248 | 246 | 38 | 4 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Orphans () where
import Data.Vector (Vector)
import qualified Data.Vector as V
import Test.SmallCheck.Series as SC
import Test.Tasty.QuickCheck as QC
-- | SmallCheck series for vectors: enumerate lists and convert.
instance Serial m a => Serial m (Vector a) where
  series = fmap V.fromList series
-- | QuickCheck generation and shrinking for vectors, by round-tripping
-- through lists.
instance Arbitrary a => Arbitrary (Vector a) where
  arbitrary = do
    xs <- arbitrary
    return (V.fromList xs)
  shrink v = map V.fromList (shrink (V.toList v))
| quchen/qa-playground | test/Testsuite/Orphans.hs | bsd-3-clause | 561 | 0 | 9 | 146 | 134 | 76 | 58 | 13 | 0 |
{-# LANGUAGE TypeFamilies, DeriveFunctor, DeriveDataTypeable #-}
module Test.LazySmallCheck2012.FunctionalValues.Instances where
import Control.Applicative
import Data.Data
import Data.Typeable
import Test.LazySmallCheck2012.Instances
import Test.LazySmallCheck2012.FunctionalValues
-- | Unit is its own base representation.
instance Argument () where
  type Base () = ()
  toBase = id
  fromBase = id
-- | Pairs map componentwise through 'BaseCast'.
instance (Argument a, Argument b) => Argument (a, b) where
  type Base (a, b) = (BaseCast a, BaseCast b)
  toBase (i, j) = (toBaseCast i, toBaseCast j)
  fromBase (i, j) = (fromBaseCast i, fromBaseCast j)
-- | Larger tuples nest rightwards as pairs-of-pairs.
instance (Argument a, Argument b, Argument c) => Argument (a, b, c) where
  type Base (a, b, c) = (BaseCast a, (BaseCast b, BaseCast c))
  toBase (i, j, k) = (toBaseCast i, (toBaseCast j, toBaseCast k))
  fromBase (i, (j, k)) = (fromBaseCast i, fromBaseCast j, fromBaseCast k)
instance (Argument a, Argument b, Argument c, Argument d)
  => Argument (a, b, c, d) where
  type Base (a, b, c, d) = (BaseCast a, (BaseCast b, (BaseCast c, BaseCast d)))
  toBase (i, j, k, l) = (toBaseCast i, (toBaseCast j, (toBaseCast k, toBaseCast l)))
  fromBase (i, (j, (k, l))) = (fromBaseCast i, fromBaseCast j, fromBaseCast k, fromBaseCast l)
instance (Argument a, Argument b, Argument c, Argument d, Argument e)
  => Argument (a, b, c, d, e) where
  type Base (a, b, c, d, e) = (BaseCast a, (BaseCast b, (BaseCast c, (BaseCast d, BaseCast e))))
  toBase (i, j, k, l, m) = (toBaseCast i, (toBaseCast j, (toBaseCast k, (toBaseCast l, toBaseCast m))))
  fromBase (i, (j, (k, (l, m)))) = (fromBaseCast i, fromBaseCast j, fromBaseCast k, fromBaseCast l, fromBaseCast m)
-- | Lists map to Either: Left () for nil, Right (head, tail) for cons.
instance Argument a => Argument [a] where
  type Base [a] = Either () (BaseCast a, BaseCast [a])
  toBase [] = Left ()
  toBase (x:xs) = Right (toBaseCast x, toBaseCast xs)
  fromBase (Left ()) = []
  fromBase (Right (x, xs)) = (fromBaseCast x:fromBaseCast xs)
-- | Booleans as a two-way sum: Left () is False, Right () is True.
instance Argument Bool where
  type Base Bool = Either () ()
  toBase False = Left ()
  toBase True = Right ()
  fromBase (Left ()) = False
  fromBase (Right ()) = True
-- | Maybe as a sum: Left () is Nothing, Right carries the cast payload.
instance Argument a => Argument (Maybe a) where
  type Base (Maybe a) = Either () (BaseCast a)
  toBase Nothing = Left ()
  toBase (Just x) = Right (toBaseCast x)
  fromBase (Left ()) = Nothing
  fromBase (Right x) = Just (fromBaseCast x)
-- | Either maps componentwise through 'BaseCast' on both sides.
instance (Argument a, Argument b) => Argument (Either a b) where
  type Base (Either a b) = Either (BaseCast a) (BaseCast b)
  toBase = either (Left . toBaseCast) (Right . toBaseCast)
  fromBase = either (Left . fromBaseCast) (Right . fromBaseCast)
-- | Naturals are already a base representation.
instance Argument Nat where
  type Base Nat = Nat
  toBase = id
  fromBase = id
-- | Ints are represented as naturals via the isomorphism centred at 0.
instance Argument Int where
  type Base Int = Nat
  toBase = fst $ isoIntNat 0
  fromBase = snd $ isoIntNat 0
-- | Chars go through their codepoint, centred at 'a' so small naturals
-- correspond to common lowercase letters.
instance Argument Char where
  type Base Char = Nat
  toBase = (fst $ isoIntNat (fromEnum 'a')) . fromEnum
  fromBase = toEnum . (snd $ isoIntNat (fromEnum 'a'))
| UoYCS-plasma/LazySmallCheck2012 | Test/LazySmallCheck2012/FunctionalValues/Instances.hs | bsd-3-clause | 2,972 | 0 | 11 | 638 | 1,375 | 740 | 635 | 63 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Data.LTSV.String
( ltsv
, record
, encode
, decode
, decodeLTSV
, ToRecord(..)
, FromRecord(..)
, Field, FieldMap, Record, LTSV
)
where
import Control.Monad (liftM2)
import Control.Applicative ((<*))
import Text.Parsec ( parse
, Parsec
, newline, sepBy, tab, char, many1, alphaNum, oneOf, many, noneOf, (<|>), try
)
import Data.List (intercalate)
import qualified Data.Map as Map
-- | A single labelled field: (label, value).
type Field = (String, String)
-- | The fields of one record, keyed by label.
type FieldMap = Map.Map String String
-- | One LTSV line: an ordered list of fields.
type Record = [Field]
-- | A whole LTSV document: one record per line.
type LTSV = [Record]
-- |
-- LTSV format parser.
--
-- >>> import Text.Parsec (parse)
-- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222"
-- Right [[("aaa","111"),("bbb","222")]]
-- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222\nccc:333\tddd:444"
-- Right [[("aaa","111"),("bbb","222")],[("ccc","333"),("ddd","444")]]
-- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222\nccc:333\tddd:444\n"
-- Right [[("aaa","111"),("bbb","222")],[("ccc","333"),("ddd","444")]]
--
ltsv :: Parsec String () LTSV
ltsv = do
  complete <- many (try recordNL)
  final <- record
  -- A trailing newline leaves an empty final record; drop it in that case.
  return (if null final then complete else complete ++ [final])
-- | One record terminated by a newline (the newline is consumed, not kept).
recordNL :: Parsec String () Record
recordNL = do
  parsed <- record
  _ <- newline
  return parsed
-- | One LTSV record: tab-separated @label:value@ fields. Labels are made of
-- alphanumerics plus @_.-@; values run until a tab or line break.
record :: Parsec String () Record
record = fieldP `sepBy` tab
  where
    fieldP = liftM2 (,) (labelP <* char ':') valueP
    labelP = many1 (alphaNum <|> oneOf "_.-")
    valueP = many (noneOf "\t\n\r")
-- | Types that can be rendered as an LTSV record.
class ToRecord a where
  toRecord :: a -> Record

-- | Types that can be reconstructed from the fields of an LTSV record;
-- 'Nothing' signals missing or unusable fields.
class FromRecord a where
  fromRecord :: FieldMap -> Maybe a
-- |
-- Serialize a record value as a String.
--
-- >>> data Person = Person { name :: String, age :: Int } deriving Show
-- >>> instance ToRecord Person where toRecord p = [("name", name p), ("age", show . age $ p)]
-- >>> encode $ Person "krdlab" 128
-- "name:krdlab\tage:128"
--
encode :: (ToRecord a) => a -> String
encode value = serialize (toRecord value)
-- | Render a record as one LTSV line: @label:value@ pairs joined by tabs.
-- An empty record renders as the empty string ('intercalate' on @[]@).
serialize :: Record -> String
serialize fields = intercalate "\t" (map renderField fields)
  where
    renderField (label, value) = label ++ ":" ++ value
-- |
-- deserialize a record value from a String.
--
-- >>> import Control.Applicative ((<$>))
-- >>> data Person = Person { name :: String, age :: Int } deriving Show
-- >>> instance FromRecord Person where fromRecord m = liftM2 Person (Map.lookup "name" m) (read <$> Map.lookup "age" m)
-- >>> decode "name:krdlab\tage:128" :: Maybe Person
-- Just (Person {name = "krdlab", age = 128})
--
-- | Parse a single LTSV line and convert it via 'FromRecord'.
decode :: (FromRecord a) => String -> Maybe a
decode input = decodeWith record fromRecord input
-- | Run a record parser over the input and feed the resulting field map to
-- the conversion function. Any parse error collapses to 'Nothing'.
decodeWith :: Parsec String () Record -> (FieldMap -> Maybe a) -> String -> Maybe a
decodeWith parser convert input =
  case parse parser "(decode)" input of
    Left _       -> Nothing
    Right fields -> convert (Map.fromList fields)
-- | Convert every parsed record; 'Nothing' if any single record fails.
decodeLTSV :: (FromRecord a) => LTSV -> Maybe [a]
decodeLTSV records = mapM decodeRecord records
  where
    decodeRecord = fromRecord . Map.fromList
| krdlab/haskell-ltsv | Data/LTSV/String.hs | bsd-3-clause | 2,981 | 0 | 10 | 743 | 673 | 382 | 291 | 56 | 2 |
module Language.Java.JVM.Generator where
import Language.Java.JVM.JavapParser
import Language.Java.JVM.SignatureParser
import Control.Monad.Identity
import Control.Monad.State
import Data.Char (toUpper)
import Data.List (foldl')
import qualified Data.Set as Set
import Language.Haskell.Exts.Syntax
import Text.Parsec
import System.Info
import System.FilePath
import System.Process
-- | Append @.exe@ to a file name on Windows-like platforms (as reported by
-- 'System.Info.os'); return the name unchanged everywhere else.
addExeExtension :: FilePath -> String
addExeExtension fn
  | os `elem` ["mingw32", "cygwin32", "win32"] = addExtension fn "exe"
  | otherwise                                  = fn
-- | Run @javap -s@ (from @javaHome/bin@) on the given class name and return
-- its output with the first line removed.
-- NOTE(review): presumably the dropped line is javap's header line -- confirm.
runJavap :: FilePath -> String -> IO(String)
runJavap javaHome className=do
    let javapPath=javaHome </> "bin" </> (addExeExtension "javap")
    -- readProcess with empty stdin; "-s" asks javap for internal signatures.
    s<-readProcess javapPath ["-s",className] ""
    let l=lines s
    case l of
        (_:xs)-> return $ unlines xs
        -- Empty output stays empty.
        _ -> return ""
-- | Run javap on the class and parse its output into a 'TypeDecl'.
parseClass :: FilePath -> String -> IO (Either ParseError TypeDecl)
parseClass javaHome className = fmap parseTypeDecl (runJavap javaHome className)
-- | Build a Haskell binding module for a Java type declaration, returning
-- the module AST together with the target file name.
generate :: TypeDecl -> (Module,FilePath)
generate td=let
        cls=td_name td
        -- Module name: '/' in the Java class name becomes '_', and the first
        -- character is upper-cased (see toMod below).
        moduleName=map toMod (zip [0..] cls)
        fp=addExtension moduleName "hs"
        -- Generated declarations start at line 6; the state threads the next
        -- SrcLoc and the set of already-used identifiers (see identName).
        decls=concat $ runIdentity $ evalStateT (mapM (generateDecl cls) (td_decls td)) ((SrcLoc fp 6 1),Set.empty)
        impTypes=ImportDecl (SrcLoc fp 3 1) (ModuleName "Language.Java.JVM.Types") False False Nothing Nothing Nothing
        impAPI=ImportDecl (SrcLoc fp 3 1) (ModuleName "Language.Java.JVM.API") False False Nothing Nothing Nothing
    in (Module (SrcLoc fp 2 1) (ModuleName ("Language.Java.JVM.Bindings."++moduleName)) [LanguagePragma (SrcLoc fp 1 1) [Ident "RankNTypes"]] Nothing Nothing [impTypes,impAPI] decls,fp)
    where
        toMod (_,'/')='_'
        toMod (0,a)=toUpper a
        toMod (_,a)=a
-- | Translate one Java declaration into Haskell declarations: a type
-- signature plus a binding that marshals the arguments and calls through
-- the JVM bridge. Field declarations are not supported.
generateDecl :: String -> JDecl -> SrcLocT [Decl]
generateDecl cls (JMethodDecl name signature static) = do
    -- Reserve two consecutive generated source lines: one for the type
    -- signature, one for the function binding.
    slTyp <- srcLoc
    slFun <- srcLoc
    -- Unique, non-clashing Haskell name ("<init>" becomes "new", "new1", ...).
    funName <- identName name
    let
        -- NOTE(review): assumes parseSignature always succeeds here; a Left
        -- result makes this lazy pattern binding fail at its use sites.
        Right (JSignature params ret) = parseSignature signature
        -- Constructors call newObject; everything else goes through the
        -- typed *Method invoker with the receiver object as first argument.
        (callExp, ret') = if name == "<init>"
            then
                (App (App (Var (UnQual (Ident "newObject"))) (Lit $ String cls)) (Lit $ String signature), Just "JObj")
            else
                let
                    methoddef = App (App (App (Con (UnQual (Ident "Method"))) (Lit $ String cls)) (Lit $ String name)) (Lit $ String signature)
                    methodInvocation = Var (UnQual (Ident $ wrapperToMethod ret))
                    obj = Var (UnQual (Ident "obj"))
                in (App (App methodInvocation obj) methoddef, ret)
        -- One pattern p0, p1, ... per Java parameter.
        pats = zipWith (\_ idx -> PVar $ Ident ("p" ++ show idx)) params [0..]
        -- Instance methods (not constructors, not static) take the receiver
        -- as an extra leading argument.
        patsWithObj = if name /= "<init>" && not static
            then (PVar $ Ident "obj") : pats
            else pats
        -- Wrap each argument in its JVM wrapper constructor, with numeric
        -- widening via 'cast' where needed.
        parms = zipWith (\w idx -> App (Var $ UnQual $ Ident w) $ cast w (Var $ UnQual $ Ident ("p" ++ show idx))) params [0..]
        rhs = UnGuardedRhs $ App callExp $ List parms
        m0 = Match slFun (Ident funName) patsWithObj Nothing rhs (BDecls [])
        retType = TyApp (TyVar $ Ident "m") (TyVar $ Ident $ wrapperToUnwrapped ret')
        -- NOTE(review): foldl' builds the argument types right-to-left, so
        -- with two or more differently-typed parameters the signature order
        -- may not match the binding's argument order -- confirm against the
        -- order parseSignature yields.
        paramType = foldl' (\t p -> TyFun (TyVar $ Ident $ wrapperToUnwrapped $ Just p) t) retType params
        objType = if name == "<init>"
            then paramType
            else TyFun (TyVar $ Ident "JObjectPtr") paramType
        typ = TyForall Nothing [ClassA (UnQual $ Ident "WithJava") [TyVar $ Ident "m"]] objType
        -- Place the signature on the first reserved line (slTyp); previously
        -- slTyp was fetched but unused and the signature reused slFun.
        sig = TypeSig slTyp [Ident funName] typ
    return [sig, FunBind [m0]]
-- Fail loudly with context instead of an anonymous 'undefined'.
generateDecl cls (JFieldDecl name _signature _static) =
    error ("generateDecl: field declarations are not supported: " ++ cls ++ "." ++ name)
-- | Generator monad: pure state of the next source location plus the set of
-- Haskell identifiers already handed out.
type SrcLocT=StateT GenState Identity
type GenState=(SrcLoc,Set.Set String)
-- | Hand out the current source location and advance the line counter by one
-- (column and file name are left untouched).
srcLoc :: SrcLocT SrcLoc
srcLoc = do
    (current, _) <- get
    modify (\(SrcLoc fp line col, names) -> (SrcLoc fp (line + 1) col, names))
    return current
-- | Produce a Haskell identifier for a Java member name that is unique
-- within the module being generated.
identName:: String -> SrcLocT String
identName n=do
    names<-gets snd
    let
        -- Constructors ("<init>") are exposed under the name "new".
        n'=if n=="<init>"
            then "new"
            else n
        -- Candidates: the base name, then name1, name2, ... (infinite list,
        -- so the filter below always finds a first free name).
        possibleNames=[n'] ++ (map (\idx->(n')++(show idx)) [1..])
        okNames=filter (\pn->Set.notMember pn names) possibleNames
        firstOK=head okNames
    -- Remember the chosen name so later members cannot collide with it.
    modify (\(s,ns)->(s,Set.insert firstOK ns))
    return firstOK
-- | For the wide integral wrappers, wrap the argument expression in
-- 'fromIntegral'; all other wrappers pass the expression through unchanged.
cast :: String -> Exp -> Exp
cast wrapper
    | wrapper == "JLong" || wrapper == "JInt" =
        App (Var $ UnQual $ Ident "fromIntegral")
    | otherwise = id
-- | Map a JVM wrapper tag to the plain Haskell type used in generated
-- signatures; 'Nothing' (a void return) maps to unit.
wrapperToUnwrapped :: Maybe String -> String
wrapperToUnwrapped (Just "JObj")    = "JObjectPtr"
wrapperToUnwrapped (Just "JInt")    = "Integer"
wrapperToUnwrapped (Just "JBool")   = "Bool"
wrapperToUnwrapped (Just "JByte")   = "Int"
wrapperToUnwrapped (Just "JChar")   = "Char"
wrapperToUnwrapped (Just "JShort")  = "Int"
wrapperToUnwrapped (Just "JLong")   = "Integer"
wrapperToUnwrapped (Just "JFloat")  = "Float"
wrapperToUnwrapped (Just "JDouble") = "Double"
wrapperToUnwrapped Nothing          = "()"
-- Fixed error message: the original ran "undefined wrapper" and the tag
-- together with no separator.
wrapperToUnwrapped (Just a) = error ("wrapperToUnwrapped: undefined wrapper: " ++ a)
-- | Map a JVM wrapper tag to the name of the typed invocation helper that
-- the generated bindings call; 'Nothing' (void) selects @voidMethod@.
wrapperToMethod :: Maybe String -> String
wrapperToMethod (Just "JObj")    = "objectMethod"
wrapperToMethod (Just "JInt")    = "intMethod"
wrapperToMethod (Just "JBool")   = "booleanMethod"
wrapperToMethod (Just "JByte")   = "byteMethod"
wrapperToMethod (Just "JChar")   = "charMethod"
wrapperToMethod (Just "JShort")  = "shortMethod"
wrapperToMethod (Just "JLong")   = "longMethod"
wrapperToMethod (Just "JFloat")  = "floatMethod"
wrapperToMethod (Just "JDouble") = "doubleMethod"
wrapperToMethod Nothing          = "voidMethod"
-- Fixed error message: the original ran "undefined wrapper" and the tag
-- together with no separator.
wrapperToMethod (Just a) = error ("wrapperToMethod: undefined wrapper: " ++ a)
| JPMoresmau/HJVM | src/Language/Java/JVM/Generator.hs | bsd-3-clause | 5,998 | 0 | 25 | 1,692 | 2,076 | 1,065 | 1,011 | 120 | 4 |
module GeoLabel.Geometry.Polar (Polar(..)) where
import Prelude () -- Don't import anything from standard prelude
import Numeric.Units.Dimensional.Prelude
import GeoLabel.Real (R)
-- | A point in spherical/polar coordinates: radius plus two angles.
-- NOTE(review): which of theta/phi is the polar vs. azimuthal angle is not
-- fixed by this declaration -- confirm at use sites.
data Polar = Polar { r :: Length R,
                     theta :: Angle R,
                     phi :: Angle R } deriving Show
| wyager/GeoLabel | src/GeoLabel/Geometry/Polar.hs | bsd-3-clause | 311 | 0 | 9 | 85 | 79 | 49 | 30 | 7 | 0 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import System.Environment (getArgs)
import qualified Funk.CLI as CLI
-- | Dispatch on the command-line arguments; unknown commands print "?".
handleArgs :: [String] -> IO ()
handleArgs args = case args of
    ["init"]                   -> CLI.handleInit
    ("status" : "playing" : _) -> CLI.handlePlaying args
    _                          -> putStrLn "?"
-- | Read the program's arguments and hand them to the dispatcher.
main :: IO ()
main = do
    args <- getArgs
    handleArgs args
| apa512/funk | src/Main.hs | bsd-3-clause | 360 | 0 | 9 | 53 | 111 | 62 | 49 | 11 | 1 |
module Main where
import Data.Char (digitToInt)
-- Upper search bound (name keeps its original typo; 'main' refers to it).
-- NOTE(review): 6 * 9^5 = 354294 already bounds all fifth-power digit sums,
-- so 999999 is loose but safe.
highesNumber = 999999
-- | True when a number equals the sum of the fifth powers of its digits.
isCoolNumber :: Int -> Bool
isCoolNumber n = n == digitPowerSum
  where
    digitPowerSum = sum [digitToInt d ^ 5 | d <- show n]
-- | Print the sum of all numbers (from 2 up) equal to the sum of the fifth
-- powers of their digits.
main = print . sum . filter isCoolNumber $ [2..highesNumber]
| stulli/projectEuler | eu30.hs | bsd-3-clause | 229 | 0 | 14 | 42 | 98 | 53 | 45 | 6 | 1 |
module GivenTypeWriteFunction where
-- | Function composition: apply the second function first, then the first.
co :: (b -> c) -> (a -> b) -> a -> c
co f g = f . g
-- | Ignore the function and return the value argument unchanged (the only
-- way to produce an @a@ here is to hand back the one we were given).
a :: (a -> c) -> a -> a
a _ x = x
-- | Function application: apply the given function to the given argument.
a' :: (a -> b) -> a -> b
a' f = f
| renevp/hello-haskell | src/givenTypeWriteFunction.hs | bsd-3-clause | 188 | 0 | 8 | 56 | 119 | 63 | 56 | 7 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
#include "kinds.h"
#ifdef DataPolyKinds
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
#endif
#ifdef SafeHaskell
{-# LANGUAGE Safe #-}
#endif
module Type.Semigroup
( (:<>)
)
where
------------------------------------------------------------------------------
-- | Type-level semigroup append; per-kind instances are given below (under
-- DataPolyKinds) and mirror the value-level 'Data.Semigroup' instances.
type family (a :: KPoly1) :<> (b :: KPoly1) :: KPoly1
-- Same fixity as the value-level (<>).
infixr 6 :<>
#ifdef DataPolyKinds
------------------------------------------------------------------------------
-- Lists: type-level concatenation.
type instance '[] :<> '[] = '[]
type instance (a ': as) :<> '[] = (a ': as)
type instance '[] :<> (b ': bs) = (b ': bs)
type instance (a ': as) :<> (b ': bs) = a ': (as :<> (b ': bs))


------------------------------------------------------------------------------
-- Maybe: 'Nothing' is the identity; two 'Just's append their contents.
type instance 'Nothing :<> 'Nothing = 'Nothing
type instance 'Nothing :<> 'Just b = 'Just b
type instance 'Just a :<> 'Nothing = 'Just a
type instance 'Just a :<> 'Just b = 'Just (a :<> b)


------------------------------------------------------------------------------
-- Either: the first 'Right' wins; two 'Left's keep the second.
type instance 'Left _a :<> 'Left b = 'Left b
type instance 'Left _a :<> 'Right b = 'Right b
type instance 'Right a :<> 'Left _b = 'Right a
type instance 'Right a :<> 'Right _b = 'Right a


------------------------------------------------------------------------------
-- Unit: trivial.
type instance '() :<> '() = '()


------------------------------------------------------------------------------
-- Tuples (arities 2-7): componentwise append.
type instance '(a, b) :<> '(a', b') = '(a :<> a', b :<> b')


------------------------------------------------------------------------------
type instance '(a, b, c) :<> '(a', b', c') = '(a :<> a', b :<> b', c :<> c')


------------------------------------------------------------------------------
type instance '(a, b, c, d) :<> '(a', b', c', d') =
    '(a :<> a', b :<> b', c :<> c', d :<> d')


------------------------------------------------------------------------------
type instance '(a, b, c, d, e) :<> '(a', b', c', d', e') =
    '(a :<> a', b :<> b', c :<> c', d :<> d', e :<> e')


------------------------------------------------------------------------------
type instance '(a, b, c, d, e, f) :<> '(a', b', c', d', e', f') =
    '(a :<> a', b :<> b', c :<> c', d :<> d', e :<> e', f :<> f')


------------------------------------------------------------------------------
type instance '(a, b, c, d, e, f, g) :<> '(a', b', c', d', e', f', g') =
    '(a :<> a', b :<> b', c :<> c', d :<> d', e :<> e', f :<> f', g :<> g')
#endif
| duairc/symbols | types/src/Type/Semigroup.hs | bsd-3-clause | 2,489 | 38 | 9 | 404 | 887 | 523 | 364 | -1 | -1 |
module Cloud.AWS.RDS.Types.Tag
( Tag(..)
) where
import Data.Text (Text)
-- | A key\/value tag attached to an RDS resource.
data Tag = Tag
    { tagValue :: Text -- ^ the tag's value
    , tagKey :: Text   -- ^ the tag's key
    }
  deriving (Show, Eq)
| worksap-ate/aws-sdk | Cloud/AWS/RDS/Types/Tag.hs | bsd-3-clause | 170 | 0 | 8 | 49 | 60 | 38 | 22 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.PGComment (
TestPGLineComment(..)
, TestPGBlockComment(..)
, TestPGComment(..)
, spec
) where
import Data.List (isInfixOf, isSuffixOf)
import Data.Maybe (listToMaybe, fromMaybe)
import Data.Proxy (Proxy(..))
import Data.Tagged (tagWith)
import Database.PostgreSQL.Simple.Bind.Parser
import Test.Hspec (Spec, describe, shouldSatisfy)
import Test.QuickCheck (Gen, Arbitrary(..), sized, resize, oneof, suchThat)
import qualified Data.Text as T
import Test.Common (PGSql(..), arbitraryString, charASCII, charASCIInl, arbitrarySumDecomposition)
import Test.Utils (propParserRight, propParsingWorks)
-- | Arbitrary body text for a single-line SQL comment.
newtype TestPGLineComment = TestPGLineComment String deriving (Show)

instance Arbitrary TestPGLineComment where
  arbitrary = TestPGLineComment <$> arbitraryString charASCII

instance PGSql TestPGLineComment where
  -- Rendered as "--" followed by the body, with no trailing newline.
  render (TestPGLineComment s) = "--" ++ s

-- | Block-comment AST: nested groups (rendered as @/* ... */@) interleaved
-- with plain text elements.
data TestPGBlockComment
  = TestPGBlockCommentGroup [TestPGBlockComment]
  | TestPGBlockCommentElement String
  deriving (Show, Eq)
instance Arbitrary TestPGBlockComment where
  -- Split the size budget into chunks (arbitrarySumDecomposition); every
  -- second chunk becomes a nested group, the others plain text elements.
  arbitrary = TestPGBlockCommentGroup <$> (sized $ \n -> arbitrarySumDecomposition n
    >>= mapM mkElement . zip (map ((== 0) . (`mod` 2)) [(1::Int)..])) where
      -- Element text must not contain a comment delimiter, and must not end
      -- in '/' or '*' (which could fuse with an adjacent delimiter).
      mkElement (isGroup, s) = resize s $ if isGroup
        then (arbitrary :: Gen TestPGBlockComment)
        else (TestPGBlockCommentElement <$> (arbitraryString charASCIInl)
                                            `suchThat` (not . isInfixOf "/*")
                                            `suchThat` (not . isInfixOf "*/")
                                            `suchThat` (not . isSuffixOf "/")
                                            `suchThat` (not . isSuffixOf "*"))

  -- Shrink a group by recursing into nested groups, and by shrinking each
  -- child one step (only if that actually changed something).
  shrink (TestPGBlockCommentGroup xs) = (concatMap shrink . filter isGroup $ xs)
                                     ++ (if (xs' /= xs)
                                         then [TestPGBlockCommentGroup xs']
                                         else []) where
    xs' = map (\x -> fromMaybe x . listToMaybe . shrink $ x) xs

    isGroup :: TestPGBlockComment -> Bool
    isGroup (TestPGBlockCommentGroup _) = True
    isGroup _ = False

  -- Shrink long elements to "first char, space, last char".
  shrink (TestPGBlockCommentElement s) = if length s > 3
    then [TestPGBlockCommentElement (head s:' ':last s:[])]
    else []

instance PGSql TestPGBlockComment where
  render (TestPGBlockCommentGroup xs) = "/*" ++ (concatMap render xs) ++ "*/"
  render (TestPGBlockCommentElement x) = x
-- | Either kind of comment, pre-rendered to its SQL text.
newtype TestPGComment = TestPGComment String deriving (Show)

instance Arbitrary TestPGComment where
  arbitrary = TestPGComment <$> oneof [
      render <$> (arbitrary :: Gen TestPGLineComment)
    , render <$> (arbitrary :: Gen TestPGBlockComment)]

instance PGSql TestPGComment where
  render (TestPGComment s) = s

-- | Each parser must accept everything its matching generator renders;
-- block comments additionally must keep their delimiters.
spec :: Spec
spec = do
  describe "pgLineComment" $ do
    propParsingWorks pgLineComment (Proxy :: Proxy TestPGLineComment)

  describe "pgBlockComment" $ do
    propParsingWorks pgBlockComment (Proxy :: Proxy TestPGBlockComment)
    let prop' = propParserRight (tagWith (Proxy :: Proxy TestPGBlockComment) pgBlockComment)
    prop' "starts with /*" . flip shouldSatisfy $ T.isPrefixOf "/*"
    prop' "ends with */" . flip shouldSatisfy $ T.isSuffixOf "*/"

  describe "pgComment" $ do
    propParsingWorks pgComment (Proxy :: Proxy TestPGComment)
| zohl/postgresql-simple-bind | tests/Test/PGComment.hs | bsd-3-clause | 3,606 | 0 | 17 | 774 | 971 | 532 | 439 | 77 | 1 |
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012
--
-- License : BSD-style
--
-- Maintainer : hans@hanshoglund.se
-- Stability : experimental
-- Portability : portable
--
-- Provides Balances, i.e. mappings from relative to absolute dynamics.
--
-------------------------------------------------------------------------------------
module Music.Dynamics.Balance -- (
-- )
where
import Data.Maybe
import Data.Either
import Data.Semigroup
import Control.Monad
import Control.Applicative
-- http://smac2013.renconmusic.org/midi-calibration/
| music-suite/music-dynamics | src/Music/Dynamics/Balance.hs | bsd-3-clause | 630 | 0 | 4 | 72 | 48 | 36 | 12 | 6 | 0 |
module Language.Lambda
( module Language.Lambda.Syntax
, module Language.Lambda.Parser
) where
import Language.Lambda.Syntax
import Language.Lambda.Parser
| utky/lambda-cute | src/Language/Lambda.hs | bsd-3-clause | 162 | 0 | 5 | 21 | 34 | 23 | 11 | 5 | 0 |
{-# LANGUAGE PolyKinds, DataKinds, TemplateHaskell, TypeFamilies,
GADTs, TypeOperators, RankNTypes, FlexibleContexts, UndecidableInstances,
FlexibleInstances, ScopedTypeVariables, MultiParamTypeClasses,
OverlappingInstances, StandaloneDeriving #-}
module Oxymoron.Description.Material where
import Data.Singletons
import Data.Singletons.Extras.Set
import Oxymoron.Description.Attribute
import Oxymoron.Description.Uniform
import Oxymoron.Description.Varying
import Oxymoron.Description.Program
--The material functions as a cache of the uniform values for the program.
--It doesn't add anything in terms of type info (for now), but for
--consistency I've added it.
-- | Wraps a 'Program', re-exposing its attribute, uniform and varying index
-- sets unchanged.
data Material :: (Set Attribute)
              -> (Set Uniform)
              -> (Set Uniform)
              -> (Set Varying)
              -> * where
    Material :: Program a b c d -> Material a b c d
| jfischoff/oxymoron | src/Oxymoron/Description/Material.hs | bsd-3-clause | 881 | 0 | 10 | 169 | 121 | 71 | 50 | 17 | 0 |
{-
This file is part of the package devalot-hakyll. It is subject to the
license terms in the LICENSE file found in the top-level directory of
this distribution and at git://pmade.com/devalot-hakyll/LICENSE. No
part of devalot-hakyll package, including this file, may be copied,
modified, propagated, or distributed except according to the terms
contained in the LICENSE file.
-}
--------------------------------------------------------------------------------
module Hakyll.Web.JavaScript (jsCompiler, jsCreate) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Data.ByteString.Lazy.UTF8 (toString)
import Data.List (intercalate)
import Hakyll
import Text.Jasmine (minify)
--------------------------------------------------------------------------------
-- | Compile a JavaScript file by minimizing it: read the underlying resource
-- as a lazy ByteString, run it through 'minify', and decode to a String.
jsCompiler :: Compiler (Item String)
jsCompiler = fmap (fmap (toString . minify)) getResourceLBS
--------------------------------------------------------------------------------
-- | Compile all matching JavaScript files into a single file.
jsCreate :: Identifier -- ^ The name of the output file to create.
         -> Pattern    -- ^ A pattern to match all of the input files.
         -> Rules ()   -- ^ Rules for Hakyll.
jsCreate file pattern = do
    match pattern $ compile jsCompiler
    create [file] $ do
      route idRoute
      compile $ do
        files <- (loadAll pattern' :: Compiler [Item String])
        -- Join with ";\n" so concatenated scripts cannot run together.
        makeItem $ intercalate ";\n" $ map itemBody files
    -- Exclude the output file itself so it never becomes its own input.
    where pattern' = pattern .&&. complement (fromList [file])
| pjones/devalot-hakyll | src/Hakyll/Web/JavaScript.hs | bsd-3-clause | 1,621 | 0 | 17 | 258 | 239 | 129 | 110 | 19 | 1 |
module Web.Mp3Convertr where
import System.FilePath.Posix (combine)
import System.Process (readProcess)
-- | Settings for transcoding a temp audio file to MP3 (see 'convertToMp3').
data ConvertConf = ConvertConf { getFFmpegCommand :: FilePath -- ^ path to the ffmpeg executable
                               , getTempDir :: FilePath       -- ^ directory holding the source file
                               , getExt :: String             -- ^ source extension, appended verbatim to the base name (include the leading dot)
                               , getDestDir :: FilePath       -- ^ directory for the resulting .mp3
                               , getBitRate :: Int            -- ^ passed to ffmpeg as -ab
                               , getSamplingRate :: Int       -- ^ passed to ffmpeg as -ar
                               } deriving (Show, Read, Eq)
-- | Invoke ffmpeg to transcode the named file from the temp directory into
-- an MP3 in the destination directory; returns the process's stdout.
convertToMp3 :: ConvertConf-> String -> IO String
convertToMp3 (ConvertConf ffmpeg tmpDir ext destDir bits rate) name =
    readProcess ffmpeg ["-i", srcPath, "-ab", show bits, "-ar", show rate, dstPath] []
  where
    srcPath = combine tmpDir (name ++ ext)
    dstPath = combine destDir (name ++ ".mp3")
| algas/mp3huntr | src/Web/Mp3Convertr.hs | bsd-3-clause | 866 | 0 | 9 | 344 | 211 | 120 | 91 | 16 | 1 |
----------------------------------------------------------------------------
-- |
-- Module : Server.Tags.SearchM
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
-- Created : Tuesday, 23 August 2016
----------------------------------------------------------------------------
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
module Haskell.Language.Server.Tags.SearchM
( SearchT
, runSearchT
) where
import Control.Monad.Base
import Control.Monad.Catch
import Control.Monad.Except
import Control.Monad.ErrorExcept
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Control
import Control.Monad.Filesystem (MonadFS)
import Control.Monad.Logging (MonadLog)
import Data.ErrorMessage
import Haskell.Language.Server.Tags.Types
-- | Monad for carrying out symbol search operations: errors over mutable
-- server state over read-only configuration over the base monad @m@.
newtype SearchT m a = SearchM (ErrorExceptT ErrorMessage (StateT TagsServerState (ReaderT TagsServerConf m)) a)
  deriving
    ( Functor
    , Applicative
    , Monad
    , MonadState TagsServerState
    , MonadReader TagsServerConf
    , MonadLog
    , MonadBase b
    )

-- Error and filesystem capabilities need extra constraints on the base
-- monad, hence the standalone deriving.
deriving instance (MonadBase IO m, MonadCatch m) => MonadError ErrorMessage (SearchT m)
deriving instance (MonadBaseControl IO m, MonadMask m) => MonadFS (SearchT m)
-- | Unwrap a 'SearchT' computation, supplying the configuration and initial
-- state; yields the result (or error) together with the final state.
runSearchT
  :: MonadCatch m
  => TagsServerConf
  -> TagsServerState
  -> SearchT m a
  -> m (Either ErrorMessage a, TagsServerState)
runSearchT conf serverState (SearchM action) =
  runReaderT (runStateT (runErrorExceptT action) serverState) conf
| sergv/tags-server | src/Haskell/Language/Server/Tags/SearchM.hs | bsd-3-clause | 1,823 | 0 | 11 | 317 | 329 | 190 | 139 | 41 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
-- | Representations for signed and unsigned integer types
--
-- The reason for using symbol names ending with @_t@ is that 'deriveRender'
-- uses everything that comes before @_@ when rendering the constructor.
module Data.TypeRep.Types.IntWord where
import Data.Int
import qualified Data.Typeable as Typeable
import Data.Word
import Language.Syntactic
import Data.TypeRep.TH
-- | Type representations for the fixed-width signed and unsigned integer
-- types. Constructor names end in @_t@ so 'deriveRender' shows only the part
-- before the underscore (see the module header).
data IntWordType a
  where
    Int8_t   :: IntWordType (Full Int8)
    Int16_t  :: IntWordType (Full Int16)
    Int32_t  :: IntWordType (Full Int32)
    Int64_t  :: IntWordType (Full Int64)
    Word8_t  :: IntWordType (Full Word8)
    Word16_t :: IntWordType (Full Word16)
    Word32_t :: IntWordType (Full Word32)
    Word64_t :: IntWordType (Full Word64)
-- Sugared representation constructors, one per representable type.
int8Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Int8) => a
int8Type = sugarSym Int8_t

int16Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Int16) => a
int16Type = sugarSym Int16_t

int32Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Int32) => a
int32Type = sugarSym Int32_t

int64Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Int64) => a
int64Type = sugarSym Int64_t

word8Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Word8) => a
word8Type = sugarSym Word8_t

word16Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Word16) => a
word16Type = sugarSym Word16_t

word32Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Word32) => a
word32Type = sugarSym Word32_t

word64Type :: (Syntactic a, IntWordType :<: Domain a, Internal a ~ Word64) => a
word64Type = sugarSym Word64_t
-- Template Haskell (Data.TypeRep.TH): derive rendering, type equality, and
-- witnesses/partial witnesses for the listed classes over 'IntWordType'.
deriveRender_forType ''IntWordType
deriveTypeEq ''IntWordType
deriveWitnessAny ''IntWordType
derivePWitnessAny ''IntWordType
deriveWitness ''Typeable.Typeable ''IntWordType
deriveWitness ''Eq ''IntWordType
deriveWitness ''Ord ''IntWordType
deriveWitness ''Show ''IntWordType
deriveWitness ''Num ''IntWordType
deriveWitness ''Integral ''IntWordType
derivePWitness ''Typeable.Typeable ''IntWordType
derivePWitness ''Eq ''IntWordType
derivePWitness ''Ord ''IntWordType
derivePWitness ''Show ''IntWordType
derivePWitness ''Num ''IntWordType
derivePWitness ''Integral ''IntWordType
| emilaxelsson/open-typerep | src/Data/TypeRep/Types/IntWord.hs | bsd-3-clause | 2,323 | 0 | 8 | 407 | 692 | 341 | 351 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Downloads page controller.
module HL.C.Downloads where
import HL.C
import HL.V.Downloads
-- | Downloads controller: render the downloads page view.
getDownloadsR :: C Html
getDownloadsR = senza downloadsV

-- | Downloads page for a particular OS, rendered from the OS-specific view.
getDownloadsForR :: OS -> C Html
getDownloadsForR = senza . downloadsForV
| yogsototh/hl | src/HL/C/Downloads.hs | bsd-3-clause | 326 | 0 | 6 | 50 | 59 | 35 | 24 | 8 | 1 |
{-# LANGUAGE DeriveGeneric, OverloadedStrings #-}
module DB.CardSimple.Model where
import Prelude hiding (id)
import Data.Text
import GHC.Generics
import Data.Aeson
import Data.Time.Clock
import Control.Applicative
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
-- | A two-sided flashcard row. 'id' and 'created_on' are 'Maybe' so a value
-- can be built before the database assigns them.
data CardSimple = CardSimple
  { id :: Maybe Int          -- database id (Nothing before insertion)
  , deck_id :: Int           -- id of the deck this card belongs to
  , front :: Text            -- front-side text
  , back :: Text             -- back-side text
  , info :: Maybe Text       -- optional extra information
  , deleted :: Bool          -- soft-delete flag
  , created_on :: Maybe UTCTime -- creation timestamp, when known
  , created_by :: Int        -- id of the creating user
  } deriving (Generic, Show)
-- JSON encoding/decoding via the Generic defaults (record field names
-- become the JSON keys).
instance FromJSON CardSimple
instance ToJSON CardSimple

-- Row decoding: field order here must match the query's column order
-- (id, deck_id, front, back, info, deleted, created_on, created_by).
instance FromRow CardSimple where
  fromRow = CardSimple <$> field
                       <*> field
                       <*> field
                       <*> field
                       <*> field
                       <*> field
                       <*> field
                       <*> field
| ppseafield/backend-flashcard | src/DB/CardSimple/Model.hs | bsd-3-clause | 880 | 0 | 13 | 285 | 192 | 112 | 80 | 31 | 0 |
module Import
( module Import
) where
import Foundation as Import
import Import.NoFoundation as Import | MaxGabriel/hackvote-yesod | Import.hs | cc0-1.0 | 124 | 0 | 4 | 36 | 22 | 16 | 6 | 4 | 0 |
module PatConstructor where
f :: Bool -> Int
-- NOTE(review): the pattern matches the list constructor (:) against a Bool
-- argument; presumably a deliberately ill-typed type-error example -- confirm.
f ((:) x xs) = 3
| roberth/uu-helium | test/typeerrors/Examples/PatConstructor.hs | gpl-3.0 | 63 | 0 | 7 | 15 | 31 | 18 | 13 | 3 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CognitoSync.SetIdentityPoolConfiguration
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the necessary configuration for push sync.
--
-- This API can only be called with developer credentials. You cannot call
-- this API with the temporary user credentials provided by Cognito
-- Identity.
--
-- /See:/ <http://docs.aws.amazon.com/cognitosync/latest/APIReference/API_SetIdentityPoolConfiguration.html AWS API Reference> for SetIdentityPoolConfiguration.
module Network.AWS.CognitoSync.SetIdentityPoolConfiguration
(
-- * Creating a Request
setIdentityPoolConfiguration
, SetIdentityPoolConfiguration
-- * Request Lenses
, sipcCognitoStreams
, sipcPushSync
, sipcIdentityPoolId
-- * Destructuring the Response
, setIdentityPoolConfigurationResponse
, SetIdentityPoolConfigurationResponse
-- * Response Lenses
, sipcrsIdentityPoolId
, sipcrsCognitoStreams
, sipcrsPushSync
, sipcrsResponseStatus
) where
import Network.AWS.CognitoSync.Types
import Network.AWS.CognitoSync.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The input for the SetIdentityPoolConfiguration operation.
--
-- /See:/ 'setIdentityPoolConfiguration' smart constructor.
data SetIdentityPoolConfiguration = SetIdentityPoolConfiguration'
    { _sipcCognitoStreams :: !(Maybe CognitoStreams) -- ^ optional Cognito streams settings
    , _sipcPushSync       :: !(Maybe PushSync)       -- ^ optional push-sync settings
    , _sipcIdentityPoolId :: !Text                   -- ^ id of the pool to modify
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'SetIdentityPoolConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sipcCognitoStreams'
--
-- * 'sipcPushSync'
--
-- * 'sipcIdentityPoolId'
setIdentityPoolConfiguration
    :: Text -- ^ 'sipcIdentityPoolId'
    -> SetIdentityPoolConfiguration
setIdentityPoolConfiguration pIdentityPoolId_ =
    -- Both optional configurations default to Nothing (leave unchanged).
    SetIdentityPoolConfiguration'
    { _sipcCognitoStreams = Nothing
    , _sipcPushSync = Nothing
    , _sipcIdentityPoolId = pIdentityPoolId_
    }
-- Lenses over the request record.

-- | Options to apply to this identity pool for Amazon Cognito streams.
sipcCognitoStreams :: Lens' SetIdentityPoolConfiguration (Maybe CognitoStreams)
sipcCognitoStreams = lens _sipcCognitoStreams (\ s a -> s{_sipcCognitoStreams = a});

-- | Options to apply to this identity pool for push synchronization.
sipcPushSync :: Lens' SetIdentityPoolConfiguration (Maybe PushSync)
sipcPushSync = lens _sipcPushSync (\ s a -> s{_sipcPushSync = a});

-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
-- Cognito. This is the ID of the pool to modify.
sipcIdentityPoolId :: Lens' SetIdentityPoolConfiguration Text
sipcIdentityPoolId = lens _sipcIdentityPoolId (\ s a -> s{_sipcIdentityPoolId = a});
-- Wires the request to its response type: POSTs the JSON body and decodes
-- the reply field-by-field in the response constructor's declared order,
-- finishing with the numeric status taken from @s@ (presumably the HTTP
-- status -- TODO confirm against 'receiveJSON').
instance AWSRequest SetIdentityPoolConfiguration
         where
        type Rs SetIdentityPoolConfiguration =
             SetIdentityPoolConfigurationResponse
        request = postJSON cognitoSync
        response
          = receiveJSON
              (\ s h x ->
                 SetIdentityPoolConfigurationResponse' <$>
                   (x .?> "IdentityPoolId") <*> (x .?> "CognitoStreams")
                     <*> (x .?> "PushSync")
                     <*> (pure (fromEnum s)))
-- Every request carries the same constant JSON-1.1 content type; no
-- per-request headers are added.
instance ToHeaders SetIdentityPoolConfiguration where
        toHeaders
          = const
              (mconcat
                 ["Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])
-- Serializes only the two optional sections; 'catMaybes' drops absent ones.
-- The pool id is intentionally not in the body -- it travels in the path.
instance ToJSON SetIdentityPoolConfiguration where
        toJSON SetIdentityPoolConfiguration'{..}
          = object
              (catMaybes
                 [("CognitoStreams" .=) <$> _sipcCognitoStreams,
                  ("PushSync" .=) <$> _sipcPushSync])
-- REST path: @/identitypools/{IdentityPoolId}/configuration@.
instance ToPath SetIdentityPoolConfiguration where
        toPath SetIdentityPoolConfiguration'{..}
          = mconcat
              ["/identitypools/", toBS _sipcIdentityPoolId,
               "/configuration"]
-- This operation takes no query-string parameters.
instance ToQuery SetIdentityPoolConfiguration where
        toQuery = const mempty
-- | The output for the SetIdentityPoolConfiguration operation
--
-- /See:/ 'setIdentityPoolConfigurationResponse' smart constructor.
data SetIdentityPoolConfigurationResponse = SetIdentityPoolConfigurationResponse'
    { _sipcrsIdentityPoolId :: !(Maybe Text)
      -- ^ Pool id echoed back by the service, when present.
    , _sipcrsCognitoStreams :: !(Maybe CognitoStreams)
      -- ^ Streams configuration as stored by the service, when present.
    , _sipcrsPushSync       :: !(Maybe PushSync)
      -- ^ Push-sync configuration as stored by the service, when present.
    , _sipcrsResponseStatus :: !Int
      -- ^ Numeric response status (filled from @fromEnum s@ in 'AWSRequest').
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SetIdentityPoolConfigurationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sipcrsIdentityPoolId'
--
-- * 'sipcrsCognitoStreams'
--
-- * 'sipcrsPushSync'
--
-- * 'sipcrsResponseStatus'
-- Builds a response value with all payload sections absent; positional
-- application follows the record's declared field order.
setIdentityPoolConfigurationResponse
    :: Int -- ^ 'sipcrsResponseStatus'
    -> SetIdentityPoolConfigurationResponse
setIdentityPoolConfigurationResponse statusCode =
    SetIdentityPoolConfigurationResponse' Nothing Nothing Nothing statusCode
-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
-- Cognito.
-- (Generated lens over the '_sipcrsIdentityPoolId' field.)
sipcrsIdentityPoolId :: Lens' SetIdentityPoolConfigurationResponse (Maybe Text)
sipcrsIdentityPoolId = lens _sipcrsIdentityPoolId (\ s a -> s{_sipcrsIdentityPoolId = a});
-- | Options to apply to this identity pool for Amazon Cognito streams.
-- (Generated lens over the '_sipcrsCognitoStreams' field.)
sipcrsCognitoStreams :: Lens' SetIdentityPoolConfigurationResponse (Maybe CognitoStreams)
sipcrsCognitoStreams = lens _sipcrsCognitoStreams (\ s a -> s{_sipcrsCognitoStreams = a});
-- | Options to apply to this identity pool for push synchronization.
-- (Generated lens over the '_sipcrsPushSync' field.)
sipcrsPushSync :: Lens' SetIdentityPoolConfigurationResponse (Maybe PushSync)
sipcrsPushSync = lens _sipcrsPushSync (\ s a -> s{_sipcrsPushSync = a});
-- | The response status code.
-- (Generated lens over the '_sipcrsResponseStatus' field.)
sipcrsResponseStatus :: Lens' SetIdentityPoolConfigurationResponse Int
sipcrsResponseStatus = lens _sipcrsResponseStatus (\ s a -> s{_sipcrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cognito-sync/gen/Network/AWS/CognitoSync/SetIdentityPoolConfiguration.hs | mpl-2.0 | 6,862 | 0 | 14 | 1,312 | 912 | 542 | 370 | 112 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Replacer | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/replacer/src/main/javahelp/org/zaproxy/zap/extension/replacer/resources/help_ko_KR/helpset_ko_KR.hs | apache-2.0 | 970 | 80 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>Plug-n-Hack | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/plugnhack/src/main/javahelp/org/zaproxy/zap/extension/plugnhack/resources/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 973 | 80 | 68 | 159 | 421 | 213 | 208 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
module Language.Haskell.Liquid.GHC.Play where
import GHC
import CoreSyn
import Var
import TypeRep
import TcRnMonad
import Coercion
import Control.Arrow ((***))
import qualified Data.HashMap.Strict as M
import Language.Haskell.Liquid.GHC.Misc ()
-- | Things that support capture-naive substitution of Core expressions
-- for binders ('sub') and of Types for type variables ('subTy').
class Subable a where
  sub   :: M.HashMap CoreBndr CoreExpr -> a -> a
  subTy :: M.HashMap TyVar Type -> a -> a
-- Structural traversal of Core expressions.  'sub' replaces free 'Var'
-- occurrences via the map (defaulting to the variable itself); 'subTy'
-- pushes the type substitution into every sub-position, including binders.
instance Subable CoreExpr where
  sub s (Var v)        = M.lookupDefault (Var v) v s
  sub _ (Lit l)        = Lit l
  sub s (App e1 e2)    = App (sub s e1) (sub s e2)
  sub s (Lam b e)      = Lam b (sub s e)
  sub s (Let b e)      = Let (sub s b) (sub s e)
  sub s (Case e b t a) = Case (sub s e) (sub s b) t (map (sub s) a)
  sub s (Cast e c)     = Cast (sub s e) c
  sub s (Tick t e)     = Tick t (sub s e)
  sub _ (Type t)       = Type t
  sub _ (Coercion c)   = Coercion c
  subTy s (Var v)      = Var (subTy s v)
  subTy _ (Lit l)      = Lit l
  subTy s (App e1 e2)  = App (subTy s e1) (subTy s e2)
  -- A type-lambda binder that is itself mapped to a 'TyVarTy' is renamed
  -- to that variable; any other mapping leaves the binder untouched.
  subTy s (Lam b e)   | isTyVar b = Lam v' (subTy s e)
   where v' = case M.lookup b s of
               Just (TyVarTy v) -> v
               _                -> b
  subTy s (Lam b e)      = Lam (subTy s b) (subTy s e)
  subTy s (Let b e)      = Let (subTy s b) (subTy s e)
  subTy s (Case e b t a) = Case (subTy s e) (subTy s b) (subTy s t) (map (subTy s) a)
  subTy s (Cast e c)     = Cast (subTy s e) (subTy s c)
  subTy s (Tick t e)     = Tick t (subTy s e)
  subTy s (Type t)       = Type (subTy s t)
  subTy s (Coercion c)   = Coercion (subTy s c)
-- Coercions carry no substitutable expressions; type substitution into a
-- coercion is not implemented and is a hard error.
instance Subable Coercion where
  sub _ c                = c
  subTy _ _              = error "subTy Coercion"
-- Case alternatives: substitute into the bound pattern variables and the
-- right-hand side; the constructor tag itself is untouched.
instance Subable (Alt Var) where
  sub s (a, b, e)   = (a, map (sub s) b,   sub s e)
  subTy s (a, b, e) = (a, map (subTy s) b, subTy s e)
-- Substituting a variable only succeeds if its image is itself a 'Var'
-- (enforced by 'subVar'); 'subTy' rewrites the variable's type in place.
instance Subable Var where
  sub s v   | M.member v s = subVar $ s M.! v
            | otherwise    = v
  subTy s v = setVarType v (subTy s (varType v))
-- Extract the binder from a 'Var' expression; anything else is a
-- programming error (substituting a non-variable where a binder is needed).
subVar e = case e of
  Var x -> x
  _     -> error "sub Var"
-- Let-bindings: substitute into both the binder and the bound expression;
-- recursive groups map the substitution over every (binder, rhs) pair.
instance Subable (Bind Var) where
  sub s (NonRec x e)   = NonRec (sub s x) (sub s e)
  sub s (Rec xes)      = Rec ((sub s *** sub s) <$> xes)
  subTy s (NonRec x e) = NonRec (subTy s x) (subTy s e)
  subTy s (Rec xes)    = Rec ((subTy s  *** subTy s) <$> xes)
-- Types contain no expressions to 'sub' into; 'subTy' delegates to the
-- standalone traversal 'substTysWith'.
instance Subable Type where
  sub _ e   = e
  subTy     = substTysWith
-- Structural type substitution.  A 'TyVarTy' is looked up in the map
-- (defaulting to itself); 'ForAllTy' deletes its binder from the map
-- before descending, so the bound variable is not captured.
substTysWith s tv@(TyVarTy v)  = M.lookupDefault tv v s
substTysWith s (FunTy t1 t2)   = FunTy (substTysWith s t1) (substTysWith s t2)
substTysWith s (ForAllTy v t)  = ForAllTy v (substTysWith (M.delete v s) t)
substTysWith s (TyConApp c ts) = TyConApp c (map (substTysWith s) ts)
substTysWith s (AppTy t1 t2)   = AppTy (substTysWith s t1) (substTysWith s t2)
substTysWith _ (LitTy t)       = LitTy t
| abakst/liquidhaskell | src/Language/Haskell/Liquid/GHC/Play.hs | bsd-3-clause | 2,898 | 0 | 13 | 887 | 1,508 | 752 | 756 | 68 | 1 |
{-# LANGUAGE CPP #-}
#include "fusion-phases.h"
-- | Selectors.
--
-- See "Data.Array.Parallel.Unlifted" for how this works.
--
module Data.Array.Parallel.Unlifted.Sequential.USel
( -- * Types
USel2(..)
-- * Operations on selectors
, mkUSel2
, lengthUSel2
, tagsUSel2, indicesUSel2
, elementsUSel2_0, elementsUSel2_1
, tagsToIndices2)
where
import Data.Array.Parallel.Unlifted.Sequential.Vector as V
import qualified Data.Vector.Fusion.Bundle as S
import qualified Data.Vector.Fusion.Bundle.Monadic as M
import Data.Vector.Fusion.Stream.Monadic ( Stream(..) )
import Data.Vector.Fusion.Bundle.Monadic ( Bundle(..) )
import Data.Array.Parallel.Base (Tag)
-- | Selector.
--   Pairs a tags array (which source each result element comes from) with a
--   precomputed per-element source index, plus cached counts per tag value.
data USel2
        = USel2
        { usel2_tags      :: !(Vector Tag)  -- ^ Tag (0 or 1) for each result element.
        , usel2_indices   :: !(Vector Int)  -- ^ Source index for each result element.
        , usel2_elements0 :: !Int  -- ^ Number of tags with value 0.
        , usel2_elements1 :: !Int  -- ^ Number of tags with value 1.
        }
-- | O(1). Construct a selector.
--   NOTE: this is the bare record constructor; no consistency between the
--   tags, indices and element counts is checked here.
mkUSel2 :: Vector Tag           -- ^ Tags array.
        -> Vector Int           -- ^ Indices array
        -> Int                  -- ^ Number of elements taken from first array.
        -> Int                  -- ^ Number of elements taken from second array.
        -> USel2
mkUSel2 = USel2
{-# INLINE mkUSel2 #-}
-- Projections ----------------------------------------------------------------
-- INLINE trivial projections as they'll expand to a single record selector.
-- (Each projection below is O(1); they exist so the record selectors need
-- not be exported directly.)
-- | O(1). Get the number of elements represented by this selector.
--   This is the length of the array returned by `combine`.
lengthUSel2 :: USel2 -> Int
lengthUSel2 = V.length . usel2_tags
{-# INLINE lengthUSel2 #-}
-- | O(1). Get the tags array of a selector.
tagsUSel2 :: USel2 -> Vector Tag
{-# INLINE tagsUSel2 #-}
tagsUSel2 = usel2_tags
-- | O(1). Get the indices array of a selector.
indicesUSel2 :: USel2 -> Vector Int
indicesUSel2 = usel2_indices
{-# INLINE indicesUSel2 #-}
-- | O(1). Get the number of elements that will be taken from the first array.
elementsUSel2_0 :: USel2 -> Int
elementsUSel2_0 = usel2_elements0
{-# INLINE elementsUSel2_0 #-}
-- | O(1). Get the number of elements that will be taken from the second array.
elementsUSel2_1 :: USel2 -> Int
elementsUSel2_1 = usel2_elements1
{-# INLINE elementsUSel2_1 #-}
-- | O(n). Compute the source index for each element of the result array.
--   The accumulator @(i,j)@ holds the running count of 0-tags and 1-tags
--   seen so far; each element is assigned the pre-increment count for its
--   own tag, i.e. its position within its source array.
tagsToIndices2 :: Vector Tag -> Vector Int
tagsToIndices2 tags
  = unstream (mapAccumS add (0,0) (stream tags))
  where
    add (i,j) 0 = ((i+1,j),i)
    add (i,j) _ = ((i,j+1),j)
{-# INLINE_STREAM tagsToIndices2 #-}
-- | Accumulating map over a monadic stream bundle: threads @acc@ through
--   'f' left-to-right, yielding the second component of each step.
--   'Skip' and 'Done' pass the accumulator through unchanged; the bundle's
--   size hint @n@ is preserved.
mapAccumS :: (acc -> a -> (acc,b)) -> acc -> S.Bundle v a -> S.Bundle v b
mapAccumS f acc0 (Bundle{sElems=Stream step s0,sSize=n})
 = M.fromStream (Stream step' (acc0,s0)) n
 where
   {-# INLINE_INNER step' #-}
   step' (acc,s)
    = do r <- step s
         case r of
          S.Yield x s' -> let (acc',y) = f acc x
                          in return $ S.Yield y (acc',s')
          S.Skip s'    -> return $ S.Skip (acc,s')
          S.Done       -> return S.Done
{-# INLINE_STREAM mapAccumS #-}
| mainland/dph | dph-prim-seq/Data/Array/Parallel/Unlifted/Sequential/USel.hs | bsd-3-clause | 3,266 | 0 | 16 | 883 | 658 | 387 | 271 | 66 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module Futhark.Internalise.AccurateSizes
( shapeBody
, annotateArrayShape
, argShapes
, ensureResultShape
, ensureResultExtShape
, ensureShape
, ensureShapeVar
)
where
import Control.Applicative
import Control.Monad
import Data.Loc
import qualified Data.HashMap.Lazy as HM
import Prelude
import Futhark.Representation.AST
import Futhark.Construct
import Futhark.MonadFreshNames
-- | Build a body that, instead of the original results, returns the shape
--   arguments for those results: the original body is run, the types of its
--   result expressions are taken, and 'argShapes' maps each shape name to
--   the corresponding concrete dimension.
shapeBody :: (HasScope lore m, MonadFreshNames m, Bindable lore) =>
             [VName] -> [Type] -> Body lore
          -> m (Body lore)
shapeBody shapenames ts body =
  runBodyBinder $ do
    ses <- bodyBind body
    sets <- mapM subExpType ses
    return $ resultBody $ argShapes shapenames ts sets
-- | Replace an array type's shape with constant Int32 dimensions taken from
--   @newshape@, padding with 0 so the rank of the original type is always
--   filled even if @newshape@ is too short.
annotateArrayShape :: ArrayShape shape =>
                      TypeBase shape u -> [Int] -> TypeBase Shape u
annotateArrayShape t newshape =
  t `setArrayShape` Shape (take (arrayRank t) $
                           map (intConst Int32 . toInteger) $ newshape ++ repeat 0)
-- | For each shape name, look up the concrete dimension implied by matching
--   the declared value types against the actual argument types
--   ('shapeMapping').  Names with no match default to the constant 0.
argShapes :: [VName] -> [TypeBase Shape u0] -> [TypeBase Shape u1] -> [SubExp]
argShapes shapes valts valargts =
  map addShape shapes
  where mapping = shapeMapping valts valargts
        addShape name
          | Just se <- HM.lookup name mapping = se
          | otherwise = intConst Int32 0
-- | Like 'ensureResultExtShape', but for fully static result types
--   (lifted via 'staticShapes').
ensureResultShape :: MonadBinder m =>
                     (m Certificates -> m Certificates)
                  -> SrcLoc -> [Type] -> Body (Lore m)
                  -> m (Body (Lore m))
ensureResultShape asserting loc =
  ensureResultExtShape asserting loc . staticShapes
-- | Wrap a body so every result is shape-checked (via 'ensureExtShape')
--   against the corresponding declared return type.  @asserting@ decides
--   whether the generated certificate computations are actually emitted.
ensureResultExtShape :: MonadBinder m =>
                        (m Certificates -> m Certificates)
                     -> SrcLoc -> [ExtType] -> Body (Lore m)
                     -> m (Body (Lore m))
ensureResultExtShape asserting loc rettype body =
  insertBindingsM $ do
    es <- bodyBind body
    let assertProperShape t se =
          let name = "result_proper_shape"
          in ensureExtShape asserting loc t name se
    reses <- zipWithM assertProperShape rettype es
    mkBodyM [] reses
-- | Shape-check a single subexpression: only array-typed variables need a
--   check (delegated to 'ensureShapeVar'); constants and non-arrays are
--   returned unchanged.
ensureExtShape :: MonadBinder m =>
                  (m Certificates -> m Certificates)
               -> SrcLoc -> ExtType -> String -> SubExp
               -> m SubExp
ensureExtShape asserting loc t name orig
  | Array{} <- t, Var v <- orig =
    Var <$> ensureShapeVar asserting loc t name v
  | otherwise = return orig
-- | 'ensureExtShape' specialised to a static 'Type'
--   (lifted via 'staticShapes1').
ensureShape :: MonadBinder m =>
               (m Certificates -> m Certificates)
            -> SrcLoc -> Type -> String -> SubExp
            -> m SubExp
ensureShape asserting loc = ensureExtShape asserting loc . staticShapes1
-- | For an array variable, emit per-dimension equality assertions between
--   the desired shape (the declared type with existentials resolved against
--   the variable's actual type) and the variable's current shape, then bind
--   a shape-coerced alias certified by those assertions.  Non-arrays are
--   returned unchanged.
ensureShapeVar :: MonadBinder m =>
                   (m Certificates -> m Certificates)
               -> SrcLoc -> ExtType -> String -> VName
               -> m VName
ensureShapeVar asserting loc t name v
  | Array{} <- t = do
    newshape <- arrayDims . removeExistentials t <$> lookupType v
    oldshape <- arrayDims <$> lookupType v
    let checkDim desired has =
          letExp "shape_cert" =<<
          eAssert (pure $ PrimOp $ CmpOp (CmpEq int32) desired has) loc
    certs <- asserting $ zipWithM checkDim newshape oldshape
    letExp name $ shapeCoerce certs newshape v
  | otherwise = return v
-- | Make an existential type concrete by borrowing dimensions from a second
--   type: existential ('Ext') dimensions of @t1@ are replaced by the
--   corresponding dimension of @t2@; 'Free' dimensions of @t1@ are kept.
removeExistentials :: ExtType -> Type -> Type
removeExistentials t1 t2 =
  t1 `setArrayDims`
  zipWith nonExistential
  (extShapeDims $ arrayShape t1)
  (arrayDims t2)
  where nonExistential (Ext _)    dim = dim
        nonExistential (Free dim) _   = dim
| CulpaBS/wbBach | src/Futhark/Internalise/AccurateSizes.hs | bsd-3-clause | 3,529 | 0 | 17 | 1,005 | 1,112 | 541 | 571 | 90 | 2 |
module B where
import B1
| sdiehl/ghc | testsuite/tests/ghci/prog019/B.hs | bsd-3-clause | 26 | 0 | 3 | 6 | 7 | 5 | 2 | 2 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Requester</title>
<maps>
<homeID>requester</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/requester/src/main/javahelp/help_de_DE/helpset_de_DE.hs | apache-2.0 | 960 | 92 | 29 | 155 | 389 | 208 | 181 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Revisit | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/revisit/src/main/javahelp/org/zaproxy/zap/extension/revisit/resources/help_fa_IR/helpset_fa_IR.hs | apache-2.0 | 968 | 90 | 29 | 158 | 396 | 211 | 185 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ja-JP">
<title>Passive Scan Rules - Beta | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>コンテンツ</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>インデックス</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>検索</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>お気に入り</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/pscanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/pscanrulesBeta/resources/help_ja_JP/helpset_ja_JP.hs | apache-2.0 | 1,012 | 96 | 27 | 162 | 398 | 210 | 188 | -1 | -1 |
module A (T,t) where
-- A single-constructor type plus an exported value of it.
data T = T
t = T
-- NOTE(review): this Eq instance is deliberately non-lawful -- (==) is
-- always False, even for T == T.  This looks like a compiler-test fixture
-- (exercising recompilation, not semantics); do not "fix" reflexivity here.
instance Eq T where
    t1 == t2 = False
| ghc-android/ghc | testsuite/tests/driver/recomp008/A2.hs | bsd-3-clause | 80 | 0 | 6 | 25 | 43 | 24 | 19 | 5 | 1 |
module Main.Statements where
import Prelude
import Contravariant.Extras
import Hasql.Statement
import qualified Hasql.Encoders as E
import qualified Hasql.Decoders as D
-- | DDL statement that provisions the @account@ table: a serial primary
-- key plus a non-null numeric balance.  Takes no parameters, returns
-- nothing, and is not prepared.
createAccountTable :: Statement () ()
createAccountTable =
  Statement
    "create table account (id serial not null, balance numeric not null, primary key (id))"
    E.noParams
    D.noResult
    False
-- | DDL statement that removes the @account@ table.  Takes no parameters,
-- returns nothing, and is not prepared.
dropAccountTable :: Statement () ()
dropAccountTable =
  Statement sql E.noParams D.noResult False
  where
    sql = "drop table account"
-- | Insert a new account with the given balance (bound to @$1@ as a
--   non-null numeric) and return the generated id as a single-row Int64.
--   Prepared (final 'True').
createAccount :: Statement Scientific Int64
createAccount =
  Statement
    "insert into account (balance) values ($1) returning id"
    ((E.param . E.nonNullable) E.numeric)
    (D.singleRow ((D.column . D.nonNullable) D.int8))
    True
-- | Add a (possibly negative) delta to an account's balance.
--   'contrazip2' binds the account id to @$1@ and the delta to @$2@; the
--   result is True iff at least one row was affected (i.e. the id exists).
--   Prepared (final 'True').
modifyBalance :: Statement (Int64, Scientific) Bool
modifyBalance =
  Statement
    "update account set balance = balance + $2 where id = $1"
    (contrazip2 ((E.param . E.nonNullable) E.int8) ((E.param . E.nonNullable) E.numeric))
    (fmap (> 0) D.rowsAffected)
    True
-- | Fetch an account's balance by id (@$1@).  'D.rowMaybe' yields Nothing
--   when no such account exists.  Prepared (final 'True').
getBalance :: Statement Int64 (Maybe Scientific)
getBalance =
  Statement
    "select balance from account where id = $1"
    ((E.param . E.nonNullable) E.int8)
    (D.rowMaybe ((D.column . D.nonNullable) D.numeric))
    True
| nikita-volkov/hasql-transaction | conflicts-test/Main/Statements.hs | mit | 1,261 | 0 | 12 | 244 | 342 | 183 | 159 | 39 | 1 |
{-# LANGUAGE OverloadedStrings, CPP #-}
{-|
Module : Examples
Description : Provides predefined networks.
Copyright : (c) Tessa Belder 2015.
Provides predefined networks.
-}
module Examples(
networkMap, networkMapFlattened, networkMapTyped,
module Examples.InvariantTestNetwork,
module Examples.Macros,
module Examples.MergeSwitchNetwork,
module Examples.MergeBlock,
module Examples.ReorderTestNetwork,
module Examples.SmallAutomatonNetwork,
module Examples.SmallFunctionNetwork,
module Examples.SmallMacroTest,
module Examples.SmallMatchNetwork,
module Examples.SmallSwitchNetwork,
module Examples.SmallTestNetwork,
module Examples.TwoAgents,
module Examples.AllPrimitives,
module Examples.RedBlue,
module Examples.TypesAndFunctions
#ifdef EIGHT_ROUTER
,module Examples.EightRouterModel
#endif
) where
import Examples.InvariantTestNetwork
import Examples.Macros
import Examples.MergeBlock
import Examples.MergeSwitchNetwork
import Examples.ReorderTestNetwork
import Examples.SmallAutomatonNetwork
import Examples.SmallFunctionNetwork
import Examples.SmallMacroTest
import Examples.SmallMatchNetwork
import Examples.SmallSwitchNetwork
import Examples.SmallTestNetwork
import Examples.TwoAgents
import Examples.AllPrimitives
import Examples.RedBlue
import Examples.TypesAndFunctions
#ifdef EIGHT_ROUTER
import Examples.EightRouterModel
#endif
import Data.HashMap as Hash
import Madl.Network
import Utils.Text
-- | Map containing the predefined networks
-- Keys are the command-line/test names; values are built from the example
-- constructors re-exported above.  The @rtn@/@two_agents@ arguments are
-- passed through verbatim (their meaning is defined in the respective
-- Examples.* module).  The 8-router entries exist only when compiled with
-- EIGHT_ROUTER.
networkMap :: Hash.Map Text MadlNetwork
networkMap = Hash.fromList [
     ("itn", itn),
     ("itn2", itn2),
     ("msn", msn),
     ("rtn-22F", rtn 1 2 2 False),
     ("rtn-32F", rtn 1 3 2 False),
     ("rtn-23F", rtn 1 2 3 False),
     ("rtn-34F", rtn 1 3 4 False),
     ("rtn-45F", rtn 1 4 5 False),
     ("rtn-22T", rtn 1 2 2 True),
     ("rtn-32T", rtn 1 3 2 True),
     ("rtn-23T", rtn 1 2 3 True),
     ("rtn-34T", rtn 1 3 4 True),
     ("rtn-45T", rtn 1 4 5 True),
     ("rtn2-2F", rtn 2 2 0 False),
     ("rtn2-3F", rtn 2 3 0 False),
     ("rtn2-2T", rtn 2 2 0 True),
     ("rtn2-3T", rtn 2 3 0 True),
     ("rtn3-22F", rtn 3 2 2 False),
     ("rtn3-32F", rtn 3 3 2 False),
     ("rtn3-23F", rtn 3 2 3 False),
     ("rtn3-22T", rtn 3 2 2 True),
     ("rtn3-32T", rtn 3 3 2 True),
     ("rtn3-23T", rtn 3 2 3 True),
     ("san", san),
     ("sfn", sfn),
     ("smn", smn),
     ("smt", smt),
     ("smt2", smt2),
     ("smt3", smt3),
     ("sct", sct),
     ("ssn", ssn),
     ("stn", stn),
     ("allprim", allPrimitives False),
     ("allprimDL", allPrimitives True),
     ("twoagents10", two_agents 10 10 2),
     ("twoagents9", two_agents 9 10 2),
     ("twoagents8", two_agents 8 10 2),
     ("twoagents2", two_agents 2 2 2),
     ("mergeblock", mergeBlock),
     ("redblue" , redblue False),
     ("redblueDL" , redblue True)
#ifdef EIGHT_ROUTER
     ,("8router", eightRouter True True 2)
     ,("8routerDL", eightRouter True False 2)
#endif
    ]
-- | Map containing flattened versions of the predefined networks.
-- Derived from 'networkMap' by unfolding all macros and then unflattening;
-- keys are identical to those of 'networkMap'.
networkMapFlattened :: Hash.Map Text FlattenedNetwork
networkMapFlattened = fmap unflatten networkMap_flat where
    networkMap_flat :: Hash.Map Text FlatFlattenedNetwork
    networkMap_flat = fmap unfoldMacros networkMap
-- | Map containing colored versions of the predefined networks.
-- Derived from 'networkMapFlattened' by running channel-type inference on
-- every network; keys are identical to those of 'networkMap'.
networkMapTyped :: Hash.Map Text ColoredNetwork
networkMapTyped = fmap channelTypes networkMapFlattened
| julienschmaltz/madl | examples/Examples.hs | mit | 3,414 | 0 | 8 | 639 | 930 | 549 | 381 | 85 | 1 |
module Main (main) where
import Data.Word
import System.Environment (getArgs, getProgName)
import System.Exit (ExitCode(..), exitFailure)
import System.IO (hPutStrLn, stderr)
import qualified System.Process as P (shell)
import System.Process.ByteString ()
import qualified System.Process.Common as P
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Test.QuickCheck as QC
import qualified Test.QuickCheck.Monadic as QCM
type Runner = Word8x16 -> Word8x16 -> Data -> IO (ExitCode, ByteString, ByteString)
-- Exactly-sixteen-byte payload (enforced only by the Arbitrary instance,
-- which always generates vectors of length 16); used for the two lookup
-- tables written to the subprocess before the data.
newtype Word8x16 = Word8x16 [Word8]
    deriving (Show, Eq)
instance QC.Arbitrary Word8x16 where
    arbitrary = Word8x16 `fmap` QC.vector 16
-- Random input buffer whose length is always a positive multiple of 32
-- bytes (see the generator below).
newtype Data = Data [Word8]
    deriving (Show, Eq)
instance QC.Arbitrary Data where
    arbitrary = do
        cnt <- QC.arbitrary
        -- When not using Positive, cnt happens to be 0 quite often,
        -- and testing empty input is not very valuable
        Data `fmap` QC.vector (QC.getPositive cnt * 32)
-- | Property: both runners, fed identical tables and data, must succeed and
--   produce identical (exit code, stdout, stderr) triples.  Only runner1's
--   exit code is asserted directly; runner2's is covered by the triple
--   equality.
prop_equal :: Runner
           -> Runner
           -> Word8x16
           -> Word8x16
           -> Data
           -> QC.Property
prop_equal runner1 runner2 low high dat = QCM.monadicIO $ do
    result1 <- QCM.run (runner1 low high dat)
    result2 <- QCM.run (runner2 low high dat)
    let (exitCode1, _, _) = result1
    QCM.assert (exitCode1 == ExitSuccess)
    QCM.assert (result1 == result2)
-- | Run a shell command as a Runner.  The subprocess receives, on stdin,
--   the low table (16 bytes) ++ high table (16 bytes) ++ the data itself,
--   and gets the data length (in bytes) as its single argument.
runner :: String -> Runner
runner cmd (Word8x16 low) (Word8x16 high) (Data dat) = do
    let stdin = BS.concat [BS.pack low, BS.pack high, BS.pack dat]
        len = show $ length dat
        proc = P.shell $ unwords [cmd, len]
    P.readCreateProcess proc stdin
-- | Entry point: expects two shell commands (optionally preceded by -v for
--   verbose QuickCheck output) and checks 'prop_equal' over them.
main :: IO ()
main = do
    args <- getArgs
    (verbose, cmd1, cmd2) <- case args of
        ["-v", cmd1, cmd2] -> return (True, cmd1, cmd2)
        [cmd1, cmd2] -> return (False, cmd1, cmd2)
        _ -> usage >> exitFailure
    let runner1 = runner cmd1
        runner2 = runner cmd2
        check = if verbose then QC.verboseCheck else QC.quickCheck
    check $ prop_equal runner1 runner2
  where
    usage = do
        prog <- getProgName
        hPutStrLn stderr $ "Usage: " ++ prog ++ " [-v] cmd1 cmd2"
{-# LANGUAGE FlexibleContexts, LambdaCase, RankNTypes, TypeSynonymInstances, FlexibleInstances, ViewPatterns, OverloadedStrings, ScopedTypeVariables, ConstraintKinds, NamedFieldPuns, NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures -fno-warn-partial-type-signatures -fno-warn-orphans #-}
module Commands.Plugins.Spiros.Extra
( module Commands.Plugins.Spiros.Extra
, module X
, module Commands.Plugins.Spiros.Extra.Types
, module Commands.Plugins.Spiros.Rank
, module Commands.Extra
-- , module Prelude.Spiros
) where
import Commands.Extra hiding (insert) -- the module is a reexport
-- Workflow.insert is used by the `.Run` modules
import Commands.Plugins.Spiros.Types
import Commands.Plugins.Spiros.Extra.Types
import Commands.Plugins.Spiros.Rank
import Commands.Mixins.DNS13OSX9
import Commands.Backends.Workflow
import qualified System.FilePath.Posix as FilePath
import System.Clock (TimeSpec,toNanoSecs,diffTimeSpec)
import Language.Python.Common.Token
import Language.Python.Common.SrcLocation
import Language.Python.Common.ParseError
import qualified Data.Text.Lazy as T
import Data.Text.Lazy (Text)
-- import Control.Lens(imap)
import Data.Semigroup ((<>))
import Data.Default as X
import Data.Foldable
import qualified Data.List as List
import System.Exit(ExitCode(..))
import GHC.Exts (IsString)
import Text.Printf (printf)
import System.IO
import System.Process
import Data.Function as X
import Control.Monad (replicateM_)
--import Prelude.Spiros -- already reexported by commands.extra
import Prelude(toEnum)
type Desugaring a = a -> SpirosMonad_ -- TODO
-- ================================================================ --
-- | Run the action only when the first argument is 'Just'; otherwise do
-- 'nothing'.  (The witness inside the 'Just' is ignored.)
whenJust :: (Monad m) => Maybe a -> (m () -> m ())
whenJust condition_ action_ = maybe nothing (const action_) condition_
-- | Branch on the presence of a 'Maybe': 'Just' selects the first action,
-- 'Nothing' the second.  The contained value is never inspected.
ifJust :: (Monad m) => Maybe b -> m a -> m a -> m a
ifJust condition_ actionTrue_ actionFalse_ =
 case condition_ of
  Just _  -> actionTrue_
  Nothing -> actionFalse_
-- ================================================================ --
-- ================================================================ --
-- debugging
-- import Commands.Frontends.Dragon13
-- import Commands.Plugins.Example.Spacing
-- import Control.Lens hiding (from, ( # ))
-- import Data.List.NonEmpty (NonEmpty (..))
-- import Data.List.NonEmpty (NonEmpty (..))
-- import qualified Data.Text.Lazy as T
-- import qualified Data.Text.Lazy.IO as T
-- import Control.Concurrent.Async
-- import System.Timeout (timeout)
-- -- it seems to be synchronous, even with threaded I guess?
-- attemptAsynchronously :: Int -> IO () -> IO ()
-- attemptAsynchronously seconds action = do
-- (timeout (seconds * round (1e6::Double)) action) `withAsync` (waitCatch >=> \case
-- Left error -> print error
-- Right Nothing -> putStrLn "..."
-- Right (Just _) -> return ()
-- )
-- attempt = attemptAsynchronously 1
-- -- pseudo HTML ordered list
-- ol xs = ifor_ xs $ \i x -> do
-- putStrLn ""
-- putStrLn $ fold [show i, ". ", x]
-- attemptParse :: (Show a) => (forall z. DNSEarleyRHS z a) -> String -> IO ()
-- attemptParse rule s = do
-- putStrLn ""
-- attempt $ parseThrow rule ((T.words . T.pack) s) >>= \case
-- x :| _ -> print x
-- attemptSerialize rhs = attemptAsynchronously 3 $ do
-- serialized <- formatRHS rhs
-- either print printSerializedGrammar serialized
-- printSerializedGrammar SerializedGrammar{..} = do
-- replicateM_ 3 $ putStrLn ""
-- T.putStrLn $ displayDoc serializedRules
-- putStrLn ""
-- T.putStrLn $ displayDoc serializedLists
-- main = do
-- putStrLn ""
-- let rootG = root
-- attemptSerialize rootG
-- attemptMunge "par round grave camel lit with async break break action"
-- ================================================================ --
{- | returns the digits that make up a number in some base, most-significant digit first.
>>> toDigits 10 9
[9]
>>> toDigits 10 1234
[1,2,3,4]
assumes nonnegative input, but still is total.
prop> \(Positive k) n -> ('fromDigits' k . toDigits k) n == n
-}
-- | Unfold a nonnegative number into its digits in the given base,
-- most-significant first.  Zero (or anything below 1) yields @[]@.
toDigits :: (Integral a) => a -> a -> [a]
toDigits base = reverse . List.unfoldr step
 where
 step n
  | n < 1     = Nothing
  | otherwise = Just (n `mod` base, n `div` base)
{- | returns the digits that make up a number in some base, most-significant digit first.
>>> fromDigits 10 [1,2,3,4]
1234
>>> fromDigits 10 [9]
9
>>> fromDigits 10 []
0
-}
-- | Rebuild a number from its digits (most-significant first) via a strict
-- Horner fold: @acc * base + digit@ at each step.  @[]@ yields 0.
fromDigits :: forall a. (Integral a) => a -> [a] -> a
fromDigits base = List.foldl' (\acc d -> acc * base + d) 0
-- TODO remove fromDecimalDigits = fromDigits 10
-- average xs = realToFrac (sum xs) / genericLength xs
-- | Integer mean of a foldable container, with empty input defined as 0
-- (instead of a division by zero).
safeAverage :: Foldable t => t Int -> Int
safeAverage container = case toList container of
 []     -> 0
 values -> sum values `div` length values
-- | A short fixed pause (30ms 'delay') between workflow actions.
pause = do
 delay 30
-- slot :: MonadWorkflow m => String -> m ()
-- | Type the string into the focused field and submit it with Return,
-- after a brief settling delay.
slot s = do
 delay 10
 sendText s
 press "<ret>"
-- | Just the current application's name when it is Google Chrome (the only
-- application treated as the default browser here), otherwise Nothing.
isDefaultBrowser :: MonadWorkflow m => m (Maybe String)
isDefaultBrowser = currentApplication >>= \case
 x@"Google Chrome" -> return$ Just x
 _ -> return$ Nothing
-- | Keep the path when its base name (extension stripped) is one of the
-- recognized browsers; otherwise Nothing.  NOTE the comparison is against
-- "Firefox"/"Chrome" only -- "Google Chrome" would NOT match.
isBrowser path
 | FilePath.takeBaseName path `elem` browserNames = Just path
 | otherwise                                      = Nothing
 where
 browserNames = ["Firefox", "Chrome"]
-- Inter-keystroke delays, in milliseconds.
defaultDelay = 100 :: Int
chromeDelay = 250 :: Int -- milliseconds
-- in chrome, keypresses are lost when the delay isnt long enough
-- Browsers get the slower Chrome timing across the board.
browserDelay = chromeDelay
-- runRepeat :: (MonadWorkflow m) => Int -> Number -> (m () -> m ())
-- | Run an action @times_@ times with a @delay_@-millisecond pause
-- between consecutive repetitions (no trailing delay).
runRepeat delay_ times_
 = traverse_ id
 . List.intersperse (delay delay_)
 . replicate times_
--TODO action grouping: insert nullop between each, for logging
-- used when sendText is too slow/laggy
-- insertByClipboard :: String -> AMonadAction_
-- insertByClipboard :: MonadWorkflow m => String -> m ()
-- | Insert text by loading it into the clipboard and pasting.
-- NOTE: overwrites the current clipboard contents (see
-- 'restoringClipboard' for a safer variant).
insertByClipboard s = do
 setClipboard s
 presspaste
-- presspaste :: MonadWorkflow m => m ()
-- | Press the paste chord ("M-v").
presspaste = press "M-v"
-- runs the action, then restores the previous clipboard contents. dictation still pollutes clipboard history, but the most recent "manual" clipboard contents should be preserved.
-- benign race condition, as no lock is kept on the system clipboard
restoringClipboard :: Workflow a -> Workflow a
restoringClipboard m = do
 contents <- getClipboard
 x <- m
 delay 100 -- otherwise, e.g. the old clipboard contents are reset before the temporary clipboard contents are paste TODO call Haskell from Objective-C on callback?
 setClipboard contents
 return x
-- | Absolute difference between two clock readings, truncated to whole
-- milliseconds (computed from the nanosecond difference).
diffTimeSpecAsMilliseconds :: TimeSpec -> TimeSpec -> Integer
diffTimeSpecAsMilliseconds end start =
 toNanoSecs (diffTimeSpec end start) `div` 1000000
-- | Map success/failure onto process exit codes ('True' is success;
-- 'False' becomes exit code 1).
bool2exitcode :: Bool -> ExitCode
bool2exitcode ok = if ok then ExitSuccess else ExitFailure 1
-- | Render a number zero-padded to at least @width@ digits
-- (e.g. @padNumber 3 7 == "007"@).
padNumber :: Integral a => Int -> a -> String
padNumber width n = printf formatString (toInteger n)
 where
 formatString = "%0." ++ show width ++ "d"
-- | Prefix every line of @code@ with a bracketed, zero-padded line number
-- (@[01]@, @[02]@, ...), numbering from 1.  Returns the total margin width,
-- the digit count, and the annotated text.
leftAppendLineNumbers :: Text -> (Int,Int,Text)
leftAppendLineNumbers code = (marginWidth, countWidth, (T.unlines . _imap go) allLines)
 where
 -- '_imap' below is 1-based, so its index already IS the line number.
 -- (BUGFIX: a leftover @(+1)@ view pattern incremented it again, so the
 -- first line was labeled "[2]".)
 go lineNumber oneLine = getLeftMargin lineNumber <> oneLine
 marginWidth = (fromInteger . toInteger . T.length) (getLeftMargin (0::Integer)) -- assumes the length is constant
 getLeftMargin lineNumber = "[" <> T.pack (padNumber countWidth lineNumber) <> "]"
 countWidth = length (show lineCount)
 lineCount = length allLines
 allLines = T.lines code
-- | Indexed map whose indices start at ONE (unlike lens's 0-based @imap@);
-- callers must not re-increment the index.
_imap :: (Integer -> a -> b) -> [a] -> [b]
_imap f xs = zipWith f [1..] xs
-- | Extract a (row, column) position from a language-python 'ParseError',
-- falling back to (1,1) when the error carries no usable location.
getPythonErrorSpan :: ParseError -> (Int,Int)
getPythonErrorSpan = maybe (1,1) id . go -- TODO default error span
 where
 go = \case
  UnexpectedToken (token_span -> theSpan) -> fromSourceSpan theSpan
  UnexpectedChar _ location -> fromSourceLocation location
  _ -> Nothing
 -- Spans report their starting row/column; multi-line spans use the start.
 fromSourceSpan = \case
  SpanCoLinear{ span_row, span_start_column } -> Just (span_row, span_start_column)
  SpanMultiLine{ span_start_row, span_start_column } -> Just (span_start_row, span_start_column)
  SpanPoint{ span_row, span_column } -> Just (span_row, span_column)
  _ -> Nothing
 fromSourceLocation = \case
  Sloc{ sloc_row, sloc_column } -> Just (sloc_row, sloc_column)
  _ -> Nothing
-- | Join words with single spaces and convert to 'String'.
-- ('T.unwords' is exactly @intercalate " "@, just idiomatic.)
showWords :: [Text] -> String
showWords = T.unpack . T.unwords
-- | Print the given lines joined by newlines (a final newline comes from
-- 'putStrLn'; an empty list still prints one blank line).
printMessage :: [String] -> IO ()
printMessage messageLines = putStrLn (List.intercalate "\n" messageLines)
-- | Print a prompt (without newline), flush so it appears before blocking,
-- and read one line of user input.
prompt :: String -> IO String
prompt s = do
 putStr s
 hFlush stdout
 getLine
-- | An association of spoken entries to values.
type Vocab a = [(String, a)]
-- | Build a vocabulary grammar, mapping each recognized value through @f@.
vocabWith :: (IsString t, Show t, Functor'RHS n t f) => (a->b) -> Vocab a -> RHS n t f b
vocabWith f = vocab . fmap (fmap f)
{-| Convert milliseconds to microseconds (suitable for @threadDelay@).

>>> milliseconds 10
10000
-}
milliseconds :: Int -> Int
milliseconds ms = ms * 1000
{-| Convert seconds to microseconds (suitable for @threadDelay@).

>>> seconds 1
1000000
-}
seconds :: Int -> Int
seconds s = s * 1000000
{-| Run a command with the given arguments (and empty stdin), returning the
exit code together with stdout and stderr each split into lines.
-}
-- Fixes: the middle component of 'readProcessWithExitCode' is standard
-- OUTPUT, not standard input, so the binding is renamed; an explicit type
-- signature is added to match the rest of the file.
readCommand :: String -> [String] -> IO (ExitCode, [String], [String])
readCommand aCommand someArguments = do
 (exitCode, standardOutput, standardError) <- readProcessWithExitCode aCommand someArguments ""
 return (exitCode, lines standardOutput, lines standardError)
-- | Classify an application name into the editing context it belongs to.
readSpirosContext :: String -> SpirosContext
readSpirosContext application
 | Just _ <- isEmacsApp application = EmacsContext
 | application == "Google Chrome" = ChromeContext
 | application == "IntelliJ" = IntelliJContext
 | otherwise = GlobalContext
-- | Recognize paths whose base name is one of the Emacs-like applications,
-- passing the original path through on success.
isEmacsApp :: FilePath -> Maybe FilePath
isEmacsApp path
 | baseName `elem` emacsLikeNames = Just path
 | otherwise = Nothing
 where
 baseName = FilePath.takeBaseName path
 emacsLikeNames = ["Emacs","Work","Notes","Diary","Obs","Commands"]
-- | Drop every element that equals 'mempty'.
filterMempty :: (Monoid a, Eq a) => [a] -> [a]
filterMempty = filter (mempty /=)
{-| Run the actions in order with a @delay t@ between each consecutive pair.
e.g.
@
withDelay 30 -- milliseconds
 [ press "H-<tab>"
 , insert =<< getClipboard
 , press "H-<tab>"
 ]
@
-}
withDelay :: (MonadWorkflow m) => Int -> [m ()] -> m ()
withDelay t actions = sequence_ (List.intersperse (delay t) actions)
-- | All constructors of a zero-based 'Enum', in declaration order.
--
-- >>> constructors :: [Bool]
-- [False,True]
--
-- (A @Bounded@ constraint is elided for convenience; this does not terminate
-- on unbounded enumerations.)
constructors :: (Enum a) => [a]
constructors = [toEnum 0 ..]
-- ================================================================ --
-- comapt
--TODO -- | my runWorkflow
-- runWorkflow' :: WorkflowT IO a -> IO a
-- runWorkflow' = runWorkflowT def
{- old
delaying
-}
-- | Run the action @k@ times, discarding all results.
replicateDelayingA :: (Applicative m) => Natural -> m a -> m ()
replicateDelayingA k action = replicateM_ (integral k) action
-- | Convert between integral-ish types by going through 'Integer'
-- (a polymorphic-result 'fromIntegral').
--
-- Fixes: the original body @toInteger > fromInteger@ applies the comparison
-- operator to two functions; the intent (composition) is expressed here with
-- plain function composition, which also matches a custom forward-composition
-- @>@ if one was in scope.
integral :: (Integral i, Num j) => i -> j
integral = fromInteger . toInteger
| sboosali/commands | commands-spiros/config/Commands/Plugins/Spiros/Extra.hs | mit | 10,480 | 0 | 12 | 1,877 | 2,156 | 1,206 | 950 | 147 | 7 |
module Main where
import Control.Monad.Trans
import System.Directory
import System.FilePath
import System.Environment
import System.Exit
import Text.PrettyPrint.HughesPJ
import ConstantCode
import CSPMDataStructures
import CSPMParser
import CSPMPrettyPrinter
import CSPMRecursionRefactorings
import CSPMTypeChecker.TCModule
import CSPMTypeChecker.TCMonad
import OpSemRules
import OpSemParser
import OpSemTypeChecker
import Util
-- | Entry point: expects exactly two command-line arguments (the operational
-- semantics file and the CSPM script), runs the compiler, and exits non-zero
-- on failure.
--
-- Fixes: the original indexed 'args!!0' and 'args!!1', which crashed with a
-- partial-function error when fewer than two arguments were supplied; now a
-- usage message is printed instead.
main :: IO ()
main =
	do
		args <- getArgs
		case args of
			[opSemFile, cspmFile] -> do
				res <- runTyger (tygerMain opSemFile cspmFile)
				case res of
					Left err -> putStrLn (show err) >> exitFailure
					Right _ -> exitSuccess
			_ -> do
				putStrLn "Usage: tyger <operational-semantics-file> <cspm-file>"
				exitFailure
-- | Like 'main' but intended for interactive (GHCi) use: reports the outcome
-- on stdout and never terminates the process.
interactiveMain :: FilePath -> FilePath -> IO ()
interactiveMain opSemFile cspmFile =
	do
		result <- runTyger (tygerMain opSemFile cspmFile)
		case result of
			Left err -> print err
			Right _ -> putStrLn "Done"
-- | The full compiler pipeline: parse and type-check the operator
-- definitions, compile them, parse and transform the CSPM script, then write
-- the generated operator module (<opsem>.csp) and the compiled script
-- (<name>_Compiled.csp) next to the inputs.
tygerMain :: FilePath -> FilePath -> Tyger ()
tygerMain opsemFile cspmFile =
	do
		inputOpDefn <- parseOpSemFile opsemFile
		opSemDefn <- typeCheckOperators inputOpDefn
		let compiledOps = compileOperators opSemDefn
		cspmModules <- parseCSPMFile cspmFile opSemDefn
		-- Only a single (global) module is supported for now.
		if length cspmModules > 1 then
			panic "Modules are not currently supported"
		else return ()
		transformedModules <- runTypeChecker (do
			typeCheckedModules <- typeCheckModules opSemDefn cspmModules
			runTransformMonad $ transformModules opSemDefn typeCheckedModules)
		-- Assemble the generated CSP "Operator_M" module as one big string.
		let operatorsFile =
			"module Operator_M"
			++ indentEveryLine operatorModuleNotExported
			++ (indentEveryLine . show) (rulesFunctionToCSP compiledOps)
			++ ((indentEveryLine . show)
					(discardableArgsFunctionToCSP compiledOps))
			++ "exports\n"
			++ (indentEveryLine . show) (channelsToCSP opSemDefn)
			++ indentEveryLine operatorModuleExported
			++ "endmodule\n"
			++ makeHeading "User Callable Functions"
			++ globalModule
			++ makeHeading "Operators"
			-- TODO: maybe move these into the exports of the module
			++ show (operatorDatatypeToCSP compiledOps)++"\n"
			++ show (operatorShortcutsToCSP compiledOps)++"\n"
			++ show (replicatedOperatorsToCSP opSemDefn)
		let outputOpSemFile = replaceExtension opsemFile ".csp"
		let outputCSPMFile =
			replaceFileName cspmFile
				(dropExtension (takeFileName cspmFile)++"_Compiled.csp")
		-- NOTE(review): partial pattern — relies on the single-module check
		-- above having guaranteed exactly one 'GlobalModule'.
		let [Annotated _ _ (GlobalModule decls)] = cspmModules
		let channels = concat [n | Channel n _ <- map removeAnnotation decls]
		absoluteCSPMFilePath <- liftIO $ canonicalizePath outputCSPMFile
		absoluteOpSemFilePath <- liftIO $ canonicalizePath outputOpSemFile
		-- NOTE(review): this 'cspmFile' binding deliberately(?) shadows the
		-- input-path parameter of the same name — confirm intended.
		let cspmFile =
			"include \""++makeRelative (takeDirectory absoluteCSPMFilePath)
								absoluteOpSemFilePath
			++"\"\n\n"
			++"UserEvents = {|"
			++ (show . sep . punctuate comma . map prettyPrint) channels
			++"|}\n\n"
			++show (prettyPrint (head transformedModules))
		liftIO $ writeFile outputOpSemFile (fixQuoting operatorsFile)
		liftIO $ writeFile outputCSPMFile cspmFile
		return ()
-- | Undo the pretty-printer's escaping of the "|]" token: every literal
-- backslash-'|' backslash-']' sequence is collapsed back to "|]".
--
-- Fixes: adds the missing type signature (all sibling definitions have one).
fixQuoting :: String -> String
fixQuoting [] = []
fixQuoting ('\\':'|':'\\':']':xs) = '|' : ']' : fixQuoting xs
fixQuoting (x:xs) = x : fixQuoting xs
| tomgr/tyger | src/Main.hs | mit | 3,109 | 75 | 22 | 586 | 866 | 433 | 433 | 84 | 2 |
-- | Example input/output pairs: each list of booleans is paired with the
-- result of "or"-ing its elements.
exs :: [([Bool], Bool)]
exs =
  [ ([True, True, True], True)
  , ([False, False, False], False)
  , ([True, False], True)
  , ([False, True], True)
  ]
| santolucito/ives | tests/benchmarks/bool_or.hs | mit | 169 | 0 | 9 | 52 | 95 | 59 | 36 | 5 | 1 |
module Statistics where
import Data.List
import Control.Monad
import System.Directory
import Submissions
import Users
import Assignments
-- | One histogram bar: the number of scores falling between the two bounds.
data Bucket = Bucket
	{ rangeMin :: Double  -- ^ lower bound of the bucket's range
	, rangeMax :: Double  -- ^ upper bound of the bucket's range
	, count :: Int        -- ^ how many scores landed in this bucket
	} deriving Show
-- | Summary statistics for a collection of scores.
data Statistics = Statistics
	{ minPossible :: Double  -- ^ lowest achievable score
	, maxPossible :: Double  -- ^ highest achievable score
	, mean :: Double         -- ^ arithmetic mean of achieved scores
	, median :: Double       -- ^ median of achieved scores
	, minAchieved :: Double  -- ^ lowest score actually achieved
	, maxAchieved :: Double  -- ^ highest score actually achieved
	, histogram :: [Bucket]  -- ^ distribution of scores over buckets
	} deriving Show
-- | A submission result: points earned plus a pass/fail flag.
data Score = Score
	{ points :: Double  -- ^ points awarded
	, pass :: Bool      -- ^ whether the submission passed
	} deriving (Eq,Show,Read)
-- | Order scores primarily by pass/fail (failures first), breaking ties by
-- points — i.e. lexicographic order on (pass, points).
instance Ord Score where
	compare x y = compare (pass x, points x) (pass y, points y)
-- | A user paired with their (aggregate) score, for ranking.
data UserScore = UserScore
	{ identifier :: UserIdentifier  -- ^ the user being ranked
	, score :: Score                -- ^ that user's score
	} deriving (Eq,Show,Read)
-- | Descending order by 'score': the best score compares as the smallest
-- element, so a plain 'sort' produces a best-first ranking.
instance Ord UserScore where
	compare a b = compare (score b) (score a)
-- | Every assignment category that contributes to a year's total.
-- (Type signature added; all other top-level bindings in this module have one.)
atypes :: [AType]
atypes = [Homework, Exam, Project]
--test data
-- Sample scores and a sample assignment, kept for manual testing in GHCi.
s1 = Score 4.5 False
s2 = Score 4.4 True
assign = Assignment 2015 Homework 5
-- for testing purposes
-- | Persist a 'Score' for one user's submission of an assignment: creates the
-- user's directory under the assignment path if needed and writes the 'show'n
-- score into its "review" file.
-- NOTE(review): 'uId' is appended with no separator, so this assumes
-- 'getAssignmentPath' returns a trailing-slash path — confirm.
writeScore :: Assignment -> UserIdentifier -> Score -> IO ()
writeScore a uId score = do
	let path = getAssignmentPath a ++ uId
	createDirectoryIfMissing True path
	writeFile (path ++ "/review") $ show score
-- | Read a persisted 'Score' from @path@, yielding a zero/fail score when the
-- file is missing or unparseable.
--
-- Fixes: the original used the partial 'read', which crashed on a malformed
-- review file; 'reads' now falls back to the default score instead.
readScore :: String -> IO Score
readScore path = do
	exists <- doesFileExist path
	if exists
		then do
			contents <- readFile path
			case reads contents of
				[(parsed, _)] -> return parsed
				_ -> return defaultScore
		else return defaultScore
	where
		defaultScore = Score 0 False
-- | Look up a user's review score stored beneath @dir@ (at @dir/<uId>/review@).
getScoreFromDir :: String -> String -> IO Score
getScoreFromDir uId dir = readScore (dir ++ "/" ++ uId ++ "/review")
-- | Combine sub-scores: points add up, and the whole passes only when every
-- part passed.
processScores :: [Score] -> Score
processScores scores = Score totalPoints allPassed
	where
		totalPoints = sum [points s | s <- scores]
		allPassed = all pass scores
-- | Total score for one user over every problem folder of the given
-- assignment type and year.
typeScore :: Integer -> AType -> UserIdentifier -> IO Score
typeScore year atype uId = do
	let rootPath = assignmentHome ++ show year ++ "/" ++ show atype
	entries <- listDirectory rootPath
	let problemDirs = [rootPath ++ "/" ++ entry | entry <- entries]
	scores <- mapM (getScoreFromDir uId) problemDirs
	return (processScores scores)
-- | The score one user received for a single assignment.
assignmentScore :: Assignment -> UserIdentifier -> IO Score
assignmentScore a uId = readScore (getAssignmentPath a ++ uId ++ "/review")
-- | A user's combined score across all assignment types of a year.
yearScore :: Integer -> UserIdentifier -> IO Score
yearScore year uId = do
	perType <- mapM (\atype -> typeScore year atype uId) atypes
	return (processScores perType)
-- | Expand a (root, names) pair into full directory paths, each terminated
-- with a slash.
parseFullPath :: (String, [String]) -> [String]
parseFullPath (root, names) = [root ++ "/" ++ name ++ "/" | name <- names]
-- | Every user who submitted anything for the given year and assignment type
-- (deduplicated).
typeListUsers :: Integer -> AType -> IO [UserIdentifier]
typeListUsers year atype = do
	let rootFolder = assignmentHome ++ show year ++ "/" ++ show atype ++ "/"
	problems <- listDirectory rootFolder
	userLists <- mapM (\p -> listDirectory (rootFolder ++ p ++ "/")) problems
	return (nub (concat userLists))
-- | Every user who submitted anything during the year, across all types
-- (deduplicated).
--
-- Fixes: removes the unused local binding 'rootFolder' — the per-type helper
-- already builds its own paths.
yearListUsers :: Integer -> IO [UserIdentifier]
yearListUsers year = do
	users <- mapM (typeListUsers year) atypes
	return $ nub $ concat users
-- | All users of a year, ranked best-first by their total year score
-- (the 'Ord UserScore' instance is descending).
ranked :: Integer -> IO [UserScore]
ranked year = do
	users <- yearListUsers year
	scores <- mapM (yearScore year) users
	return (sort (zipWith UserScore users scores))
-- | Users of one assignment type, ranked best-first.
-- NOTE(review): this ranks by the whole-year score ('yearScore'), not by the
-- per-type score ('typeScore year atype') that the name suggests — confirm
-- whether that is intended.
typeRanked :: Integer -> AType -> IO [UserScore]
typeRanked year atype = do
	users <- typeListUsers year atype
	scores <- mapM (yearScore year) users
	let userScores = map (\x -> uncurry UserScore x) $ zip users scores
	return $ sort userScores
-- | Users who submitted one particular assignment, ranked best-first.
assignmentRanked :: Assignment -> IO [UserScore]
assignmentRanked a = do
	let dir = getAssignmentPath a
	users <- listDirectory dir
	scores <- mapM (\u -> getScoreFromDir u dir) users
	return (sort (zipWith UserScore users scores))
{-# LANGUAGE OverloadedStrings #-}
-- | Mobile application authentication flow example
--
-- Please remember to substitute __YOUR_API_KEY__,
-- __YOUR_SECRET__, __USERNAME__ and __PASSWORD__
-- for real values
import Control.Lens -- lens
import Data.Aeson.Lens -- lens-aeson
import qualified Data.Text as Text -- text
import qualified Data.Text.IO as Text -- text
import Lastfm -- liblastfm
import Lastfm.Authentication -- liblastfm
-- | Perform the last.fm mobile authentication flow: sign a 'getMobileSession'
-- request with the shared secret, then pull the session key out of the JSON
-- response with lens.
main :: IO ()
main = withConnection $ \conn -> do
  -- '<* json' requests the JSON response format; 'sign s' appends the api_sig.
  r <- lastfm conn . sign s $ getMobileSession <*> username u <*> password p <*> apiKey ak <* json
  -- Drill into response.session.key; 'Nothing' if any step is missing.
  let maybeSk = r ^? folded.key "session".key "key"._String
  Text.putStrLn $ case maybeSk of
    Just sk -> "Mobile session key: " `Text.append` sk
    Nothing -> "Mobile session key wasn't retrieved, something goes wrong"
  where
    -- Placeholder credentials: substitute real values before running.
    ak = "__YOUR_API_KEY__"
    s  = "__YOUR_SECRET__"
    u  = "__USERNAME__"
    p  = "__PASSWORD__"
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.