code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
module Day4 where
import ClassyPrelude
import Control.Monad.Trans.Either
import Data.Attoparsec.Text hiding (take)
import Data.Char
import Debug.Trace
input :: IO Text
input = readFile "data/day4.txt"
test = "aaaaa-bbb-z-y-x-123[abxyz]\n\
\a-b-c-d-e-f-g-h-987[abcde]\n\
\not-a-real-room-404[oarel]\n\
\totally-real-room-200[decoy]"
name :: Parser String
name = intercalate "-" <$> (many1 letter `sepBy` char '-')
nameNoDash :: Parser String
nameNoDash = concat <$> (many1 letter `sepBy` char '-')
sector :: Parser Int
sector = decimal
checksum :: Parser String
checksum = char '[' *> many letter <* char ']'
data Entry =
Entry {_name :: String
,_room :: Int
,_check :: String}
  deriving (Show)
entry :: Parser Entry
entry = Entry <$> (name <* char '-') <*> sector <*> checksum
t1 = "aaaaa-bbb-z-y-x-123[abxyz]"
t4 = "totally-real-room-200[decoy]"
p1 = parseOnly entry t1
p4 = parseOnly entry t4
isValid :: Entry -> Bool
isValid e =
let gs = group $ sort (filter (/= '-') (_name e))
cs = concatMap toCounts gs
cs' = take 5 $ sortBy (comparing (\(i,c) -> (-i, c))) cs
check = map snd cs'
in _check e == check
where toCounts xs@(x:_) = [(length xs, x)]
toCounts _ = []
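-- A worked example of the checksum rule, using the test entries parsed above
-- (ties between equally frequent letters are broken alphabetically):
-- >>> isValid <$> p1   -- "aaaaa-bbb-z-y-x": counts a:5 b:3 x:1 y:1 z:1, checksum "abxyz"
-- Right True
-- >>> isValid <$> p4   -- "decoy" is not the five most common letters of "totallyrealroom"
-- Right False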
result1 =
runEitherT $
do es <- EitherT (parseOnly (entry `sepBy1` endOfLine) <$> input)
-- es <- EitherT (parseOnly (entry `sepBy1` endOfLine) <$> (pure test :: IO Text))
pure (sum . map _room $ filter isValid es)
shiftLetter :: Int -> Char -> Char
shiftLetter _ '-' = '-'
shiftLetter i c =
let a = ord 'a' :: Int
c' = ord c :: Int
in chr (((c' - a) + i) `mod` 26 + a)
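-- Worked example: only the shift modulo 26 matters, so a sector id of 343
-- acts as a shift of 5 (343 `mod` 26 == 5):
-- >>> map (shiftLetter 343) "qzmt-zixmtkozy-ivhz"
-- "very-encrypted-name"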
result2 =
runEitherT $
do es <- EitherT (parseOnly (entry `sepBy1` endOfLine) <$> input)
-- es <- EitherT (parseOnly (entry `sepBy1` endOfLine) <$> (pure test :: IO Text))
-- pure $ (map decode . filter isValid) <$> ees
pure
(filter (\e -> decode e == "northpole-object-storage") $
filter isValid es)
where decode e =
map (shiftLetter (_room e))
(_name e)
| farrellm/advent-2016 | src/Day4.hs | mit | 2,122 | 0 | 16 | 498 | 700 | 372 | 328 | 57 | 2 |
{-# OPTIONS #-}
-- ------------------------------------------------------------
module Holumbus.Crawler
( module Holumbus.Crawler.Constants
, module Holumbus.Crawler.Core
, module Holumbus.Crawler.Html
, module Holumbus.Crawler.Logger
, module Holumbus.Crawler.Robots
, module Holumbus.Crawler.RobotTypes
, module Holumbus.Crawler.Types
, module Holumbus.Crawler.URIs
, module Holumbus.Crawler.Util
, module Holumbus.Crawler.XmlArrows
)
where
import Holumbus.Crawler.Constants
import Holumbus.Crawler.Core
import Holumbus.Crawler.Html
import Holumbus.Crawler.Logger
import Holumbus.Crawler.Robots
import Holumbus.Crawler.RobotTypes
import Holumbus.Crawler.Types
import Holumbus.Crawler.URIs
import Holumbus.Crawler.Util
import Holumbus.Crawler.XmlArrows
-- ------------------------------------------------------------
| ichistmeinname/holumbus | src/Holumbus/Crawler.hs | mit | 966 | 0 | 5 | 209 | 141 | 98 | 43 | 22 | 0 |
{-# LANGUAGE Haskell2010
, GeneralizedNewtypeDeriving
, DeriveDataTypeable
#-}
{-# OPTIONS
-Wall
-fno-warn-name-shadowing
#-}
-- | INTERNAL module is used to infer Haskell types from Java types.
module Foreign.Java.Bindings.Typomatic (
runTypomatic,
typomatic,
ArgInfo (..),
dataTName,
dataCName,
newtTName,
newtCName,
tyclTName
) where
import Foreign.Java.Util
-- import Foreign.Java.Utils
import Foreign.Java.Bindings.JavaTypes
import Control.Monad.State hiding (void)
import qualified Control.Monad.State as State
import Data.Functor.Identity
import Data.Generics
import qualified Data.Set as Set
import Data.Set (Set)
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.List as List
dataTName, dataCName, newtTName, newtCName, tyclTName :: String -> String
dataTName = (++ "''")
dataCName = id -- (++ "")
newtTName = (++ "'")
newtCName = (++ "'")
tyclTName = id -- (++ "")
-- | This is the information which is ultimately
-- gathered by the use of this module.
data ArgInfo = ArgInfo {
fSignature :: String,
fArguments :: [TVar],
fReturnType :: TVar,
fArgNames :: [String],
fJavaSignature :: String,
fJavaReturnType :: String,
fJniSignature :: String
}
--------------------
-- Type variables --
--------------------
data TVar = TVar String | TVars [TVar]
deriving (Eq, Ord, Show, Data, Typeable)
printTVar :: TVar -> String
printTVar var = case var of
(TVar v) -> v
(TVars vs) -> concat ["(", tail (concatMap ((' ':) . printTVar) vs), ")"]
---------------
-- Utilities --
---------------
printJniType :: JavaType -> String
printJniType t = case t of
JBoolean -> "JNI.boolean"
JChar -> "JNI.char"
JByte -> "JNI.byte"
JShort -> "JNI.short"
JInt -> "JNI.int"
JLong -> "JNI.long"
JFloat -> "JNI.float"
JDouble -> "JNI.double"
JObj n -> "JNI.object \"" ++ n ++ "\""
-- arrays are treated as objects:
JArr c -> "JNIS.object' \"[" ++ printJniRawType c ++ "\""
printJniRawType :: JavaType -> String
printJniRawType t = case t of
JBoolean -> "Z"
JChar -> "C"
JByte -> "B"
JShort -> "S"
JInt -> "I"
JLong -> "J"
JFloat -> "F"
JDouble -> "D"
JObj n -> 'L' : tr '.' '/' n ++ ";"
JArr c -> '[' : printJniRawType c
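-- For example (assuming 'tr' from Foreign.Java.Util substitutes '.' with '/'),
-- printJniRawType (JArr (JObj "java.lang.String")) yields "[Ljava/lang/String;",
-- the standard JNI descriptor for an array of String.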
printJniSignature :: JavaMethod -> String
printJniSignature method = show name ++ " JNI.::= " ++ args ++ ret
where
name = methodName method
args = concatMap ((++ " --> ") . printJniType) (methodParams method)
ret = maybe "JNI.void" printJniType (methodReturnType method)
typomatic :: JavaClass -> JavaMethod -> Typomatic ArgInfo
typomatic clazz method_ = do
let className = classFullName clazz
classParams = classTypeParams clazz
-- sanitize tyVars tied to the method by distinguishing
-- from tyVars tied to the class by adding an apostrophe
-- to the name of the tyVar if it is tied to the method.
methodTypeVars = map paramName (methodTypeParams method_)
safe var@(TyVar str)
| var `elem` methodTypeVars = TyVar (str ++ "'")
| otherwise = var
method = everywhere (mkT safe) method_
-- the following three functions create the list of
-- argument parameters, including @this@ (if the method
-- is not static) and the return type. The return type
-- is separated later on again.
thisParam = if null classParams
then NotSoGeneric className
else Parameterized {
jgtBasetype = className,
jgtParameters = (map (TypeVarReference . paramName) classParams)
}
params = (if methodStatic method then [] else [thisParam])
++ methodGenericParams method
++ [maybe (TypeVarReference (TyVar "()"))
(const $ methodGenericReturnType method)
(methodReturnType method)]
jtypes = (if methodStatic method then [] else [JObj className])
++ methodParams method
-- turns a JavaGenericType definition into type variables.
-- The names are taken from the monad via 'newVar'.
tvar param = case jgtType param of
WildcardT -> do
name <- newVar
return $ TVar name
ParameterizedT -> do
name <- newVar
params <- mapM tvar (jgtParameters param)
return $ TVars $ TVar name : params
GenericArrayT -> do
name <- newVar
return $ TVar name
TypeVarReferenceT -> do
let name = (tyVarName (jgtName param))
return $ TVar name
NotSoGenericT -> do
name <- newVar
return $ TVar name
-- creates a haskell signature (-> String)
signature typeVars returnVar = do
-- retrieve the context and turn each variable into a String.
contexts <- getContext >>= mapM (\(tvar, context) -> do
return $ context ++ " " ++ printTVar tvar)
let argTypes = concatMap ((++ " -> ") . printTVar) typeVars
-- the final type is wrapped in the Java monad
returnType = "JNI.Java " ++ printTVar returnVar
          -- finally assemble the context.
context = if null contexts then "" else
"(" ++ concat (List.intersperse ", " contexts) ++ ") => "
-- return the full signature, consisting of the context,
-- the type of the arguments, and the return type.
return $ concat [context, argTypes, returnType]
-- create a java signature (-> String)
javaSignature = do
let name = methodName method
args = map printJavaType $ methodParams method
return $ name ++ "(" ++ concat (List.intersperse ", " args) ++ ")"
-- if this is not a static method the first argument
    -- is /this/. This merely pushes the name into the list
-- of type variable names in the monad.
when (not $ methodStatic method) (pushVar "this")
-- get type variables for all arguments, including the
-- return type (as the return type may be the same as
-- one of the argument types).
--
-- Split the result into arguments and return var again,
-- since the return variable will get special treatment
-- henceforth.
(typeVars, returnVar_) <- breakLast <$> mapM tvar params
-- augment the return type variable, i.e. if it is not
-- a type variable at all, replace the variable name by
-- a constant reference to a specific type.
--
-- This is only the case with parameterized type variables
-- and not-so-generic ones.
returnVar <- case methodReturnType method of
Just (JObj typeName) -> case jgtType (methodGenericReturnType method) of
ParameterizedT -> do
clazz <- getClass typeName
let (TVars (TVar _ : ts)) = returnVar_
return $ TVars (TVar (newtTName (classModName clazz)) : ts)
NotSoGenericT -> do
clazz <- getClass typeName
return $ TVar (newtTName (classModName clazz))
_ -> return returnVar_
_ -> return returnVar_
-- Add contexts for all arguments and augment array types
let makeContext typeVar jtype = case jtype of
JObj name -> do
clazz <- getClass name
addContext typeVar $ tyclTName $ classModName clazz
return typeVar
JArr componentType -> do
addContext typeVar "JNIS.Array"
return typeVar
JBoolean -> do
addContext typeVar "JNIS.JBoolean"
return typeVar
JChar -> do
addContext typeVar "JNIS.JChar"
return typeVar
JByte -> do
addContext typeVar "JNIS.JByte"
return typeVar
JShort -> do
addContext typeVar "JNIS.JShort"
return typeVar
JInt -> do
addContext typeVar "JNIS.JInt"
return typeVar
JLong -> do
addContext typeVar "JNIS.JLong"
return typeVar
JFloat -> do
addContext typeVar "JNIS.JFloat"
return typeVar
JDouble -> do
addContext typeVar "JNIS.JDouble"
return typeVar
    -- Here makeContext is applied (see above). In the same pass a new
    -- set of typeVars (typeVars') is generated, since makeContext
    -- might further investigate arrays and create type variables for
    -- their component types.
typeVars' <- mapM (uncurry makeContext) (zip typeVars jtypes)
-- Create contexts for the ultimate return type.
returnVar' <- case methodReturnType method of
Nothing -> do
let tvar = TVar "void"
addContext tvar "JNIS.VoidResult"
return tvar
Just t -> case t of
JObj _ -> do
let tvar = TVars [TVar "object", returnVar]
addContext tvar "JNIS.ObjectResult"
return tvar
JArr _ -> do
let tvar = TVar "array"
addContext tvar "JNIS.ArrayResult"
return tvar
JBoolean -> do
let tvar = TVar "boolean"
addContext tvar "JNIS.BooleanResult"
return tvar
JChar -> do
let tvar = TVar "char"
addContext tvar "JNIS.CharResult"
return tvar
JByte -> do
let tvar = TVar "byte"
addContext tvar "JNIS.ByteResult"
return tvar
JShort -> do
let tvar = TVar "short"
addContext tvar "JNIS.ShortResult"
return tvar
JInt -> do
let tvar = TVar "int"
addContext tvar "JNIS.IntResult"
return tvar
JLong -> do
let tvar = TVar "long"
addContext tvar "JNIS.LongResult"
return tvar
JFloat -> do
let tvar = TVar "float"
addContext tvar "JNIS.FloatResult"
return tvar
JDouble -> do
let tvar = TVar "double"
addContext tvar "JNIS.DoubleResult"
return tvar
-- generate the Haskell signature (a String)
sig <- signature typeVars' returnVar'
-- generate the Java signature (a String).
  -- This is used for documentation purposes later on
-- (i.e. inserted as haddock docstring).
jsig <- javaSignature
-- generate the names of the arguments
let argNames = (if methodStatic method then id else ("this":) . init)
$ zipWith (\_ i -> 'a' : show i) typeVars [(1 :: Integer)..]
-- assemble and return all the calculated information
return $ ArgInfo {
fArguments = typeVars,
fReturnType = returnVar',
fArgNames = argNames,
fSignature = sig,
fJavaSignature = jsig,
fJavaReturnType = maybe "void" printJavaType (methodReturnType method),
fJniSignature = printJniSignature method
}
--------------------------------------------
-- The following are utilities for the monad
--------------------------------------------
-- | The state of the monad.
data TypomaticState = TypomaticState {
tVars :: [String],
tContext :: Set (TVar, String),
tParams :: [String],
tClasses :: Map String JavaClass
}
-- | Retrieve the definition of a class.
--
-- The monad has an internal store of class names
-- and their definitions. See 'tClasses'.
getClass :: String -> Typomatic JavaClass
getClass name = do
state <- State.get
return ((tClasses state) Map.! name)
-- | Get the current context as a list.
getContext :: Typomatic [(TVar, String)]
getContext = State.get >>= return . Set.toList . tContext
-- | Add a context for a specific type variable.
addContext :: TVar -> String -> Typomatic ()
addContext tvar string = do
state <- State.get
State.put (state {tContext = ((tvar, string) `Set.insert` tContext state)})
-- | Introduce a new name.
--
-- This simply takes the next element in the infinite
-- 'tVars' list and stores the tail back in the monad.
newVar :: Typomatic String
newVar = do
state <- State.get
let (v:vs) = tVars state
State.put (state { tVars = vs})
return v
-- | Push a new name in the front of the available names.
pushVar :: String -> Typomatic ()
pushVar name = do
state <- State.get
State.put (state {tVars = (name : tVars state)})
-- | The monad.
newtype Typomatic a = Typomatic { _runTypomatic :: StateT TypomaticState Identity a }
deriving (Monad, MonadState TypomaticState, Functor, Applicative)
-- | Run a computation in the monad.
runTypomatic :: Map String JavaClass -> Typomatic a -> a
runTypomatic classes =
let state = TypomaticState { -- initial state
tVars = map (('v':) . show) [(1 :: Integer)..],
tContext = Set.empty,
tParams = [],
tClasses = classes
} in fst . runIdentity . flip runStateT state . _runTypomatic
| fehu/haskell-java-bridge-fork | src/Foreign/Java/Bindings/Typomatic.hs | mit | 13,693 | 0 | 24 | 4,608 | 2,933 | 1,499 | 1,434 | 262 | 32 |
module Proteome.Test.ResolveTest where
import qualified Data.Map as Map (fromList)
import Hedgehog ((/==), (===))
import Path (Abs, Dir, absdir, parseAbsDir)
import Ribosome.File (canonicalPaths)
import Ribosome.Test.Run (UnitTest, unitTest)
import Ribosome.Test.Unit (fixture)
import Test.Tasty (TestTree, testGroup)
import Proteome.Config (defaultTypeMarkers)
import Proteome.Data.Project (Project (Project))
import Proteome.Data.ProjectConfig (ProjectConfig (ProjectConfig))
import Proteome.Data.ProjectMetadata (ProjectMetadata (DirProject))
import Proteome.Data.ProjectRoot (ProjectRoot (ProjectRoot))
import Proteome.Data.ProjectType (ProjectType)
import Proteome.Project.Resolve (resolveProject)
import Proteome.Test.Project (fn, l, la, li, ta, ti, tp)
import Proteome.Test.Unit (ProteomeTest, testDef)
paths :: [FilePath]
paths = [
"~/../test/dir",
"~"
]
test_canonicalPaths :: UnitTest
test_canonicalPaths = do
canon <- canonicalPaths paths
canon /== paths
root :: ProjectRoot
root = ProjectRoot [absdir|/projects/haskell/flagellum|]
typeMap :: Map ProjectType [ProjectType]
typeMap = Map.fromList [(tp, [ti, ta])]
config :: ProjectConfig
config =
ProjectConfig [[absdir|/projects|]] def def typeMap def (Map.fromList [(tp, l)]) (Map.fromList [(l, [li, la])])
targetProject :: Project
targetProject =
Project (DirProject fn root (Just tp)) [ti, ta] (Just l) [li, la]
typeMapTest :: ProteomeTest ()
typeMapTest = do
project <- resolveProject [] config (Just root) fn (Just tp)
targetProject === project
test_typeMap :: UnitTest
test_typeMap =
testDef typeMapTest
markerConfig :: ProjectConfig
markerConfig = ProjectConfig def def def def defaultTypeMarkers def def
markerTarget :: ProjectRoot -> Project
markerTarget root' =
Project (DirProject fn root' (Just tp)) [] (Just l) []
markerTest :: ProteomeTest ()
markerTest = do
dir <- parseAbsDir =<< fixture "projects/haskell/flagellum"
let root' = ProjectRoot dir
project <- resolveProject [] markerConfig (Just root') fn Nothing
markerTarget root' === project
test_marker :: UnitTest
test_marker =
testDef markerTest
test_resolve :: TestTree
test_resolve =
testGroup "project resolution" [
unitTest "canonicalize paths" test_canonicalPaths,
unitTest "project type map" test_typeMap,
unitTest "root marker" test_marker
]
| tek/proteome | packages/test/test/Proteome/Test/ResolveTest.hs | mit | 2,342 | 0 | 10 | 328 | 734 | 418 | 316 | -1 | -1 |
-- | This module provides version information and helpers to extract it.
--
-- Author: Thorsten Rangwich. See file <../LICENSE> for details.
module Version.Information
(
-- * Compile time and forever constants
module Version.Constants,
module Version.StaticVersion,
-- * Version information
versionString,
formatString,
grillVersion,
formatVersion,
-- * Validate version
checkGrill,
checkFormat,
-- * Parser to parse version from a string - only used by parsers
parseMagicBytes,
parseFormat,
parseVersion,
parseChecksum,
parseEndOfHeader
)
where
import qualified Control.Monad as Monad
import qualified Text.ParserCombinators.Parsec as Parsec
import Text.Printf as Printf
import Version.Constants -- needs to be imported unqualified or re-export will not work
import Version.StaticVersion -- same here
import Version.Types (SmallVersion)
-- | Version coded as string
versionString :: String
versionString = Printf.printf "%02d.%02d.%02d"
(fromIntegral Version.StaticVersion.grillMajor :: Int)
(fromIntegral Version.StaticVersion.grillMinor :: Int)
(fromIntegral Version.StaticVersion.grillMicro :: Int)
-- | Grill's calculation version.
grillVersion :: (SmallVersion, SmallVersion, SmallVersion)
grillVersion = (Version.StaticVersion.grillMajor, Version.StaticVersion.grillMinor, Version.StaticVersion.grillMicro)
-- | Sheet format version.
formatVersion :: (SmallVersion, SmallVersion, SmallVersion)
formatVersion = (Version.StaticVersion.formatMajor, Version.StaticVersion.formatMinor, Version.StaticVersion.formatMicro)
-- | Maximum sheet format grill can handle
formatString :: String
formatString = Printf.printf "%02d.%02d.%02d"
(fromIntegral Version.StaticVersion.formatMajor :: Int)
(fromIntegral Version.StaticVersion.formatMinor :: Int)
(fromIntegral Version.StaticVersion.formatMicro :: Int)
-- | Parse magic bytes at the beginning of every grill sheet
parseMagicBytes :: Parsec.Parser ()
parseMagicBytes = Parsec.string Version.Constants.grillPrefix >> return ()
-- | Help stub to parse one of the versions in the sheet header
parseVersionString :: Char -- ^ Identifier
-> String -- ^ String for fail messages
-> Parsec.Parser (SmallVersion, SmallVersion, SmallVersion) -- ^ Parsec data type for string parsers
parseVersionString prefix msg = do
_ <- Parsec.char prefix
sMajor <- Parsec.many1 Parsec.digit
_ <- Parsec.char '.'
sMinor <- Parsec.many1 Parsec.digit
_ <- Parsec.char '.'
sMicro <- Parsec.many1 Parsec.digit
Monad.when (length sMajor /= 2 || length sMinor /= 2 || length sMicro /= 2)
(fail $ "Invalid version string:" ++ sMajor ++ "." ++ sMinor ++ "." ++ sMicro ++ " for " ++ msg ++ ".")
return (read sMajor :: SmallVersion, read sMinor, read sMicro)
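-- For illustration, with a hypothetical prefix character 'V' this parser accepts
-- "V01.02.10" (exactly two digits per component) and yields the triple (1,2,10),
-- while "V1.2.10" is rejected by the length check above.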
-- | Parse sheet version in header.
parseFormat :: Parsec.Parser (SmallVersion, SmallVersion, SmallVersion)
parseFormat = parseVersionString Version.Constants.sheetPrefix "sheet"
-- | Parse calc engine version in header.
parseVersion :: Parsec.Parser (SmallVersion, SmallVersion, SmallVersion)
parseVersion = parseVersionString Version.Constants.calcEnginePrefix "calc engine"
-- | Parse checksum in sheet header. Not yet implemented, always gives empty string currently.
parseChecksum :: Parsec.Parser String
parseChecksum = do
_ <- Parsec.char Version.Constants.checksumPrefix
c <- Parsec.many $ Parsec.oneOf "ABCDEF0123456789"
return c -- FIXME: The whole function should be possible without do and without return
-- | Parse header terminator.
parseEndOfHeader :: Parsec.Parser ()
parseEndOfHeader = Parsec.string Version.Constants.grillSuffix >> return ()
-- | Check version help stub.
checkVersion :: (SmallVersion, SmallVersion, SmallVersion) -- ^ The version to check.
-> SmallVersion -- ^ The major version supported
-> String -- ^ String for error message construction
-> Maybe String
checkVersion (m, _, _) c s = if m > c then
Just $ "Your grill supports " ++ s ++ " version " ++ show c
++ ", but sheet requires version " ++ show m ++ "."
else
Nothing
-- | Check a given version if it is supported by the current calculation.
checkGrill :: (SmallVersion, SmallVersion, SmallVersion)
-> Maybe String
checkGrill v = checkVersion v Version.StaticVersion.grillMajor "calculation engine"
-- | Check a given version if it is supported by the current sheet formatter.
checkFormat :: (SmallVersion, SmallVersion, SmallVersion)
-> Maybe String
checkFormat v = checkVersion v Version.StaticVersion.formatMajor "sheet format"
| tnrangwi/grill | src/Version/Information.hs | mit | 4,869 | 0 | 17 | 1,045 | 897 | 499 | 398 | 75 | 2 |
module Glucose.Test.IR.Unchecked (module Glucose.Test.IR.Core, alias) where
import Glucose.Test.IR.Core
import Data.Text
import Glucose.IR
import Glucose.Parser.Source
alias :: FromSource Text -> FromSource Text -> FromSource (Definition Unchecked)
alias to from = definition to $ reference UnknownKind from Unknown
| sardonicpresence/glucose | test/Glucose/Test/IR/Unchecked.hs | mit | 329 | 0 | 9 | 49 | 96 | 54 | 42 | 7 | 1 |
module Ternary.Arbitraries where
import Test.QuickCheck
import Test.QuickCheck.Checkers hiding (Binop)
import Control.Monad (liftM2)
import Ternary.Core.Digit
import Ternary.Util.Triad (Triad, makeTriad)
import Ternary.List.Exact (Exact(Exact))
import Ternary.List.FiniteExact (FiniteExact, unsafeFinite)
instance Arbitrary T1 where
arbitrary = elements [M, O, P]
instance Arbitrary T2 where
arbitrary = elements allT2
instance Arbitrary T4 where
arbitrary = elements [Ma4, Ma3, Ma2, Ma1, Oa0, Pa1, Pa2, Pa3, Pa4]
instance Arbitrary Triad where
arbitrary = liftM2 makeTriad arbitrary exponent
where exponent = fmap getNonNegative arbitrary
-- safe because arbitrary lists are always finite
instance Arbitrary FiniteExact where
arbitrary = liftM2 construct arbitrary exponent
where construct as p = unsafeFinite (Exact as p)
exponent = fmap getNonNegative arbitrary
| jeroennoels/exact-real | test/Ternary/Arbitraries.hs | mit | 901 | 0 | 10 | 143 | 252 | 144 | 108 | 21 | 0 |
import Control.Monad
import Data.List
factorList :: Int -> [a] -> [[a]]
factorList _ [] = []
factorList p list | length list < p = []
factorList p list = [(take p list)] ++ (factorList p $ tail list)
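-- For reference, factorList p yields the length-p sliding windows of a list:
-- >>> factorList 2 [1,2,3,4]
-- [[1,2],[2,3],[3,4]]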
diagonals :: Int -> [[a]] -> [[a]]
diagonals _ [] = []
diagonals p m = concatMap (factorList p) lists
where
lists = (lasts (f lowerLeft)) ++
(lasts (f lowerRight)) ++
(heads (f upperLeft)) ++
(heads (f upperRight))
lowerLeft = zipWith take [0..] m
upperRight = zipWith drop [0..] m
lowerRight = zipWith take ll m
upperLeft = zipWith drop ll m
ll = [(n-1),(n-2)..0]
n = length m
heads :: [[a]] -> [[a]]
heads [] = []
heads list = [(map head list)] ++ (heads (f (map tail list)))
lasts :: [[a]] -> [[a]]
lasts [] = []
lasts list = [(map last list)] ++ (lasts (f (map init list)))
f :: [[a]] -> [[a]]
f = filter (not . null)
verticals :: Int -> [[a]] -> [[a]]
verticals p = (horizontals p . transpose)
horizontals :: Int -> [[a]] -> [[a]]
horizontals p = concatMap (factorList p)
solve :: [[Int]] -> Int -> Int
solve matrix p = maximum $ map product factors
where
factors = d ++ v ++ h
d = diagonals p matrix
v = verticals p matrix
h = horizontals p matrix
-- IO
rd :: String -> Int
rd x = read x :: Int
getInt :: IO Int
getInt = read `liftM` getLine :: IO Int
getInts :: IO [Int]
getInts = do
line <- getLine
return (map rd $ words line)
main :: IO ()
main = do
p <- getInt
n <- getInt
matrix <- replicateM n getInts
putStrLn $ show (solve matrix p)
| NorfairKing/project-euler | 011/haskell/solution.hs | gpl-2.0 | 1,721 | 0 | 13 | 566 | 832 | 437 | 395 | 51 | 1 |
{-# LANGUAGE GADTs, KindSignatures #-}
data CircuitF :: * -> * where
IdentityF :: Int -> CircuitF x | FiveEye/playground | lang/foldingDSL.hs | gpl-2.0 | 62 | 0 | 7 | 14 | 24 | 13 | 11 | -1 | -1 |
module GRSynth.States where
import OBDD
import Prelude hiding ((||), or, and, not)
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Bits
import Data.Maybe (fromMaybe, fromJust)
import Data.Functor
import Data.List (foldl')
type PState = [Bool]
-- | Representation of a set of boolean vectors using an ordered binary
-- decision diagram. Includes a specification of the dimension of the
-- vectors.
data StateSet = Implicit { obdd :: (OBDD Int)
, setDim :: Maybe Int
}
data ExplicitStateSet = Explicit { states :: S.Set PState
, explicitDim :: Int
}
deriving (Eq)
instance Show StateSet where
show = show . all_models . obdd
instance Eq StateSet where
(==) set1 set2 = (toExplicit set1) == (toExplicit set2)
--Contains no states
newBottom :: StateSet
newBottom = Implicit (constant False) Nothing
--Contains every state
newTop :: StateSet
newTop = Implicit (constant True) Nothing
--Binary and unary state set operators.
setOr :: StateSet -> StateSet -> StateSet
setOr = stateSetBinaryOp (OBDD.||)
--Intersection of two state sets
setAnd :: StateSet -> StateSet -> StateSet
setAnd = stateSetBinaryOp (OBDD.&&)
empty :: StateSet -> Bool
empty (Implicit set _) = OBDD.null set
-- | Defines a binary operator on two state sets in terms of an operator on their underlying
-- OBDD's. Requires that one or both dimensions be Nothing, or that, if they are known, they're equal.
stateSetBinaryOp :: (OBDD Int -> OBDD Int -> OBDD Int) -> StateSet -> StateSet -> StateSet
stateSetBinaryOp op set1 set2 = let obdd1 = obdd set1
obdd2 = obdd set2
in case setDim set1 of
Nothing -> Implicit (obdd1 `op` obdd2) (setDim set2)
Just n1 -> case setDim set2 of
Nothing -> Implicit (obdd1 `op` obdd2) (setDim set1)
Just n2 -> if n1 == n2
then Implicit (obdd1 `op` obdd2) (Just n1)
else error "Dimensions must match"
setNot :: StateSet -> StateSet
setNot set = Implicit (not $ obdd set) (setDim set)
contains :: StateSet -> PState -> Bool
contains set state = satisfiable $ foldl inject (obdd set) [0..n-1]
where inject cur i = instantiate i (state !! i) cur
n = maybe 0 id (setDim set)
singleton :: PState -> StateSet
singleton state = let n = length state
units = map (\i -> unit i (state !! i)) [0..n-1]
in Implicit (OBDD.and units) (Just n)
fromExplicit :: [PState] -> StateSet
fromExplicit list = foldl' (\accum state -> accum `setOr` (singleton state)) newBottom list
--Used for testing
toExplicit :: StateSet -> [PState]
toExplicit set = filter (set `contains`) (enumerateStates n)
where n = maybe 0 id (setDim set)
{-
- Propositions are encoded as the set of states in which they are true.
-}
type PProp = StateSet
fromPredicate :: [PState] -> (PState -> Bool) -> PProp
fromPredicate domain f = let trueStates = filter f domain
in fromExplicit trueStates
{-
- Actions are encoded as OBDDs over two copies of the variables. We use the
- first n variables for the output, so we can easily AND the resulting OBDD with
- other OBDDs.
- See forceAction
-}
type PAction = StateSet
fromFunction :: [PState] -> (PState -> [PState]) -> PAction
fromFunction domain f = foldl setOr newBottom
(map (\s -> fromSingleFunctionApplication s (f s)) domain)
fromSingleFunctionApplication :: PState -> [PState] -> PAction
fromSingleFunctionApplication input output = let combinedVectors = map (++input) output
in fromExplicit combinedVectors
fromRelation :: [(PState, PState)] -> PAction
fromRelation rel = let singletons = map (\(i, o) -> singleton (o ++ i)) rel
in foldl setOr newBottom singletons
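-- For example, fromRelation [([False,False],[True,False])] encodes the single
-- transition [False,False] -> [True,False] as the combined vector
-- [True,False,False,False]: output bits first, then input bits, matching the
-- variable layout described above.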
--The set of output states reachable from the given input
processAction :: StateSet -> PAction -> StateSet
processAction phi tr = let n = (fromJust $ setDim tr) `div` 2
inputForce = forceAction phi newTop
combined = tr `setAnd` inputForce
justOutputs = exists_many (S.fromList [n..(2*n-1)]) (obdd combined)
in Implicit justOutputs (Just n)
--The set of states from which a phi-state is reachable through the given PAction
throughAction :: StateSet -> PAction -> StateSet
throughAction phi tr = rebase $ forceAction phi tr
--Force an action to map onto phi-states.
--This is done by taking the intersection of valid (output, input) tuples with the set of desired phi-outputs.
forceAction :: StateSet -> PAction -> PAction
forceAction phi tr = tr `setAnd` (phi { setDim = (*2) <$> setDim phi })
forcedByAction :: StateSet -> PAction -> StateSet
forcedByAction phi tr = setNot $ throughAction (setNot phi) tr
--Whoo, power sets. This should be pretty efficient with laziness.
enumerateStates :: Int -> [PState]
enumerateStates dim = let cardinality = 2^dim::Int
ints = [0..cardinality-1]
toBitList n = map (testBit n) [0..dim-1]
in map toBitList ints
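-- Bit i of each integer becomes component i of the state vector, e.g.
-- >>> enumerateStates 2
-- [[False,False],[True,False],[False,True],[True,True]]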
rebase :: PAction -> StateSet
rebase tr = let n = maybe 0 (`div` 2) (setDim tr)
justInputs = exists_many (S.fromList [0..n-1]) (obdd tr)
hashes = all_models justInputs
stateList = concatMap (rebaseMapToState n) hashes
in fromExplicit stateList
rebaseMapToState :: Int -> M.Map Int Bool -> [PState]
rebaseMapToState n hash = let allStates = enumerateStates n
matchesAtEveryIndex state = all (\i ->
(state !! i) == (fromMaybe
(state !! i)
(M.lookup (i+n) hash)))
[0..n-1]
in filter matchesAtEveryIndex allStates
| johnbcoughlin/mucalc | src/GRSynth/States.hs | gpl-2.0 | 6,457 | 0 | 18 | 2,145 | 1,678 | 896 | 782 | 99 | 4 |
fibonacci' :: (Integral a) => a -> a
fibonacci' x
    | x < 0     = error "fibonacci': negative argument"
    | x <= 1    = 1
    | otherwise = fibonacci' (x-1) + fibonacci' (x-2)
| mingzhi0/misc | hs_code/fibonacci.hs | gpl-2.0 | 140 | 0 | 9 | 43 | 83 | 40 | 43 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
-- Main function
import Web.Scotty
import Control.Monad(liftM,liftM2,when)
import Control.Monad.IO.Class(liftIO)
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.Text.Lazy as T
import Network.HTTP.Types.Status(forbidden403, notFound404)
import Network.Wai.Middleware.Static
import Network.HTTP.Base
import Text.Regex
import Data.Aeson
import Network.HTTP.Conduit
import Control.Monad.IO.Class
import qualified Maps as M
import qualified Places as P
import qualified Nearby as N
import Template
--import Api
googleApiKey :: String
googleApiKey = ""
nearbyUrl :: String -> String
nearbyUrl location = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?"
++ "key=" ++ googleApiKey
++ "&location=" ++ location
++ "&rankby=distance"
++ "&name=" ++ urlEncode "planned parenthood"
getLocation :: String -> String
getLocation loc = "https://maps.googleapis.com/maps/api/geocode/json?"
++ "address=" ++ urlEncode loc
     ++ "&key=" ++ googleApiKey
getGPSCoor :: String -> Maybe [String]
getGPSCoor reg = matchRegex (mkRegex "([0-9.-]+) ([0-9.-]+)") reg
getNearbyClinics :: String -> IO C.ByteString
getNearbyClinics loc =
case getGPSCoor loc of
Just [slat,slong] -> simpleHttp $ nearbyUrl $ slat ++ "," ++ slong
Nothing -> do res <- simpleHttp $ getLocation loc
case decode (res) of
Just (M.Geolocresult jsn) ->
let (M.Geolocjson (M.Geolocgeometry (M.Geolocloc lat lng))) = head jsn
in simpleHttp $ nearbyUrl $ show lat ++ "," ++ show lng
Nothing -> return "Shit broke man"
placeUrl :: String -> String
placeUrl place = "https://maps.googleapis.com/maps/api/place/details/json?"
++ "placeid=" ++ urlEncode place
        ++ "&key=" ++ urlEncode googleApiKey
getClinicInfo :: String -> IO C.ByteString
getClinicInfo placeid = do result <- simpleHttp $ placeUrl placeid
--let placeinfo = decodeJSON $ C.unpack result
-- P.PlacesResult (dets:_) = placeinfo
-- P.PlaceDetails
-- { opening_hours = PlaceHours open pers
-- , formatted_address = addr
-- , formatted_phone_number = numb
-- , permanently_closed = clsd
-- } = dets
-- days = encodeJSON pers
return result
directionsUrl :: String -> String -> String
directionsUrl orig dest = "https://maps.googleapis.com/maps/api/directions/json?"
                          ++ "key=" ++ googleApiKey
                          ++ "&origin=" ++ urlEncode orig
                          ++ "&destination=" ++ urlEncode dest
                          ++ "&mode=transit"
getRouteFromStr :: Maybe [String] -> Maybe [String] -> IO C.ByteString
getRouteFromStr (Just [lat1,lng1]) (Just [lat2,lng2]) = do
res <- simpleHttp $ directionsUrl geoStr1 geoStr2
return res
where geoStr1 = lat1 ++ "," ++ lng1
geoStr2 = lat2 ++ "," ++ lng2
-- Main loop
mainloop :: IO ()
mainloop = scotty 3000 $ do
middleware $ staticPolicy (noDots >-> addBase "static")
get "/" (html renderIndex)
get "/clinics/" $ do
location <- param "location"
clinics <- liftIO $ getNearbyClinics location
html $ renderResults clinics
post "/getDetails" $ do
placeid <- param "placeid"
clinicDetails <- liftIO $ getClinicInfo placeid
raw clinicDetails
get "/getDirections" $ do
start <- param "start"
end <- param "end"
route <- liftIO (getRouteFromStr (getGPSCoor start) (getGPSCoor end))
raw route
-- Entry Point
main :: IO ()
main = do
mainloop
| emollient/Aphrodite | Aphrodite/Aphrodite.hs | gpl-2.0 | 4,385 | 0 | 23 | 1,544 | 931 | 477 | 454 | 83 | 3 |
{-# LANGUAGE OverloadedStrings #-}
-- Copyright (C) 2014 Sami Liedes <sami.liedes@iki.fi>
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 2 of the
-- License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
-- 02110-1301, USA.
module Main (main) where
import LoadImage
import qualified NonoSVG as S
import qualified Nonogram as N
import Control.Applicative ((<$>))
import System.Environment (getArgs)
import System.Exit (exitWith, ExitCode(..))
import Control.Monad (when)
import System.IO (hPutStrLn, stderr)
import qualified Data.Configurator as C
nPuzzle :: S.Puzzle -> N.Puzzle
nPuzzle s = N.puzzle (S.hClues s) (S.vClues s)
mkNono :: [[Double]] -> S.Puzzle
mkNono = S.mkPuzzle . map (map $ \x -> if x < 0.0 then '#' else '.')
loadThreshold :: IO Double
loadThreshold = do
cfg <- C.load [ C.Required "nonosvg.conf" ]
C.require cfg "threshold"
threshold :: Double -> [[Double]] -> [[Double]]
threshold th = map $ map $ subtract th
equalize :: [[Double]] -> [[Double]]
equalize ds =
let min_ = minimum $ concat ds
max_ = maximum $ concat ds
remap x = (x-min_) / max_
in map (map remap) ds
main :: IO ()
main = do
args <- getArgs
when (length args /= 1) $ do
hPutStrLn stderr "Usage: pngToNono filename.png"
exitWith $ ExitFailure 1
thresh <- loadThreshold
svgConfig <- S.loadConfig
let fname = head args
im <- equalize <$> loadToDoubles fname
let s = mkNono $ threshold thresh im
hPutStrLn stderr $ S.showPuzzle s
-- A puzzle is simple if it can be solved by propagation alone
hPutStrLn stderr $ "puzzle is simple: " ++ show (N.isSimple $ nPuzzle s)
putStr $ S.puzzleToSvg svgConfig s
| sliedes/nonogram-svg | pngToNono.hs | gpl-2.0 | 2,233 | 0 | 11 | 424 | 565 | 302 | 263 | 41 | 2 |
{-# OPTIONS_GHC -Wall #-}
module Main where
import Test.Framework
import GHC.IO.Encoding
import qualified Tests.Old
import qualified Tests.Readers.LaTeX
import qualified Tests.Readers.Markdown
import qualified Tests.Readers.Org
import qualified Tests.Readers.RST
import qualified Tests.Writers.ConTeXt
import qualified Tests.Writers.LaTeX
import qualified Tests.Writers.HTML
import qualified Tests.Writers.Docbook
import qualified Tests.Writers.Native
import qualified Tests.Writers.Markdown
import qualified Tests.Writers.AsciiDoc
import qualified Tests.Shared
import qualified Tests.Walk
import Text.Pandoc.Shared (inDirectory)
tests :: [Test]
tests = [ testGroup "Old" Tests.Old.tests
, testGroup "Shared" Tests.Shared.tests
, testGroup "Walk" Tests.Walk.tests
, testGroup "Writers"
[ testGroup "Native" Tests.Writers.Native.tests
, testGroup "ConTeXt" Tests.Writers.ConTeXt.tests
, testGroup "LaTeX" Tests.Writers.LaTeX.tests
, testGroup "HTML" Tests.Writers.HTML.tests
, testGroup "Docbook" Tests.Writers.Docbook.tests
, testGroup "Markdown" Tests.Writers.Markdown.tests
, testGroup "AsciiDoc" Tests.Writers.AsciiDoc.tests
]
, testGroup "Readers"
[ testGroup "LaTeX" Tests.Readers.LaTeX.tests
, testGroup "Markdown" Tests.Readers.Markdown.tests
, testGroup "Org" Tests.Readers.Org.tests
, testGroup "RST" Tests.Readers.RST.tests
]
]
main :: IO ()
main = do
setLocaleEncoding utf8
-- we ignore command-line arguments, since we're having cabal pass
-- the build directory as first argument, and we don't want test-framework
-- to choke on that.
inDirectory "tests" $ defaultMainWithArgs tests []
| nickbart1980/pandoc | tests/test-pandoc.hs | gpl-2.0 | 1,783 | 0 | 9 | 352 | 357 | 216 | 141 | 40 | 1 |
-- ID: FIB
-- Name: Rabbits and Recurrence relations
-- Author: Samuel Jackson
-- Email: samueljackson@outlook.com
-- Description: computes rabbit pairs over n months, with each rabbit producing k pairs.
rabbitPairs :: Int -> Int -> Int
rabbitPairs n k = last $ take n $ fibs k
where
--compute the fibonacci sequence Fn = Fn-1 + k*Fn-2
fibs :: Int -> [Int]
fibs k = map fst $ iterate (\(a,b) -> (b,a*k+b)) (1,1)
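-- For example, rabbitPairs 5 3 == 19: with k = 3 the sequence runs 1, 1, 4, 7, 19, ...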
| samueljackson92/rosalind | src/FIB_rabbits_and_recurrence_relations.hs | gpl-2.0 | 438 | 0 | 13 | 100 | 112 | 62 | 50 | 4 | 1 |
module T where
import Tests.Basis
c = proc () ->
(| combLoop (\x ->
do x' <- (| delayAC (falseA -< ()) (| delayAC (falseA -< ()) (returnA -< x) |) |)
dupA <<< notA -< x'
) |)
prop_correct = take n (simulate c lhs) == take n rhs
where
n = 50
lhs = repeat ()
rhs = cycle [true, true, false, false]
test_constructive = isJust (isConstructive c)
| peteg/ADHOC | Tests/02_SequentialCircuits/012_alternate_combLoop_period_two.hs | gpl-2.0 | 414 | 7 | 19 | 142 | 186 | 98 | 88 | -1 | -1 |
{- Game2048.Board.IntMapBoard • represent grid as IntMap (patricia tree)
- Copyright ©2014 Christopher League <league@contrapunctus.net>
-
- This program is free software: you can redistribute it and/or modify it
- under the terms of the GNU General Public License as published by the Free
- Software Foundation, either version 3 of the License, or (at your option)
- any later version.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ViewPatterns #-}
module Game2048.Board.IntMapBoard(BoardT, BoardT') where
import qualified Data.Foldable as F
import Data.IntMap.Lazy (IntMap)
import qualified Data.IntMap.Lazy as Map
import Game2048.Board.Base
import Game2048.Coord
import Game2048.Tile
import Game2048.Util
import Prelude hiding (Left, Right)
newtype BoardT' a = Board {unBoard :: IntMap a}
deriving (Eq, Show, F.Foldable)
type BoardT = BoardT' Tile
instance Zero BoardT where
zero = Board Map.empty
instance Board' BoardT' where
freeCells (unBoard -> b) = filter p every
where p c = Map.notMember (fromEnum c) b
freeCount b = gridSize - Map.size (unBoard b)
fromList tt = Board $ Map.fromList $ foldlWithIndex (foldlWithIndex . f) [] tt
where f i j cts t =
if isEmpty t then cts
else (fromEnum(coord i j), t) : cts
move b = Board . Map.fromList . moveViaCoordLists b
placeTile t c = Board . Map.insert (fromEnum c) t . unBoard
tileAt b c = Map.findWithDefault zero (fromEnum c) (unBoard b)
foldr = F.foldr
foldr1 = F.foldr1
| league/game2048 | src/Game2048/Board/IntMapBoard.hs | gpl-3.0 | 1,662 | 0 | 13 | 377 | 395 | 216 | 179 | 31 | 0 |
{-# Language TypeSynonymInstances #-}
{-# Language FlexibleInstances #-}
module Infsabot.RobotAction.Logic (
RobotProgram, RobotProgramResult,
KnownState(KnownState),
peekAtSpot, material, stateLocation, stateAge, stateMemory, stateMessages,
RobotAction(Die, Noop, MoveIn, Dig, Spawn, Fire, Send),
SpawnAction(SpawnAction), SendAction(SendAction), FireAction(FireAction),
newProgram, newAppearance, newMaterial, newMemory, newDirection,
fireDirection, materialExpended,
messageToSend, sendDirection,
orderOfOperations,
actionCost
) where
import Infsabot.Base.Interface
import Infsabot.Parameters
import Infsabot.Tools.Interface
-- A robot program takes the Robot's state and returns a RobotProgramResult
type RobotProgram = KnownState -> RobotProgramResult
-- A robot program result consists of an action and a potentially modified internal state
type RobotProgramResult = (RobotAction, InternalState)
-- The Robot's concept of self.
data KnownState = KnownState {
-- Function the Robot can use to see around it.
-- Returns Nothing if the robot can't see that far.
peekAtSpot :: [RDirection] -> Maybe SeenSpot,
-- The quantity of material the robot has
material :: Int,
-- The current location of the robot
stateLocation :: (Int, Int),
-- The robot's age
stateAge :: Int,
-- The robot's memory
stateMemory :: InternalState,
-- The robot's received messages as a list of pairs of
-- message and direction received.
stateMessages :: [(String, RDirection)]
} deriving Show
-- Represents an action a robot can take.
-- If the action is impossible, nothing will occur
data RobotAction =
-- Robot will die
Die |
-- Robot will do nothing
Noop |
-- Robot will fire in a given direction
Fire FireAction |
-- Robot will send a message in a given direction
Send SendAction |
-- Robot will dig
Dig |
-- Robot will move in the given Direction
MoveIn RDirection |
-- Robot will spawn a new Robot
Spawn SpawnAction
deriving (Show, Eq)
data FireAction = FireAction {
-- Material devoted to this task.
-- More material means greater blow
materialExpended :: Natural,
-- Direction to fire in
fireDirection :: RDirection
} deriving (Show, Eq)
data SendAction = SendAction {
-- The message to send to another robot
messageToSend :: String,
-- The direction to send the message in
sendDirection :: RDirection
} deriving (Show, Eq)
data SpawnAction = SpawnAction {
-- The direction the new robot will be placed in
newDirection :: RDirection,
-- The program the new Robot will have
newProgram :: RobotProgram,
-- The appearance of the new Robot
newAppearance :: RobotAppearance,
-- The quantity of material to transfer to the new robot
newMaterial :: Natural,
-- The memory of the new robot
newMemory :: InternalState
} deriving (Show, Eq)
instance Show ([RDirection] -> Maybe SeenSpot) where
show _ = "Classified"
instance Show RobotProgram where
show _ = "basicProgram A"
instance Eq RobotProgram where
_ == _ = True
orderOfOperations :: RobotAction -> Int
orderOfOperations Die = 0
orderOfOperations Noop = 1
orderOfOperations (Fire _) = 2
orderOfOperations (Send _) = 3
orderOfOperations Dig = 4
orderOfOperations (MoveIn _) = 5
orderOfOperations (Spawn _) = 6
-- Outputs the cost of performing the given action.
actionCost :: Parameters -> RobotAction -> Natural
actionCost p Noop = paramNoopCost p
actionCost _ Die = 0
actionCost p (MoveIn _) = paramMoveCost p
actionCost p Dig = paramDigCost p
actionCost p (Spawn s) = newMaterial s + paramNewRobotCost p
actionCost p (Fire f) = materialExpended f + paramFireCost p
actionCost p (Send _) = actionCost p Noop
| kavigupta/Infsabot | Infsabot/RobotAction/Logic.hs | gpl-3.0 | 4,032 | 0 | 10 | 1,023 | 710 | 424 | 286 | 83 | 1 |
module Main where
combineWords adjectives nouns = [adjective ++ " " ++ noun | adjective <- adjectives, noun <- nouns] | yumerov/haskell-study | learnyouahaskell/00-starting-out/list-product.hs | gpl-3.0 | 118 | 0 | 7 | 20 | 41 | 22 | 19 | 2 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.People.People.DeleteContactPhoto
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Delete a contact\'s photo.
--
-- /See:/ <https://developers.google.com/people/ People API Reference> for @people.people.deleteContactPhoto@.
module Network.Google.Resource.People.People.DeleteContactPhoto
(
-- * REST Resource
PeopleDeleteContactPhotoResource
-- * Creating a Request
, peopleDeleteContactPhoto
, PeopleDeleteContactPhoto
-- * Request Lenses
, pdcpXgafv
, pdcpUploadProtocol
, pdcpResourceName
, pdcpAccessToken
, pdcpUploadType
, pdcpSources
, pdcpPersonFields
, pdcpCallback
) where
import Network.Google.People.Types
import Network.Google.Prelude
-- | A resource alias for @people.people.deleteContactPhoto@ method which the
-- 'PeopleDeleteContactPhoto' request conforms to.
type PeopleDeleteContactPhotoResource =
"v1" :>
CaptureMode "resourceName" "deleteContactPhoto" Text
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParams "sources" PeopleDeleteContactPhotoSources
:>
QueryParam "personFields" GFieldMask :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Delete '[JSON] DeleteContactPhotoResponse
-- | Delete a contact\'s photo.
--
-- /See:/ 'peopleDeleteContactPhoto' smart constructor.
data PeopleDeleteContactPhoto =
PeopleDeleteContactPhoto'
{ _pdcpXgafv :: !(Maybe Xgafv)
, _pdcpUploadProtocol :: !(Maybe Text)
, _pdcpResourceName :: !Text
, _pdcpAccessToken :: !(Maybe Text)
, _pdcpUploadType :: !(Maybe Text)
, _pdcpSources :: !(Maybe [PeopleDeleteContactPhotoSources])
, _pdcpPersonFields :: !(Maybe GFieldMask)
, _pdcpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PeopleDeleteContactPhoto' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pdcpXgafv'
--
-- * 'pdcpUploadProtocol'
--
-- * 'pdcpResourceName'
--
-- * 'pdcpAccessToken'
--
-- * 'pdcpUploadType'
--
-- * 'pdcpSources'
--
-- * 'pdcpPersonFields'
--
-- * 'pdcpCallback'
peopleDeleteContactPhoto
:: Text -- ^ 'pdcpResourceName'
-> PeopleDeleteContactPhoto
peopleDeleteContactPhoto pPdcpResourceName_ =
PeopleDeleteContactPhoto'
{ _pdcpXgafv = Nothing
, _pdcpUploadProtocol = Nothing
, _pdcpResourceName = pPdcpResourceName_
, _pdcpAccessToken = Nothing
, _pdcpUploadType = Nothing
, _pdcpSources = Nothing
, _pdcpPersonFields = Nothing
, _pdcpCallback = Nothing
}
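-- For illustration, a minimal request needs only the resource name; the value
-- below is a hypothetical placeholder of the usual "people/c..." form:
--
-- > peopleDeleteContactPhoto "people/c1234567890"
--
-- All optional fields start out as Nothing and can then be set via the request lenses.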
-- | V1 error format.
pdcpXgafv :: Lens' PeopleDeleteContactPhoto (Maybe Xgafv)
pdcpXgafv
= lens _pdcpXgafv (\ s a -> s{_pdcpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pdcpUploadProtocol :: Lens' PeopleDeleteContactPhoto (Maybe Text)
pdcpUploadProtocol
= lens _pdcpUploadProtocol
(\ s a -> s{_pdcpUploadProtocol = a})
-- | Required. The resource name of the contact whose photo will be deleted.
pdcpResourceName :: Lens' PeopleDeleteContactPhoto Text
pdcpResourceName
= lens _pdcpResourceName
(\ s a -> s{_pdcpResourceName = a})
-- | OAuth access token.
pdcpAccessToken :: Lens' PeopleDeleteContactPhoto (Maybe Text)
pdcpAccessToken
= lens _pdcpAccessToken
(\ s a -> s{_pdcpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pdcpUploadType :: Lens' PeopleDeleteContactPhoto (Maybe Text)
pdcpUploadType
= lens _pdcpUploadType
(\ s a -> s{_pdcpUploadType = a})
-- | Optional. A mask of what source types to return. Defaults to
-- READ_SOURCE_TYPE_CONTACT and READ_SOURCE_TYPE_PROFILE if not set.
pdcpSources :: Lens' PeopleDeleteContactPhoto [PeopleDeleteContactPhotoSources]
pdcpSources
= lens _pdcpSources (\ s a -> s{_pdcpSources = a}) .
_Default
. _Coerce
-- | Optional. A field mask to restrict which fields on the person are
-- returned. Multiple fields can be specified by separating them with
-- commas. Defaults to empty if not set, which will skip the post mutate
-- get. Valid values are: * addresses * ageRanges * biographies * birthdays
-- * calendarUrls * clientData * coverPhotos * emailAddresses * events *
-- externalIds * genders * imClients * interests * locales * locations *
-- memberships * metadata * miscKeywords * names * nicknames * occupations
-- * organizations * phoneNumbers * photos * relations * sipAddresses *
-- skills * urls * userDefined
pdcpPersonFields :: Lens' PeopleDeleteContactPhoto (Maybe GFieldMask)
pdcpPersonFields
= lens _pdcpPersonFields
(\ s a -> s{_pdcpPersonFields = a})
-- | JSONP
pdcpCallback :: Lens' PeopleDeleteContactPhoto (Maybe Text)
pdcpCallback
= lens _pdcpCallback (\ s a -> s{_pdcpCallback = a})
instance GoogleRequest PeopleDeleteContactPhoto where
type Rs PeopleDeleteContactPhoto =
DeleteContactPhotoResponse
type Scopes PeopleDeleteContactPhoto =
'["https://www.googleapis.com/auth/contacts"]
requestClient PeopleDeleteContactPhoto'{..}
= go _pdcpResourceName _pdcpXgafv _pdcpUploadProtocol
_pdcpAccessToken
_pdcpUploadType
(_pdcpSources ^. _Default)
_pdcpPersonFields
_pdcpCallback
(Just AltJSON)
peopleService
where go
= buildClient
(Proxy :: Proxy PeopleDeleteContactPhotoResource)
mempty
| brendanhay/gogol | gogol-people/gen/Network/Google/Resource/People/People/DeleteContactPhoto.hs | mpl-2.0 | 6,456 | 0 | 17 | 1,428 | 880 | 514 | 366 | 130 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Users.RevokeDeviceAccess
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Revokes access to all devices currently provisioned to the user. The
-- user will no longer be able to use the managed Play store on any of
-- their managed devices. This call only works with EMM-managed accounts.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.users.revokeDeviceAccess@.
module Network.Google.Resource.AndroidEnterprise.Users.RevokeDeviceAccess
(
-- * REST Resource
UsersRevokeDeviceAccessResource
-- * Creating a Request
, usersRevokeDeviceAccess
, UsersRevokeDeviceAccess
-- * Request Lenses
, urdaXgafv
, urdaUploadProtocol
, urdaEnterpriseId
, urdaAccessToken
, urdaUploadType
, urdaUserId
, urdaCallback
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.users.revokeDeviceAccess@ method which the
-- 'UsersRevokeDeviceAccess' request conforms to.
type UsersRevokeDeviceAccessResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"users" :>
Capture "userId" Text :>
"deviceAccess" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Revokes access to all devices currently provisioned to the user. The
-- user will no longer be able to use the managed Play store on any of
-- their managed devices. This call only works with EMM-managed accounts.
--
-- /See:/ 'usersRevokeDeviceAccess' smart constructor.
data UsersRevokeDeviceAccess =
UsersRevokeDeviceAccess'
{ _urdaXgafv :: !(Maybe Xgafv)
, _urdaUploadProtocol :: !(Maybe Text)
, _urdaEnterpriseId :: !Text
, _urdaAccessToken :: !(Maybe Text)
, _urdaUploadType :: !(Maybe Text)
, _urdaUserId :: !Text
, _urdaCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersRevokeDeviceAccess' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'urdaXgafv'
--
-- * 'urdaUploadProtocol'
--
-- * 'urdaEnterpriseId'
--
-- * 'urdaAccessToken'
--
-- * 'urdaUploadType'
--
-- * 'urdaUserId'
--
-- * 'urdaCallback'
usersRevokeDeviceAccess
:: Text -- ^ 'urdaEnterpriseId'
-> Text -- ^ 'urdaUserId'
-> UsersRevokeDeviceAccess
usersRevokeDeviceAccess pUrdaEnterpriseId_ pUrdaUserId_ =
UsersRevokeDeviceAccess'
{ _urdaXgafv = Nothing
, _urdaUploadProtocol = Nothing
, _urdaEnterpriseId = pUrdaEnterpriseId_
, _urdaAccessToken = Nothing
, _urdaUploadType = Nothing
, _urdaUserId = pUrdaUserId_
, _urdaCallback = Nothing
}
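-- For illustration (both identifiers below are hypothetical placeholders):
--
-- > usersRevokeDeviceAccess "C012abc34" "someUserId"
--
-- builds a request with every optional field left at Nothing.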
-- | V1 error format.
urdaXgafv :: Lens' UsersRevokeDeviceAccess (Maybe Xgafv)
urdaXgafv
= lens _urdaXgafv (\ s a -> s{_urdaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
urdaUploadProtocol :: Lens' UsersRevokeDeviceAccess (Maybe Text)
urdaUploadProtocol
= lens _urdaUploadProtocol
(\ s a -> s{_urdaUploadProtocol = a})
-- | The ID of the enterprise.
urdaEnterpriseId :: Lens' UsersRevokeDeviceAccess Text
urdaEnterpriseId
= lens _urdaEnterpriseId
(\ s a -> s{_urdaEnterpriseId = a})
-- | OAuth access token.
urdaAccessToken :: Lens' UsersRevokeDeviceAccess (Maybe Text)
urdaAccessToken
= lens _urdaAccessToken
(\ s a -> s{_urdaAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
urdaUploadType :: Lens' UsersRevokeDeviceAccess (Maybe Text)
urdaUploadType
= lens _urdaUploadType
(\ s a -> s{_urdaUploadType = a})
-- | The ID of the user.
urdaUserId :: Lens' UsersRevokeDeviceAccess Text
urdaUserId
= lens _urdaUserId (\ s a -> s{_urdaUserId = a})
-- | JSONP
urdaCallback :: Lens' UsersRevokeDeviceAccess (Maybe Text)
urdaCallback
= lens _urdaCallback (\ s a -> s{_urdaCallback = a})
instance GoogleRequest UsersRevokeDeviceAccess where
type Rs UsersRevokeDeviceAccess = ()
type Scopes UsersRevokeDeviceAccess =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient UsersRevokeDeviceAccess'{..}
= go _urdaEnterpriseId _urdaUserId _urdaXgafv
_urdaUploadProtocol
_urdaAccessToken
_urdaUploadType
_urdaCallback
(Just AltJSON)
androidEnterpriseService
where go
= buildClient
(Proxy :: Proxy UsersRevokeDeviceAccessResource)
mempty
| brendanhay/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Users/RevokeDeviceAccess.hs | mpl-2.0 | 5,678 | 0 | 20 | 1,312 | 793 | 462 | 331 | 118 | 1 |
module Main where
import Data.Function
--Identity monad
type I a = a
--Identity Function
unitI :: a -> a
unitI a = a
-- plain postfix application (always call)
bindI :: a -> (a -> I b) -> I b
bindI a f = f a
-- just creating as a common f# operator
-- https://stackoverflow.com/a/5758326/5397116
(|>) :: a -> (a -> I b) -> I b
(|>) a f = f a
--showI :: a -> String
showI a = show a
f = \x -> x*2
-- lift one to the I monad
i1 = unitI 1
-- bind I1 to f. In the I world, this is the same as applying f to i1
i2 = i1 `bindI` f
-- same as the two last lines in just one line
i3 = (unitI 1) `bindI` f
-- same thing without monads involved with the fsharp operator
i4 = 1 |> f
-- same thing with the default haskell operator
i5 = 1 & f
--Monad M
data M a = M a
unitM :: a -> M a
unitM a = M a
bindM :: M a -> (a -> M b) -> M b
bindM (M a) f = f a
--Monad E
data E a = Ok a | Fail String
unitE :: a -> E a
unitE a = Ok a
failE :: String -> E a
failE s = Fail s
-- conditional postfix application (only when success)
-- on error I do not call f, and propagates the error.
bindE :: E a -> (a -> E b) -> E b
bindE (Ok a) f = f a
bindE (Fail s) f = Fail s
showE :: (Show a) => E a -> String
showE (Ok a) = "OK " ++ (show a)
showE (Fail s) = "FAIL " ++ s
--Monad S
-- this looks like a shadow state that will run between the main
-- functions.
type S a b = b -> (a, b)
--actually creates a pair with the iteration argument and the current state
unitS :: a -> S a b
unitS a = \s0 -> (a, s0)
bindS :: S a s -> (a -> S b s) -> S b s
bindS m k = \s0 -> let
      (a, s1) = m s0   -- run the first computation with the incoming state
      (b, s2) = k a s1 -- feed its result and the updated state to k; b is the value returned
   in (b, s2)
--showS m = let (a, s1) = m 0
-- in "{a:" ++ (show a) ++ ",s1:" ++ (show s1) ++ "}"
--Monad S2 -- more close to class oriented languages
data S2 this = This this
unitS2 this = This this
bindS2 (This this) k = (flip k) this
--Stack using State Monad
pop :: [Int] -> (Int,[Int])
pop (x:xs) = (x,xs)
push :: Int -> [Int] -> [Int]
push a xs = a:xs
x = show i5
main = do
let mysum a b = Ok $ a + b
let mydiv a b = if (b == 0) then Fail "Div by zero" else Ok $ a / b
putStrLn "MONAD E"
--MONAD E
let e1 = Ok 1
  --e2 = Ok 1 + 1 does not compile because function application binds tighter:
  --the parser thinks that we want e2 = (Ok 1) + 1
  -- to solve this we can use the $ operator, which just applies the function
-- ($) :: (a -> b) -> a -> b
let e2_1 = Ok $ 1 + 1
  -- or of course just parenthesize the expression correctly
let e2_2 = Ok (1 + 1)
putStrLn $ showE e2_1
-- bindE will call mysum because bindE of
-- OK is just a postfix application
putStrLn $ showE $ e2_1 `bindE` (mysum 1)
let e3 = mydiv 1 0
putStrLn $ showE e3
  -- bindE will NOT call mysum because bindE of
-- FAIL is short-circuit
putStrLn $ showE $ e3 `bindE` (mysum 1)
putStrLn "MONAD E"
--MONAD S - Stack without bind
let stacka1 = [1,2,3]
let (one, stacka2) = pop stacka1
let (two, stacka3) = pop stacka2
let (three, stacka4) = pop stacka3
putStrLn (show one)
putStrLn (show two)
putStrLn (show three)
--MONAD S - Stack with bind
let stackb1 = [1,2,3]
let qtd = \s -> ()
-- first we lift the push argument "i" to the state monad
  -- then we bind the lifted value with the iteration function
let push2stack i = (unitS ()) `bindS` (\a -> push i)
let pop2stack = (unitS ()) `bindS` (\a xs -> pop xs)
let alg = do
a <- pop2stack
b <- pop2stack
return b
putStrLn $ show $ alg stackb1
--
-- let push2stackb1 = (unitS2 stackb1) `bindS2` push
-- let pop2stackb1 = (unitS2 stackb1) `bindS2` pop
-- let ((),stackb3) = push2stackb1 13
-- let (thirteen, stackb4) = pop2stackb1
-- putStrLn (show thirteen)
| xunilrj/sandbox | sources/haskell/papers/reactive.hs | apache-2.0 | 4,438 | 0 | 13 | 1,633 | 1,253 | 657 | 596 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module DecisionProcedure.Opra2
( module DecisionProcedure.Opra
) where
-- standard modules
import Data.Maybe
import qualified Data.Set as Set
-- local modules
import Basics
import Calculus.Opra2
import DecisionProcedure
import DecisionProcedure.AlgebraicClosure
--import DecisionProcedure.AlgebraicGeometric
import DecisionProcedure.Opra
instance HasBinAClosureGqr ARel Opra2
instance HasBinAClosureGqr GRel Opra2
instance HasBinAClosureSparq ARel Opra2
instance HasBinAClosureSparq GRel Opra2
--instance HasAReasoning ARel Opra2
--instance HasAReasoning GRel Opra2
instance HasDecisionProcedure (ARel Opra2) where
procedures _ =
[ algebraicClosureGQR
, algebraicClosure
-- , algebraicReasoning
] ++ map (firstApply opramNetToOpraNetAtomic)
(procedures (undefined :: ARel Opra))
instance HasDecisionProcedure (GRel Opra2) where
procedures _ =
[ algebraicClosureGQR
, algebraicClosure
]
| spatial-reasoning/zeno | src/DecisionProcedure/Opra2.hs | bsd-2-clause | 1,053 | 0 | 11 | 208 | 177 | 99 | 78 | 24 | 0 |
{-# LANGUAGE TupleSections, TemplateHaskell #-}
{-| Configuration server for the metadata daemon.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Metad.ConfigServer where
import Control.Exception (finally)
import Control.Monad.Reader
import Ganeti.Path as Path
import Ganeti.Daemon (DaemonOptions, cleanupSocket, describeError)
import Ganeti.Runtime (GanetiDaemon(..))
import Ganeti.THH.RPC
import Ganeti.UDSServer (ConnectConfig(..), ServerConfig(..))
import qualified Ganeti.UDSServer as UDSServer
import Ganeti.Metad.ConfigCore
-- * The handler that converts RPCs to calls to the above functions
handler :: RpcServer MetadMonadInt
handler = $( mkRpcM exportedFunctions )
-- * The main server code
start :: DaemonOptions -> MetadHandle -> IO ()
start _ config = do
socket_path <- Path.defaultMetadSocket
cleanupSocket socket_path
server <- describeError "binding to the socket" Nothing (Just socket_path)
$ UDSServer.connectServer metadConfig True socket_path
finally
(forever $ runMetadMonadInt (UDSServer.listener handler server) config)
(UDSServer.closeServer server)
where
metadConfig = ServerConfig GanetiMetad $ ConnectConfig 60 60
| dimara/ganeti | src/Ganeti/Metad/ConfigServer.hs | bsd-2-clause | 2,470 | 0 | 13 | 392 | 259 | 144 | 115 | 23 | 1 |
{-# LANGUAGE BangPatterns, ExistentialQuantification,
TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses,
RecordWildCards #-}
module Codec.Compression.HLZ4.Decode
-- (
-- decompress
-- )
where
import Control.Monad (when)
import Control.Monad.IO.Class
import Control.Applicative ((<$>), Applicative(..))
import Data.Bits ((.|.), (.&.), shiftR, shiftL, testBit, clearBit)
import Data.ByteString as BS (ByteString, pack, drop, length)
import Data.ByteString.Internal (toForeignPtr, fromForeignPtr)
import Data.Word (Word8, Word16, Word32)
import Foreign.ForeignPtr (touchForeignPtr, newForeignPtr)
import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr)
import Foreign.Marshal.Alloc (mallocBytes, finalizerFree)
import Foreign.Marshal.Utils as F (copyBytes)
import Foreign.Ptr (Ptr, plusPtr, nullPtr, castPtr)
import Foreign.Storable (peekByteOff, peek, poke)
import Text.Printf
debug :: Bool
debug = False
type P8 = Ptr Word8
-- | tuple of src and dest pointers, their offsets and their lengths
-- type PtrState = (P8, Int, Int, P8, Int, Int)
data PtrState = PS {
_src :: !P8,
_soff :: !Int,
_slen :: !Int,
_dst :: !P8,
_doff :: !Int,
_dlen :: !Int}
advanceSrcState :: Int -> PtrState -> PtrState
advanceSrcState n st =
st {_soff = _soff st + n}
advanceDestState :: Int -> PtrState -> PtrState
advanceDestState n st =
st {_doff = _doff st + n}
advanceBothState :: Int -> PtrState -> PtrState
advanceBothState n st =
st {_soff = _soff st + n, _doff = _doff st + n}
newtype Decoder a = Decoder (PtrState -> IO (PtrState, DResult a))
runDecoder :: Decoder a -> PtrState -> IO (PtrState, DResult a)
runDecoder (Decoder p) = p
data DResult a
= DDone a
  -- ^ Decoding finished, carrying the final result
| DPartial (Decoder a)
-- ^ More input is required, pass in a Ptr Word8 and the length in bytes
| DError String
-- ^ Something bad happened
instance Show a => Show (DResult a) where
show x = case x of
DDone a -> "DDone " ++ show a
DError str -> "DError: " ++ str
DPartial _ -> "DPartial <p>"
instance Functor Decoder where
fmap f m = m >>= return . f
instance Applicative Decoder where
pure = return
mf <*> ma = mf >>= \f -> ma >>= \a -> return (f a)
instance Monad Decoder where
return a = Decoder $ \s -> return (s,DDone a)
{-# INLINE return #-}
Decoder m >>= f = Decoder $ \s -> do
(s',r) <- m s
case r of
DDone x ->
let Decoder fx = f x in fx s'
DPartial g ->
return $ (s',DPartial (g >>= f))
DError str ->
return (s',DError str)
{-# INLINE (>>=) #-}
instance MonadIO Decoder where
liftIO m = Decoder $ \s -> do
x <- m
return (s,DDone x)
-- setSrc :: (P8,Int,Int) -> Decoder ()
-- setSrc (src,soff,slen) = do
-- Decoder $ \(_,_,_,dst,doff,dlen) ->
-- return ((src,soff,slen,dst,doff,dlen),DDone ())
demandInput :: Decoder ()
demandInput = Decoder $ \s -> return (s,DPartial (return ()))
getState :: Decoder PtrState
getState = Decoder $ \s -> return (s, DDone s)
putState :: PtrState -> Decoder ()
putState s = Decoder $ \_s -> return (s,DDone ())
modifyState :: (PtrState -> PtrState) -> Decoder ()
modifyState f = Decoder $ \s -> return (f s,DDone ())
startDecoder :: Show a => Decoder a -> ByteString -> IO HLZ4Result
startDecoder p bs = do
let (srcptr, off, len) = toForeignPtr bs
r <- runD bs (PS (unsafeForeignPtrToPtr srcptr) off len nullPtr 0 0) p
touchForeignPtr srcptr
return r
runD :: Show a => ByteString -> PtrState -> Decoder a -> IO HLZ4Result
runD input s (Decoder p) = do
(s',x) <- p s
when debug $ print x
case x of
DDone _ -> do
fptr <- newForeignPtr finalizerFree (_dst s')
let res = (fromForeignPtr fptr 0 (_doff s'))
if (_soff s' == _slen s')
then return $ Done res
else return $ Block res (BS.drop (_soff s') input)
DPartial g ->
return $ Partial (feed s' g)
DError str -> return $ Error str
feed :: Show a => PtrState -> Decoder a -> ByteString -> IO HLZ4Result
feed st g bs = do
let (srcptr, off, len) = toForeignPtr bs
r <- runD bs (st {_src = unsafeForeignPtrToPtr srcptr, _soff = off, _slen = len}) g
touchForeignPtr srcptr
return r
data HLZ4Result
= Done ByteString -- ^ Finished decoding all there is
  | Block ByteString ByteString -- ^ Decoded a block, and remaining input
| Partial (ByteString -> IO HLZ4Result) -- ^ More data is needed to decompress
| Error String -- ^ Something bad happened
instance Show HLZ4Result where
show x = case x of
Done bs -> "Done " ++ show bs
Block b1 b2 -> "Block " ++ show b1 ++ " (" ++ show b2 ++ ")"
Error str -> "Error: " ++ str
Partial _ -> "Partial"
err :: String -> Decoder a
err str = Decoder $ \s -> return $ (s,DError str)
getSrcRemaining :: Decoder Int
getSrcRemaining = getState >>= \st -> return (_slen st - _soff st)
getDestRemaining :: Decoder Int
getDestRemaining = getState >>= \st -> return (_dlen st - _doff st)
advanceSrc :: Int -> Decoder ()
advanceSrc n = modifyState(advanceSrcState n)
advanceDest :: Int -> Decoder ()
advanceDest n = modifyState(advanceDestState n)
advanceBoth :: Int -> Decoder ()
advanceBoth n = modifyState(advanceBothState n)
peekByte :: Ptr Word8 -> Int -> IO Word8
peekByte = peekByteOff
fi8 :: Integral a => Word8 -> a
fi8 = fromIntegral
getWord8 :: Decoder Word8
getWord8 = do
PS {..} <- getState
if _soff < _slen
then do
advanceSrc 1
b <- liftIO $ peekByte _src _soff
when debug $ liftIO $ printf "getWord8: %d\n" b
return b
else demandInput >> getWord8
{-# INLINE getWord8 #-}
getWord16LE :: Decoder Word16
getWord16LE = do
PS {..} <- getState
if _slen - _soff >= 2
then do
advanceSrc 2
liftIO $ peek (castPtr (_src `plusPtr` _soff))
else do
a <- getWord8
b <- getWord8
return (fi8 b `shiftL` 8 .|. fi8 a)
{-# INLINE getWord16LE #-}
getWord32LE :: Decoder Word32
getWord32LE = do
PS {..} <- getState
if _slen - _soff >= 4
then do
advanceSrc 4
liftIO $ peek (castPtr (_src `plusPtr` _soff))
else do
a <- getWord8
b <- getWord8
c <- getWord8
d <- getWord8
return (fi8 d `shiftL` 24 .|. fi8 c `shiftL` 16 .|. fi8 b `shiftL` 8 .|. fi8 a)
{-# INLINE getWord32LE #-}
-- | Allocate a new destination buffer, returning the bytestring representing the
-- current destination buffer.
allocateDest :: Int -> Decoder ByteString
allocateDest n =
Decoder $ \st -> do
fptr <- newForeignPtr finalizerFree (_dst st)
let res = fromForeignPtr fptr 0 (_doff st)
dst' <- mallocBytes n
return ( st {_dst = dst', _doff = 0, _dlen = n}, DDone res)
-- | Decodes a length stored as a run of 255 bytes followed by a terminating byte, returning the accumulated value
getLength :: Decoder Int
getLength = go 0 where
go n = do
b <- getWord8
case b of
255 -> go (n+255)
_ -> return (n + fromIntegral b)
{-# INLINE getLength #-}
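-- For example (illustrative input): the byte sequence 255, 255, 10 decodes
-- to 255 + 255 + 10 = 520, consuming three bytes of input.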
-- | Transfers `count` bytes from src into dest. If there are not enough
-- bytes in src, the remaining bytes will be copied and more input demanded.
-- If there is not enough room in the destination, an error is produced.
transfer :: Int -> Decoder ()
transfer count = do
PS {..} <- getState
if _doff + count >= _dlen
then err $ "transfer: transfer of "
++ show count
++ " bytes would overflow destination buffer "
++ show (_doff,_dlen)
else if _soff + count < _slen
then do
liftIO $ F.copyBytes (_dst `plusPtr` _doff) (_src `plusPtr` _soff) count
advanceBoth count
else do
let srem = _slen - _soff
liftIO $ F.copyBytes (_dst `plusPtr` _doff) (_src `plusPtr` _soff) srem
advanceDest srem
demandInput
transfer (count-srem)
-- | Moves `count` bytes of data from `offset` bytes before current position into
-- the current position in the destination, and advances the state. If count is greater
-- than the space remaining in the destination buffer, an error is returned.
lookback :: Int -> Int -> Decoder ()
lookback count offset = do
PS {..} <- getState
when (_doff + count > _dlen) $
err $ "lookback: copy of " ++ show count ++ " bytes would overflow destination buffer"
when (offset > _doff) $
err $ "lookback: copy from offset " ++ show offset ++ " before beginning of buffer, dest offset: " ++ show _doff
let mmove :: Ptr Word8 -> Ptr Word8 -> Int -> IO ()
mmove !_ !_ 0 = return ()
mmove dest src n = peek src >>= poke dest
>> mmove (dest `plusPtr` 1) (src `plusPtr` 1) (n-1)
liftIO $ mmove (_dst `plusPtr` _doff) (_dst `plusPtr` (_doff-offset)) count
advanceDest count
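-- Note the byte-by-byte copy above: when offset < count the source and
-- destination regions overlap, and copying forward one byte at a time
-- deliberately re-reads bytes that were just written. For example
-- (illustrative), offset 1 with count 4 repeats the previously written
-- byte four times, which is how LZ4 encodes runs.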
getByteString :: Int -> Decoder ByteString
getByteString len = do
bsptr <- liftIO $ mallocBytes len
go bsptr len
fptr <- liftIO $ newForeignPtr finalizerFree bsptr
return $ fromForeignPtr fptr 0 len
where go ptr len = do
PS {..} <- getState
if _soff + len < _slen
then do
liftIO $ F.copyBytes ptr (_src `plusPtr` _soff) len
advanceSrc len
else do
let srem = _slen-_soff
liftIO $ F.copyBytes ptr (_src `plusPtr` _soff) srem
demandInput
go (ptr `plusPtr` srem) (len-srem)
-- | Decodes a single LZ4 sequence within a block (lit len, lits, offset backwards, copy len).
-- Returns the number of bytes written to the destination
decodeSequence :: Decoder Int
decodeSequence = do
token <- getWord8
let lLen = fromIntegral $ token `shiftR` 4
mLen = fromIntegral $ (token .&. 0x0F) + 4
-- Write literals
litLength <- if lLen == 15
then (15+) <$> getLength
else return lLen
transfer litLength
-- copy length from offset
drem <- getDestRemaining
if drem > 0 then do
offset <- getWord16LE
matchLen <- if mLen == 19
then (19+) <$> getLength
else return mLen
lookback matchLen (fromIntegral offset)
return (litLength + matchLen)
else
return litLength
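-- Token layout, as decoded above: the high nibble is the literal length
-- (15 means "extended", read further length bytes) and the low nibble plus 4
-- is the match length (19 means extended). In 'test1' below, token 0x56
-- yields 5 literal bytes and a 10-byte match copied from offset 5, giving
-- 15 output bytes in total.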
decodeSequences :: Int -> Decoder ()
decodeSequences len
| len < 0 = err $ "decodeSequence: read more than block length bytes from source: " ++ show len
| len == 0 = return ()
| otherwise = do
ssize <- decodeSequence
decodeSequences (len-ssize)
getBlock :: Decoder ()
getBlock = do
len' <- getWord32LE
let len = fromIntegral (len' `clearBit` 31)
when debug $ liftIO $ print len'
allocateDest len
if testBit len' 31
then transfer len
else decodeSequences len
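-- Block framing, as read above: a 4-byte little-endian length whose top bit,
-- when set, marks an uncompressed block that is transferred verbatim;
-- otherwise the payload is decoded as a series of LZ4 sequences.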
-- Tests --
test1 :: IO Bool
test1 = do
r <- startDecoder getBlock $ pack [15,0,0,0, 0x56, 49,50,51,52,53, 5,0]
case r of
Done bs -> return $ bs == pack [49, 50,51,52,53,49,50,51,52,53,49, 50,51,52,53]
_ -> return False
test2 :: IO Bool
test2 = do
r <- startDecoder getBlock $ pack [15,0,0,0, 0x56, 49,50,51,52,53, 5,0,0,0,0,0]
case r of
Block bs res -> return $ bs == pack [49, 50,51,52,53,49,50,51,52,53,49, 50,51,52,53]
&& res == pack [0,0,0,0]
_ -> return False
test3 :: IO Bool
test3 = do
r <- startDecoder getBlock $ pack [30,0,0,0, 0x56, 49,50,51,52,53, 5,0, 0x56, 49, 50,51,52,53, 5,0]
case r of
Done bs -> return $ bs == pack [49,50,51,52,53,49,50,51,52,53,49,50,51,52,53,
49,50,51,52,53,49,50,51,52,53,49,50,51,52,53]
_ -> return False
| axman6/HLZ4 | src/Codec/Compression/HLZ4/Decode.hs | bsd-3-clause | 12,266 | 0 | 18 | 3,661 | 4,067 | 2,106 | 1,961 | 298 | 4 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.EN.AU.Corpus
( allExamples
) where
import Data.String
import Prelude
import Duckling.Testing.Types hiding (examples)
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "15/2"
, "on 15/2"
, "15 / 2"
, "15-2"
, "15 - 2"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "31/10/1974"
, "31/10/74"
, "31-10-74"
, "31.10.1974"
, "31 10 1974"
]
, examples (datetime (2013, 4, 25, 16, 0, 0) Minute)
[ "25/4 at 4:00pm"
]
, examples (datetime (2013, 10, 10, 0, 0, 0) Day)
[ "10/10"
, "10/10/2013"
]
, examples (datetimeHoliday (2013, 11, 28, 0, 0, 0) Day "Thanksgiving Day")
[ "thanksgiving day"
, "thanksgiving"
, "thanksgiving 2013"
, "this thanksgiving"
, "next thanksgiving day"
]
, examples (datetimeHoliday (2014, 11, 27, 0, 0, 0) Day "Thanksgiving Day")
[ "thanksgiving of next year"
, "thanksgiving 2014"
]
, examples (datetimeHoliday (2012, 11, 22, 0, 0, 0) Day "Thanksgiving Day")
[ "last thanksgiving"
, "thanksgiving day 2012"
]
, examples (datetimeHoliday (2016, 11, 24, 0, 0, 0) Day "Thanksgiving Day")
[ "thanksgiving 2016"
]
, examples (datetimeHoliday (2017, 11, 23, 0, 0, 0) Day "Thanksgiving Day")
[ "thanksgiving 2017"
]
, examples (datetimeHoliday (2013, 4, 25, 0, 0, 0) Day "ANZAC Day")
[ "anzac day"
]
, examples (datetimeHoliday (2013, 9, 1, 0, 0, 0) Day "Father's Day")
[ "Father's Day"
]
, examples (datetimeHoliday (2012, 9, 2, 0, 0, 0) Day "Father's Day")
[ "last fathers day"
]
, examples (datetimeHoliday (1996, 9, 1, 0, 0, 0) Day "Father's Day")
[ "fathers day 1996"
]
, examples (datetimeIntervalHoliday ((2020, 2, 8, 0, 0, 0), (2020, 2, 11, 0, 0, 0)) Day "Royal Hobart Regatta")
[ "Royal Hobart Regatta 2020"
]
, examples (datetimeIntervalHoliday ((2018, 7, 8, 0, 0, 0), (2018, 7, 16, 0, 0, 0)) Day "NAIDOC Week")
[ "NAIDOC week 2018"
]
, examples (datetimeHoliday (2013, 5, 12, 0, 0, 0) Day "Mother's Day")
[ "Mother's Day"
, "next mothers day"
]
, examples (datetimeHoliday (2012, 5, 13, 0, 0, 0) Day "Mother's Day")
[ "last mothers day"
]
, examples (datetimeHoliday (2014, 5, 11, 0, 0, 0) Day "Mother's Day")
[ "mothers day 2014"
]
, examples (datetimeHoliday (2013, 10, 7, 0, 0, 0) Day "Labour Day")
[ "labour day"
]
, examples (datetimeHoliday (2012, 10, 1, 0, 0, 0) Day "Labour Day")
[ "labour day of last year"
, "Labour Day 2012"
]
, examples (datetimeHoliday (2018, 5, 28, 0, 0, 0) Day "Reconciliation Day")
[ "reconciliation day 2018"
]
, examples (datetimeHoliday (2019, 5, 27, 0, 0, 0) Day "Reconciliation Day")
[ "reconciliation day 2019"
]
, examples (datetimeIntervalHoliday ((2013, 8, 9, 0, 0, 0), (2013, 8, 19, 0, 0, 0)) Day "Royal Queensland Show")
[ "ekka"
, "royal national agricultural show"
]
, examples (datetimeHoliday (2018, 8, 15, 0, 0, 0) Day "Royal Queensland Show Day")
[ "ekka day 2018"
, "RNA Show Day 2018"
, "Royal Queensland Show Day in five years"
]
, examples (datetimeHoliday (2013, 5, 3, 0, 0, 0) Day "Administrative Professionals' Day")
[ "admin day"
]
]
| facebookincubator/duckling | Duckling/Time/EN/AU/Corpus.hs | bsd-3-clause | 4,316 | 0 | 11 | 1,538 | 1,185 | 717 | 468 | 81 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{- |
Copyright : Copyright (C) 2008 Bjorn Buckwalter
License : BSD3
Maintainer : bjorn.buckwalter@gmail.com
Stability : experimental
Portability: GHC only?
An implementation of TDB conversions based on formula (2.6) of
[Kaplan2005]. The formula reportedly has a maximum error in the
conversions of about 10 microseconds between the years 1600 and
2200.
This module exports no data types or functions, it only provides
additional 'Astro.Time.Convert' instances.
-}
module Astro.Time.Barycentric.Kaplan2005 (tdbToTT, ttToTDB) where
import Astro.Time
import Numeric.Units.Dimensional.Prelude
import qualified Prelude
-- | The difference between the TDB and TT time scales as a function of
-- TT epoch.
tdbMinusTT :: Floating a => E TT a -> Time a
tdbMinusTT tt = 0.001657*~second * sin ( 628.3076 *~rpc * t + 6.2401 *~radian)
+ 0.000022*~second * sin ( 575.3385 *~rpc * t + 4.2970 *~radian)
+ 0.000014*~second * sin (1256.6152 *~rpc * t + 6.1969 *~radian)
+ 0.000005*~second * sin ( 606.9777 *~rpc * t + 4.0212 *~radian)
+ 0.000005*~second * sin ( 52.9691 *~rpc * t + 0.4444 *~radian)
+ 0.000002*~second * sin ( 21.3299 *~rpc * t + 5.5431 *~radian)
+ 0.000010*~(second/century) * t * sin ( 628.3076 *~rpc * t + 4.2490 *~radian)
where
rpc = radian / century
t = diffEpoch tt j2000
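-- The dominant term above has an amplitude of about 1.657 ms and an angular
-- rate of 628.3076 rad/century, i.e. roughly 100 cycles per century, so the
-- TDB-TT difference oscillates with an approximately annual period.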
{-
We incorrectly substitute TDB for TT in 'ttMinusTDB' below but the error
introduced by this approximation is less than a picosecond (far less than
the 10 microsecond accuracy inherent in the formula in the first place).
-}
-- | The difference between the TDB and TT time scales as a function of
-- TT epoch.
ttMinusTDB :: Floating a => E TDB a -> Time a
ttMinusTDB = negate . tdbMinusTT . coerceE
-- | Convert a TT epoch into a TDB epoch.
ttToTDB :: Floating a => E TT a -> E TDB a
ttToTDB tt = coerceE $ addTime tt (tdbMinusTT tt)
-- | Convert a TDB epoch into a TT epoch.
tdbToTT :: Floating a => E TDB a -> E TT a
tdbToTT tdb = coerceE $ addTime tdb (ttMinusTDB tdb)
| bjornbm/astro | src/Astro/Time/Barycentric/Kaplan2005.hs | bsd-3-clause | 2,118 | 0 | 30 | 475 | 474 | 244 | 230 | 21 | 1 |
module App.Password where
import Control.Monad.IO.Class (MonadIO(liftIO))
import Crypto.Scrypt
(EncryptedPass(EncryptedPass), Pass(Pass), encryptPassIO',
getEncryptedPass, verifyPass')
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
import App.Db.Types (PasswordHash(..))
data PasswordCheck
= PasswordIncorrect
| PasswordCorrect
deriving (Show)
foldPasswordCheck
:: a -- ^ Value to use for incorrect password.
-> a -- ^ Value to use for correct password.
-> PasswordCheck
-> a
foldPasswordCheck incorrectVal _ PasswordIncorrect = incorrectVal
foldPasswordCheck _ correctVal PasswordCorrect = correctVal
hashPassword
:: MonadIO m
=> Text -> m PasswordHash
hashPassword password =
let pass = passwordToPass password
in encryptedPassToPasswordHash <$> liftIO (encryptPassIO' pass)
passwordToPass :: Text -> Pass
passwordToPass = Pass . encodeUtf8
encryptedPassToPasswordHash :: EncryptedPass -> PasswordHash
encryptedPassToPasswordHash =
PasswordHash . decodeUtf8With lenientDecode . getEncryptedPass
passwordHashToEncryptedPass :: PasswordHash -> EncryptedPass
passwordHashToEncryptedPass = EncryptedPass . encodeUtf8 . unPasswordHash
checkPassword :: Text -> PasswordHash -> PasswordCheck
checkPassword password passwordHash =
let res =
verifyPass'
(passwordToPass password)
(passwordHashToEncryptedPass passwordHash)
in case res of
True -> PasswordCorrect
False -> PasswordIncorrect
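-- An illustrative round trip (hypothetical password values):
--
-- > hash <- hashPassword "hunter2"
-- > checkPassword "hunter2" hash -- PasswordCorrect
-- > checkPassword "wrong" hash   -- PasswordIncorrect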
| arow-oss/arow-sample-webapp | src/App/Password.hs | bsd-3-clause | 1,556 | 0 | 11 | 252 | 349 | 194 | 155 | 42 | 2 |
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
module GHC.Util (
module GHC.Util.View
, module GHC.Util.FreeVars
, module GHC.Util.ApiAnnotation
, module GHC.Util.HsDecl
, module GHC.Util.HsExpr
, module GHC.Util.SrcLoc
, module GHC.Util.DynFlags
, module GHC.Util.Scope
, module GHC.Util.Unify
, parsePragmasIntoDynFlags
, fileToModule
, pattern SrcSpan, srcSpanFilename, srcSpanStartLine', srcSpanStartColumn, srcSpanEndLine', srcSpanEndColumn
, pattern SrcLoc, srcFilename, srcLine, srcColumn
, showSrcSpan,
) where
import GHC.Util.View
import GHC.Util.FreeVars
import GHC.Util.ApiAnnotation
import GHC.Util.HsExpr
import GHC.Util.HsDecl
import GHC.Util.SrcLoc
import GHC.Util.DynFlags
import GHC.Util.Scope
import GHC.Util.Unify
import Language.Haskell.GhclibParserEx.GHC.Parser (parseFile)
import Language.Haskell.GhclibParserEx.GHC.Driver.Session (parsePragmasIntoDynFlags)
import Language.Haskell.GhclibParserEx.GHC.Utils.Outputable
import GHC.Hs
import GHC.Parser.Lexer
import GHC.Types.SrcLoc
import GHC.Driver.Session
import GHC.Data.FastString
import System.FilePath
import Language.Preprocessor.Unlit
fileToModule :: FilePath -> String -> DynFlags -> ParseResult (Located HsModule)
fileToModule filename str flags =
parseFile filename flags
(if takeExtension filename /= ".lhs" then str else unlit filename str)
{-# COMPLETE SrcSpan #-}
-- | The \"Line'\" thing is because there is already e.g. 'SrcLoc.srcSpanStartLine'
pattern SrcSpan :: String -> Int -> Int -> Int -> Int -> SrcSpan
pattern SrcSpan
{ srcSpanFilename
, srcSpanStartLine'
, srcSpanStartColumn
, srcSpanEndLine'
, srcSpanEndColumn
}
<-
(toOldeSpan ->
( srcSpanFilename
, srcSpanStartLine'
, srcSpanStartColumn
, srcSpanEndLine'
, srcSpanEndColumn
))
toOldeSpan :: SrcSpan -> (String, Int, Int, Int, Int)
toOldeSpan (RealSrcSpan span _) =
( unpackFS $ srcSpanFile span
, srcSpanStartLine span
, srcSpanStartCol span
, srcSpanEndLine span
, srcSpanEndCol span
)
-- TODO: the bad locations are all (-1) right now
-- is this fine? it should be, since noLoc from HSE previously also used (-1) as an invalid location
toOldeSpan (UnhelpfulSpan _) =
( "no-span"
, -1
, -1
, -1
, -1
)
{-# COMPLETE SrcLoc #-}
pattern SrcLoc :: String -> Int -> Int -> SrcLoc
pattern SrcLoc
{ srcFilename
, srcLine
, srcColumn
}
<-
(toOldeLoc ->
( srcFilename
, srcLine
, srcColumn
))
toOldeLoc :: SrcLoc -> (String, Int, Int)
toOldeLoc (RealSrcLoc loc _) =
( unpackFS $ srcLocFile loc
, srcLocLine loc
, srcLocCol loc
)
toOldeLoc (UnhelpfulLoc _) =
( "no-loc"
, -1
, -1
)
showSrcSpan :: SrcSpan -> String
showSrcSpan = unsafePrettyPrint
| ndmitchell/hlint | src/GHC/Util.hs | bsd-3-clause | 2,787 | 0 | 10 | 519 | 658 | 396 | 262 | 88 | 2 |
module Main where
import Prelude ()
import Prelude.Compat
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Golden (goldenTests)
main :: IO ()
main = do
-- createDirectoryIfMissing False (testDir </> "empty-dir")
tests <- testsIO
defaultMain tests
testsIO :: IO TestTree
testsIO = do
return $ testGroup "tests" [goldenTests]
| cdepillabout/highlight | test/Test.hs | bsd-3-clause | 354 | 0 | 9 | 60 | 98 | 54 | 44 | 12 | 1 |
{-# LANGUAGE RankNTypes, PolyKinds, DataKinds, TypeOperators, TypeInType,
TypeFamilies, FlexibleContexts, UndecidableInstances, GADTs #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Singletons.Decide
-- Copyright : (C) 2013 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg (eir@cis.upenn.edu)
-- Stability : experimental
-- Portability : non-portable
--
-- Defines the class 'SDecide', allowing for decidable equality over singletons.
--
----------------------------------------------------------------------------
module Data.Singletons.Decide (
-- * The SDecide class
SDecide(..),
-- * Supporting definitions
(:~:)(..), Void, Refuted, Decision(..)
) where
import Data.Kind
import Data.Singletons
import Data.Type.Equality
import Data.Void
----------------------------------------------------------------------
---- SDecide ---------------------------------------------------------
----------------------------------------------------------------------
-- | Because we can never create a value of type 'Void', a function that type-checks
-- at @a -> Void@ shows that objects of type @a@ can never exist. Thus, we say that
-- @a@ is 'Refuted'
type Refuted a = (a -> Void)
-- | A 'Decision' about a type @a@ is either a proof of existence or a proof that @a@
-- cannot exist.
data Decision a = Proved a -- ^ Witness for @a@
| Disproved (Refuted a) -- ^ Proof that no @a@ exists
-- | Members of the 'SDecide' "kind" class support decidable equality. Instances
-- of this class are generated alongside singleton definitions for datatypes that
-- derive an 'Eq' instance.
class SDecide k where
-- | Compute a proof or disproof of equality, given two singletons.
(%~) :: forall (a :: k) (b :: k). Sing a -> Sing b -> Decision (a :~: b)
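-- For example, assuming the singletons-generated instance for 'Bool':
--
-- > STrue %~ STrue   ==> Proved Refl
-- > STrue %~ SFalse  ==> Disproved (a refutation function)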
instance SDecide k => TestEquality (Sing :: k -> Type) where
testEquality a b =
case a %~ b of
Proved Refl -> Just Refl
Disproved _ -> Nothing
| int-index/singletons | src/Data/Singletons/Decide.hs | bsd-3-clause | 2,113 | 1 | 12 | 383 | 248 | 154 | 94 | 20 | 0 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Main where
import Stashh.App
import Stashh.Env
import Stashh.Command
import Control.Monad.Reader
import Control.Monad.IO.Class (MonadIO, liftIO)
main :: IO ()
main = do
env <- parseEnv
runApp app env
app :: AppT IO ()
app = do
env <- ask
debugout env ["-- Env --", show env, ""]
dispatch
| yuroyoro/stashh | src/Main.hs | bsd-3-clause | 362 | 0 | 9 | 68 | 118 | 64 | 54 | 16 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Applicative (optional, (<$>))
import Control.Category (Category, (<<<))
import qualified Control.Category as Cat (id, (.))
import Control.DeepSeq (NFData, rnf)
import Control.Exception (evaluate)
import Control.Monad
import Control.Parallel.HdpH
import qualified Control.Parallel.HdpH as HdpH (declareStatic)
import Data.Array (Array, array, bounds, listArray, (!))
import Data.Int (Int64)
import qualified Data.IntMap.Strict as StrictMap (findWithDefault,
fromAscList)
import Data.IntSet (IntSet)
import qualified Data.IntSet as VertexSet (delete, difference,
fromAscList,
intersection, member,
minView, null, size)
import Data.IORef (newIORef)
import Data.List (delete, group, groupBy, sort, sortBy,
stripPrefix)
import Data.Monoid (mconcat)
import Data.Maybe (fromMaybe)
import Options.Applicative hiding (defaultPrefs)
import System.Clock
import System.Environment (getArgs)
import System.Exit (exitFailure, exitSuccess)
import System.IO (BufferMode (..), hSetBuffering,
stderr, stdout)
import System.IO.Unsafe
import Clique (Clique, emptyClique, isClique)
import DIMACParser (parseDIMACS2)
import Graph
import GraphBitArray
import Solvers.SequentialSolver (sequentialMaxClique)
import Solvers.SequentialSolverBitSetArray (sequentialBitSetArrayMaxClique)
import Solvers.SequentialSolverBBMC (sequentialMaxCliqueBBMC)
import Solvers.BonesSolver ({- findSolution, -} randomWSIntSet, randomWSBitArray, safeSkeletonIntSet,
{- safeSkeletonIntSetDynamic, -} safeSkeletonBitSetArray)
import qualified Solvers.BonesSolver as BonesSolver (declareStatic)
import qualified Bones.Skeletons.BranchAndBound.HdpH.Unordered as Unordered
import qualified Bones.Skeletons.BranchAndBound.HdpH.Ordered as Ordered
import Bones.Skeletons.BranchAndBound.HdpH.GlobalRegistry
--------------------------------------------------------------------------------
-- Misc Functions
--------------------------------------------------------------------------------
timeIO :: (TimeSpec -> TimeSpec -> Double) -> IO a -> IO (a, Double)
timeIO diffT action = do
s <- getTime Monotonic
x <- action
e <- getTime Monotonic
return (x, diffT s e)
diffTime :: Integral a => a -> TimeSpec -> TimeSpec -> Double
diffTime factor (TimeSpec s1 n1) (TimeSpec s2 n2) = fromIntegral (t2 - t1)
/
fromIntegral factor
where t1 = (fromIntegral s1 * 10 ^ 9) + fromIntegral n1
t2 = (fromIntegral s2 * 10 ^ 9) + fromIntegral n2
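-- TimeSpec carries seconds and nanoseconds; diffTime converts both endpoints
-- to nanoseconds and divides by the given factor, so a factor of 10^6 yields
-- milliseconds and 10^9 yields seconds.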
diffTimeMs :: TimeSpec -> TimeSpec -> Double
diffTimeMs = diffTime (10 ^ 6)
diffTimeS :: TimeSpec -> TimeSpec -> Double
diffTimeS = diffTime (10 ^ 9)
timeIOMs :: IO a -> IO (a, Double)
timeIOMs = timeIO diffTimeMs
timeIOS :: IO a -> IO (a, Double)
timeIOS = timeIO diffTimeS
--------------------------------------------------------------------------------
-- Argument Handling
--------------------------------------------------------------------------------
data Algorithm = Sequential
| SequentialBBMC
| SequentialBitSetArray
| UnorderedIntSet
| UnorderedBBMC
| OrderedIntSet
| OrderedBBMC
-- | FindSolution
-- | OrderedSkeletonIntSetDynamic
deriving (Read, Show)
data Options = Options
{ algorithm :: Algorithm
, dataFile :: FilePath
, noPerm :: Bool
, verbose :: Bool
, discrepancy :: Bool
, targetSize :: Maybe Int
, spawnDepth :: Maybe Int
, numTasks :: Maybe Int
}
optionParser :: Parser Options
optionParser = Options
<$> option auto
( long "algorithm"
<> short 'a'
<> help ("Which MaxClique algorithm to use: " ++ printAlgorithms)
)
<*> strOption
( long "inputfile"
<> short 'f'
<> help "Location of an input graph in DIMACS2 format"
)
<*> switch
( long "noperm"
<> help "Don't permute the input graph."
)
<*> switch
( long "verbose"
<> short 'v'
<> help "Enable verbose output"
)
<*> switch
( long "discrepancySearch"
<> help "Use discrepancy search in parallel."
)
<*> optional (option auto
( long "targetSize"
<> short 's'
<> help "Clique size to search for (use with FindSolution skeleton)"
))
<*> optional (option auto
( long "spawnDepth"
<> short 'd'
<> help "Spawn depth can effect many skeletons"
))
<*> optional (option auto
( long "NumDynamicTasks"
<> short 't'
<> help "Number of Tasks to attempt to keep in the Dynamic WorkQueue"
))
where printAlgorithms = unlines ["[Sequential,"
," SequentialBitSetArray,"
," SequentialBBMC,"
," OrderedIntSet,"
," OrderedBBMC,"
," UnorderedIntSet,"
," UnorderedBBMC]"]
optsParser = info (helper <*> optionParser)
( fullDesc
<> progDesc "Find the maximum clique in a given graph"
<> header "MaxClique"
)
defaultPrefs :: ParserPrefs
defaultPrefs = ParserPrefs
{ prefMultiSuffix = ""
, prefDisambiguate = False
, prefShowHelpOnError = False
, prefBacktrack = True
, prefColumns = 80 }
--------------------------------------------------------------------------------
-- HdpH
--------------------------------------------------------------------------------
parseHdpHOpts :: [String] -> IO (RTSConf, Int, [String])
parseHdpHOpts args = do
either_conf <- updateConf args defaultRTSConf
case either_conf of
Left err_msg -> error $ "parseHdpHOpts: " ++ err_msg
Right (conf, []) -> return (conf, 0, [])
Right (conf, arg':args') ->
case stripPrefix "-rand=" arg' of
Just s -> return (conf, read s, args')
Nothing -> return (conf, 0, arg':args')
$(return []) -- Bring all types into scope for TH.
declareStatic :: StaticDecl
declareStatic = mconcat
[
HdpH.declareStatic
, BonesSolver.declareStatic
]
--------------------------------------------------------------------------------
-- Main
--------------------------------------------------------------------------------
main :: IO ()
main = do
-- parsing command line arguments
args <- getArgs
(conf, seed, args') <- parseHdpHOpts args
(Options
algorithm filename noPerm
verbose discrepancySearch solSize
depth numTasks) <- handleParseResult $ execParserPure defaultPrefs optsParser args'
let permute = not noPerm
-- reading input graph
((uG,n,edges), t_read) <- timeIOMs $ do
input <- if null filename
then getContents
else readFile filename
let (n, edges) = parseDIMACS2 input
let uG' = mkUG n edges
evaluate (rnf uG')
return (uG',n,edges)
when verbose $ do
putStrLn $ "Time to construct (undirected) input graph: " ++ show t_read
printGraphStatistics uG
-- permuting and converting input graph
((alpha, bigUG, bigG), t_permute) <- timeIOMs $ do
let alpha' | permute = antiMonotonizeDegreesPermUG uG
| otherwise = Cat.id
uG_alpha = appUG (inv permHH <<< alpha') uG
-- uG_alpha in non-decreasing degree order, vertices numbered from 0.
bigG' = mkG uG_alpha
evaluate (rnf bigG')
evaluate (rnf uG_alpha)
return (alpha', uG_alpha, bigG')
when verbose $
if permute
then putStrLn $ "Time to Permute Graph: " ++ show t_permute
else putStrLn $ "Time to Construct Graph: " ++ show t_permute
-- Buffer Configuration
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering
-- Run (and time) the max clique algorithm
(res, t_compute) <- case algorithm of
Sequential -> timeIOS $ do
let (bigCstar', !calls') = sequentialMaxClique bigG
evaluate (rnf bigCstar')
return $ Just bigCstar'
SequentialBitSetArray -> timeIOS $ do
g <- mkGraphArray bigUG
gC <- mkGraphArray $ complementUG bigUG
sol <- sequentialBitSetArrayMaxClique (g, gC) n
return $ Just sol
SequentialBBMC -> timeIOS $ do
let (bigCstar', !call') = sequentialMaxCliqueBBMC n edges
evaluate (rnf bigCstar')
return $ (Just bigCstar')
UnorderedIntSet -> do
register (Main.declareStatic <> Unordered.declareStatic)
-- -- Make sure the graph is available globally
graph <- newIORef bigG
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
timeIOS $ evaluate =<< runParIO conf (randomWSIntSet bigG depth')
UnorderedBBMC -> do
register (Main.declareStatic <> Unordered.declareStatic)
g <- mkGraphArray bigUG
gC <- mkGraphArray $ complementUG bigUG
graph <- newIORef (g, gC)
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
timeIOS $ evaluate =<< runParIO conf (randomWSBitArray n depth')
OrderedIntSet -> do
register (Main.declareStatic <> Ordered.declareStatic)
-- -- Make sure the graph is available globally
graph <- newIORef bigG
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
timeIOS $ evaluate =<< runParIO conf (safeSkeletonIntSet bigG depth' discrepancySearch)
{-
OrderedSkeletonIntSetDynamic -> do
register (Main.declareStatic <> Ordered.declareStatic)
-- -- Make sure the graph is available globally
graph <- newIORef bigG
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
ntasks = fromMaybe 0 numTasks
if ntasks == 0
then error "Must provide the NumDynamicTasks (-t) argument when using dynamic work generation"
else timeIOS $ evaluate =<< runParIO conf (safeSkeletonIntSetDynamic bigG depth' ntasks)
-}
OrderedBBMC -> do
register (Main.declareStatic <> Ordered.declareStatic)
-- -- Make sure the graph is available globally
g <- mkGraphArray bigUG
gC <- mkGraphArray $ complementUG bigUG
graph <- newIORef (g, gC)
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
timeIOS $ evaluate =<< runParIO conf (safeSkeletonBitSetArray n depth' discrepancySearch)
{-
FindSolution -> do
solSize' <- case solSize of
Nothing -> error "You must provide the target size (-s) argument\
\when using the FindSolution algorithm"
Just s -> return s
register (Main.declareStatic <> Unordered.declareStatic)
g <- mkGraphArray bigUG
gC <- mkGraphArray $ complementUG bigUG
graph <- newIORef (g, gC)
addGlobalSearchSpaceToRegistry graph
let depth' = fromMaybe 0 depth
timeIOS $ evaluate =<< runParIO conf (findSolution n depth' solSize')
-}
case res of
Nothing -> exitSuccess
Just (clq, clqSize) -> do
let bigCstar_alpha_inv = map (app (inv alpha <<< permHH)) clq
putStrLn $ "Results\n======= "
putStrLn $ " C*: " ++ show bigCstar_alpha_inv
putStrLn $ "sort C*: " ++ show (sort bigCstar_alpha_inv)
putStrLn $ "size: " ++ show clqSize
putStrLn $ "isClique: " ++ show (isClique bigG clq)
putStrLn $ "TIMED: " ++ show t_compute ++ " s"
exitSuccess
| BlairArchibald/bones | apps/maxclique/src/Main.hs | bsd-3-clause | 12,772 | 0 | 20 | 4,139 | 2,585 | 1,346 | 1,239 | 235 | 10 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Constructs.Save where
import Language.Syntactic
import Language.Syntactic.Constructs.Binding
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
data Save a
where
Save :: Type a => Save (a :-> Full a)
instance Semantic Save
where
semantics Save = Sem "save" id
semanticInstances ''Save
instance EvalBind Save where evalBindSym = evalBindSymDefault
instance AlphaEq dom dom dom env => AlphaEq Save Save dom env
where
alphaEqSym = alphaEqSymDefault
instance Sharable Save
instance Cumulative Save
instance SizeProp (Save :|| Type)
where
sizeProp (C' Save) (WrapFull a :* Nil) = infoSize a
instance ( (Save :|| Type) :<: dom
, OptimizeSuper dom)
=> Optimize (Save :|| Type) dom
where
constructFeatUnOpt opts x@(C' _) = constructFeatUnOptDefault opts x
| emwap/feldspar-language | src/Feldspar/Core/Constructs/Save.hs | bsd-3-clause | 2,700 | 0 | 9 | 491 | 294 | 171 | 123 | 29 | 0 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, RecordWildCards
, BangPatterns
, NondecreasingIndentation
, MagicHash
#-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  GHC.IO.Handle.Text
-- Copyright : (c) The University of Glasgow, 1992-2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- String I\/O functions
--
-----------------------------------------------------------------------------
module GHC.IO.Handle.Text (
hWaitForInput, hGetChar, hGetLine, hGetContents, hPutChar, hPutStr,
commitBuffer', -- hack, see below
hGetBuf, hGetBufSome, hGetBufNonBlocking, hPutBuf, hPutBufNonBlocking,
memcpy, hPutStrLn,
) where
import GHC.IO
import GHC.IO.FD
import GHC.IO.Buffer
import qualified GHC.IO.BufferedIO as Buffered
import GHC.IO.Exception
import {-# SOURCE #-} GHC.Exception
import GHC.IO.Handle.Types
import GHC.IO.Handle.Internals
import qualified GHC.IO.Device as IODevice
import qualified GHC.IO.Device as RawIO
import Foreign
import Foreign.C
import qualified Control.Exception as Exception
import Data.Typeable
import System.IO.Error
import Data.Maybe
import GHC.IORef
import GHC.Base
import GHC.Real
import GHC.Num
import GHC.Show
import GHC.List
-- ---------------------------------------------------------------------------
-- Simple input operations
-- If hWaitForInput finds anything in the Handle's buffer, it
-- immediately returns. If not, it tries to read from the underlying
-- OS handle. Notice that for buffered Handles connected to terminals
-- this means waiting until a complete line is available.
-- | Computation 'hWaitForInput' @hdl t@
-- waits until input is available on handle @hdl@.
-- It returns 'True' as soon as input is available on @hdl@,
-- or 'False' if no input is available within @t@ milliseconds. Note that
-- 'hWaitForInput' waits until one or more full /characters/ are available,
-- which means that it needs to do decoding, and hence may fail
-- with a decoding error.
--
-- If @t@ is less than zero, then @hWaitForInput@ waits indefinitely.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
--
-- * a decoding error, if the input begins with an invalid byte sequence
-- in this Handle's encoding.
--
-- NOTE for GHC users: unless you use the @-threaded@ flag,
-- @hWaitForInput hdl t@ where @t >= 0@ will block all other Haskell
-- threads for the duration of the call. It behaves like a
-- @safe@ foreign call in this respect.
--
hWaitForInput :: Handle -> Int -> IO Bool
hWaitForInput h msecs = do
wantReadableHandle_ "hWaitForInput" h $ \ handle_@Handle__{..} -> do
cbuf <- readIORef haCharBuffer
if not (isEmptyBuffer cbuf) then return True else do
if msecs < 0
then do cbuf' <- readTextDevice handle_ cbuf
writeIORef haCharBuffer cbuf'
return True
else do
-- there might be bytes in the byte buffer waiting to be decoded
cbuf' <- decodeByteBuf handle_ cbuf
writeIORef haCharBuffer cbuf'
if not (isEmptyBuffer cbuf') then return True else do
r <- IODevice.ready haDevice False{-read-} msecs
if r then do -- Call hLookAhead' to throw an EOF
-- exception if appropriate
_ <- hLookAhead_ handle_
return True
else return False
-- XXX we should only return when there are full characters
-- not when there are only bytes. That would mean looping
-- and re-running IODevice.ready if we don't have any full
-- characters; but we don't know how long we've waited
-- so far.
-- ---------------------------------------------------------------------------
-- hGetChar
-- | Computation 'hGetChar' @hdl@ reads a character from the file or
-- channel managed by @hdl@, blocking until a character is available.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetChar :: Handle -> IO Char
hGetChar handle =
wantReadableHandle_ "hGetChar" handle $ \handle_@Handle__{..} -> do
-- buffering mode makes no difference: we just read whatever is available
-- from the device (blocking only if there is nothing available), and then
-- return the first character.
-- See [note Buffered Reading] in GHC.IO.Handle.Types
buf0 <- readIORef haCharBuffer
buf1 <- if isEmptyBuffer buf0
then readTextDevice handle_ buf0
else return buf0
(c1,i) <- readCharBuf (bufRaw buf1) (bufL buf1)
let buf2 = bufferAdjustL i buf1
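    -- In CRLF translation mode a '\r' may be the first half of a "\r\n"
    -- pair, so peek at the next character (refilling the buffer if needed)
    -- before deciding whether to return '\n' or a lone '\r'.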
if haInputNL == CRLF && c1 == '\r'
then do
mbuf3 <- if isEmptyBuffer buf2
then maybeFillReadBuffer handle_ buf2
else return (Just buf2)
case mbuf3 of
-- EOF, so just return the '\r' we have
Nothing -> do
writeIORef haCharBuffer buf2
return '\r'
Just buf3 -> do
(c2,i2) <- readCharBuf (bufRaw buf2) (bufL buf2)
if c2 == '\n'
then do
writeIORef haCharBuffer (bufferAdjustL i2 buf3)
return '\n'
else do
-- not a \r\n sequence, so just return the \r
writeIORef haCharBuffer buf3
return '\r'
else do
writeIORef haCharBuffer buf2
return c1
-- ---------------------------------------------------------------------------
-- hGetLine
-- | Computation 'hGetLine' @hdl@ reads a line from the file or
-- channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file is encountered when reading
-- the /first/ character of the line.
--
-- If 'hGetLine' encounters end-of-file at any other point while reading
-- in a line, it is treated as a line terminator and the (partial)
-- line is returned.
hGetLine :: Handle -> IO String
hGetLine h =
wantReadableHandle_ "hGetLine" h $ \ handle_ -> do
hGetLineBuffered handle_
hGetLineBuffered :: Handle__ -> IO String
hGetLineBuffered handle_@Handle__{..} = do
buf <- readIORef haCharBuffer
hGetLineBufferedLoop handle_ buf []
hGetLineBufferedLoop :: Handle__
-> CharBuffer -> [String]
-> IO String
hGetLineBufferedLoop handle_@Handle__{..}
buf@Buffer{ bufL=r0, bufR=w, bufRaw=raw0 } xss =
let
-- find the end-of-line character, if there is one
loop raw r
| r == w = return (False, w)
| otherwise = do
(c,r') <- readCharBuf raw r
if c == '\n'
then return (True, r) -- NB. not r': don't include the '\n'
else loop raw r'
in do
(eol, off) <- loop raw0 r0
debugIO ("hGetLineBufferedLoop: r=" ++ show r0 ++ ", w=" ++ show w ++ ", off=" ++ show off)
(xs,r') <- if haInputNL == CRLF
then unpack_nl raw0 r0 off ""
else do xs <- unpack raw0 r0 off ""
return (xs,off)
-- if eol == True, then off is the offset of the '\n'
-- otherwise off == w and the buffer is now empty.
if eol -- r' == off
then do writeIORef haCharBuffer (bufferAdjustL (off+1) buf)
return (concat (reverse (xs:xss)))
else do
let buf1 = bufferAdjustL r' buf
maybe_buf <- maybeFillReadBuffer handle_ buf1
case maybe_buf of
-- Nothing indicates we caught an EOF, and we may have a
-- partial line to return.
Nothing -> do
-- we reached EOF. There might be a lone \r left
-- in the buffer, so check for that and
-- append it to the line if necessary.
--
let pre = if not (isEmptyBuffer buf1) then "\r" else ""
writeIORef haCharBuffer buf1{ bufL=0, bufR=0 }
let str = concat (reverse (pre:xs:xss))
if not (null str)
then return str
else ioe_EOF
Just new_buf ->
hGetLineBufferedLoop handle_ new_buf (xs:xss)
maybeFillReadBuffer :: Handle__ -> CharBuffer -> IO (Maybe CharBuffer)
maybeFillReadBuffer handle_ buf
= Exception.catch
(do buf' <- getSomeCharacters handle_ buf
return (Just buf')
)
(\e -> do if isEOFError e
then return Nothing
else ioError e)
-- See GHC.IO.Buffer
#define CHARBUF_UTF32
-- #define CHARBUF_UTF16
-- NB. performance-critical code: eyeball the Core.
unpack :: RawCharBuffer -> Int -> Int -> [Char] -> IO [Char]
unpack !buf !r !w acc0
| r == w = return acc0
| otherwise =
withRawBuffer buf $ \pbuf ->
let
unpackRB acc !i
| i < r = return acc
| otherwise = do
-- Here, we are rather careful to only put an *evaluated* character
-- in the output string. Due to pointer tagging, this allows the consumer
-- to avoid ping-ponging between the actual consumer code and the thunk code
#ifdef CHARBUF_UTF16
-- reverse-order decoding of UTF-16
c2 <- peekElemOff pbuf i
if (c2 < 0xdc00 || c2 > 0xdffff)
then unpackRB (unsafeChr (fromIntegral c2) : acc) (i-1)
else do c1 <- peekElemOff pbuf (i-1)
let c = (fromIntegral c1 - 0xd800) * 0x400 +
(fromIntegral c2 - 0xdc00) + 0x10000
case desurrogatifyRoundtripCharacter (unsafeChr c) of
{ C# c# -> unpackRB (C# c# : acc) (i-2) }
#else
c <- peekElemOff pbuf i
unpackRB (c : acc) (i-1)
#endif
in
unpackRB acc0 (w-1)
-- NB. performance-critical code: eyeball the Core.
unpack_nl :: RawCharBuffer -> Int -> Int -> [Char] -> IO ([Char],Int)
unpack_nl !buf !r !w acc0
| r == w = return (acc0, 0)
| otherwise =
withRawBuffer buf $ \pbuf ->
let
unpackRB acc !i
| i < r = return acc
| otherwise = do
c <- peekElemOff pbuf i
if (c == '\n' && i > r)
then do
c1 <- peekElemOff pbuf (i-1)
if (c1 == '\r')
then unpackRB ('\n':acc) (i-2)
else unpackRB ('\n':acc) (i-1)
else do
unpackRB (c : acc) (i-1)
in do
c <- peekElemOff pbuf (w-1)
if (c == '\r')
then do
-- If the last char is a '\r', we need to know whether or
-- not it is followed by a '\n', so leave it in the buffer
-- for now and just unpack the rest.
str <- unpackRB acc0 (w-2)
return (str, w-1)
else do
str <- unpackRB acc0 (w-1)
return (str, w)
-- Note [#5536]
--
-- We originally had
--
-- let c' = desurrogatifyRoundtripCharacter c in
-- c' `seq` unpackRB (c':acc) (i-1)
--
-- but this resulted in Core like
--
-- case (case x <# y of True -> C# e1; False -> C# e2) of c
-- C# _ -> unpackRB (c:acc) (i-1)
--
-- which compiles into a continuation for the outer case, with each
-- branch of the inner case building a C# and then jumping to the
-- continuation. We'd rather not have this extra jump, which makes
-- quite a difference to performance (see #5536) It turns out that
-- matching on the C# directly causes GHC to do the case-of-case,
-- giving much straighter code.
-- -----------------------------------------------------------------------------
-- hGetContents
-- hGetContents on a DuplexHandle only affects the read side: you can
-- carry on writing to it afterwards.
-- | Computation 'hGetContents' @hdl@ returns the list of characters
-- corresponding to the unread portion of the channel or file managed
-- by @hdl@, which is put into an intermediate state, /semi-closed/.
-- In this state, @hdl@ is effectively closed,
-- but items are read from @hdl@ on demand and accumulated in a special
-- list returned by 'hGetContents' @hdl@.
--
-- Any operation that fails because a handle is closed,
-- also fails if a handle is semi-closed. The only exception is 'hClose'.
-- A semi-closed handle becomes closed:
--
-- * if 'hClose' is applied to it;
--
-- * if an I\/O error occurs when reading an item from the handle;
--
-- * or once the entire contents of the handle has been read.
--
-- Once a semi-closed handle becomes closed, the contents of the
-- associated list becomes fixed. The contents of this final list is
-- only partially specified: it will contain at least all the items of
-- the stream that were evaluated prior to the handle becoming closed.
--
-- Any I\/O errors encountered while a handle is semi-closed are simply
-- discarded.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetContents :: Handle -> IO String
hGetContents handle =
wantReadableHandle "hGetContents" handle $ \handle_ -> do
xs <- lazyRead handle
return (handle_{ haType=SemiClosedHandle}, xs )
-- Note that someone may close the semi-closed handle (or change its
-- buffering), so each time these lazy read functions are pulled on,
-- they have to check whether the handle has indeed been closed.
lazyRead :: Handle -> IO String
lazyRead handle =
unsafeInterleaveIO $
withHandle "hGetContents" handle $ \ handle_ -> do
case haType handle_ of
SemiClosedHandle -> lazyReadBuffered handle handle_
ClosedHandle
-> ioException
(IOError (Just handle) IllegalOperation "hGetContents"
"delayed read on closed handle" Nothing Nothing)
_ -> ioException
(IOError (Just handle) IllegalOperation "hGetContents"
"illegal handle type" Nothing Nothing)
lazyReadBuffered :: Handle -> Handle__ -> IO (Handle__, [Char])
lazyReadBuffered h handle_@Handle__{..} = do
buf <- readIORef haCharBuffer
Exception.catch
(do
buf'@Buffer{..} <- getSomeCharacters handle_ buf
lazy_rest <- lazyRead h
(s,r) <- if haInputNL == CRLF
then unpack_nl bufRaw bufL bufR lazy_rest
else do s <- unpack bufRaw bufL bufR lazy_rest
return (s,bufR)
writeIORef haCharBuffer (bufferAdjustL r buf')
return (handle_, s)
)
(\e -> do (handle_', _) <- hClose_help handle_
debugIO ("hGetContents caught: " ++ show e)
-- We might have a \r cached in CRLF mode. So we
-- need to check for that and return it:
let r = if isEOFError e
then if not (isEmptyBuffer buf)
then "\r"
else ""
else
throw (augmentIOError e "hGetContents" h)
return (handle_', r)
)
-- ensure we have some characters in the buffer
getSomeCharacters :: Handle__ -> CharBuffer -> IO CharBuffer
getSomeCharacters handle_@Handle__{..} buf@Buffer{..} =
case bufferElems buf of
-- buffer empty: read some more
0 -> readTextDevice handle_ buf
-- if the buffer has a single '\r' in it and we're doing newline
-- translation: read some more
1 | haInputNL == CRLF -> do
(c,_) <- readCharBuf bufRaw bufL
if c == '\r'
then do -- shuffle the '\r' to the beginning. This is only safe
-- if we're about to call readTextDevice, otherwise it
-- would mess up flushCharBuffer.
-- See [note Buffer Flushing], GHC.IO.Handle.Types
_ <- writeCharBuf bufRaw 0 '\r'
let buf' = buf{ bufL=0, bufR=1 }
readTextDevice handle_ buf'
else do
return buf
-- buffer has some chars in it already: just return it
_otherwise ->
return buf
-- ---------------------------------------------------------------------------
-- hPutChar
-- | Computation 'hPutChar' @hdl ch@ writes the character @ch@ to the
-- file or channel managed by @hdl@. Characters may be buffered if
-- buffering is enabled for @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutChar :: Handle -> Char -> IO ()
hPutChar handle c = do
c `seq` return ()
wantWritableHandle "hPutChar" handle $ \ handle_ -> do
hPutcBuffered handle_ c
hPutcBuffered :: Handle__ -> Char -> IO ()
hPutcBuffered handle_@Handle__{..} c = do
buf <- readIORef haCharBuffer
if c == '\n'
then do buf1 <- if haOutputNL == CRLF
then do
buf1 <- putc buf '\r'
putc buf1 '\n'
else do
putc buf '\n'
writeCharBuffer handle_ buf1
when is_line $ flushByteWriteBuffer handle_
else do
buf1 <- putc buf c
writeCharBuffer handle_ buf1
return ()
where
is_line = case haBufferMode of
LineBuffering -> True
_ -> False
putc buf@Buffer{ bufRaw=raw, bufR=w } c = do
debugIO ("putc: " ++ summaryBuffer buf)
w' <- writeCharBuf raw w c
return buf{ bufR = w' }
-- ---------------------------------------------------------------------------
-- hPutStr
-- We go to some trouble to avoid keeping the handle locked while we're
-- evaluating the string argument to hPutStr, in case doing so triggers another
-- I/O operation on the same handle which would lead to deadlock. The classic
-- case is
--
-- putStr (trace "hello" "world")
--
-- so the basic scheme is this:
--
-- * copy the string into a fresh buffer,
-- * "commit" the buffer to the handle.
--
-- Committing may involve simply copying the contents of the new
-- buffer into the handle's buffer, flushing one or both buffers, or
-- maybe just swapping the buffers over (if the handle's buffer was
-- empty). See commitBuffer below.
-- | Computation 'hPutStr' @hdl s@ writes the string
-- @s@ to the file or channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutStr :: Handle -> String -> IO ()
hPutStr handle str = hPutStr' handle str False
-- | The same as 'hPutStr', but adds a newline character.
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn handle str = hPutStr' handle str True
-- An optimisation: we treat hPutStrLn specially, to avoid the
-- overhead of a single putChar '\n', which is quite high now that we
-- have to encode eagerly.
hPutStr' :: Handle -> String -> Bool -> IO ()
hPutStr' handle str add_nl =
do
(buffer_mode, nl) <-
wantWritableHandle "hPutStr" handle $ \h_ -> do
bmode <- getSpareBuffer h_
return (bmode, haOutputNL h_)
case buffer_mode of
(NoBuffering, _) -> do
hPutChars handle str -- v. slow, but we don't care
when add_nl $ hPutChar handle '\n'
(LineBuffering, buf) -> do
writeBlocks handle True add_nl nl buf str
(BlockBuffering _, buf) -> do
writeBlocks handle False add_nl nl buf str
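-- A minimal sketch of how the three buffer modes above look from the caller's
-- side, assuming a writable handle and the 'System.IO' re-exports:
--
-- > import System.IO
-- >
-- > bufferingDemo :: Handle -> IO ()
-- > bufferingDemo h = do
-- >   hSetBuffering h NoBuffering         -- each Char encoded and written eagerly
-- >   hPutStr h "slow path\n"
-- >   hSetBuffering h LineBuffering       -- flushed at every '\n'
-- >   hPutStrLn h "flushed per line"
-- >   hSetBuffering h (BlockBuffering Nothing)
-- >   hPutStr h "flushed when the buffer fills"
-- >   hFlush h                            -- or explicitly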
hPutChars :: Handle -> [Char] -> IO ()
hPutChars _ [] = return ()
hPutChars handle (c:cs) = hPutChar handle c >> hPutChars handle cs
getSpareBuffer :: Handle__ -> IO (BufferMode, CharBuffer)
getSpareBuffer Handle__{haCharBuffer=ref,
haBuffers=spare_ref,
haBufferMode=mode}
= do
case mode of
NoBuffering -> return (mode, error "no buffer!")
_ -> do
bufs <- readIORef spare_ref
buf <- readIORef ref
case bufs of
BufferListCons b rest -> do
writeIORef spare_ref rest
return ( mode, emptyBuffer b (bufSize buf) WriteBuffer)
BufferListNil -> do
new_buf <- newCharBuffer (bufSize buf) WriteBuffer
return (mode, new_buf)
-- NB. performance-critical code: eyeball the Core.
writeBlocks :: Handle -> Bool -> Bool -> Newline -> Buffer CharBufElem -> String -> IO ()
writeBlocks hdl line_buffered add_nl nl
buf@Buffer{ bufRaw=raw, bufSize=len } s =
let
shoveString :: Int -> [Char] -> [Char] -> IO ()
shoveString !n [] [] = do
commitBuffer hdl raw len n False{-no flush-} True{-release-}
shoveString !n [] rest = do
shoveString n rest []
shoveString !n (c:cs) rest
-- n+1 so we have enough room to write '\r\n' if necessary
| n + 1 >= len = do
commitBuffer hdl raw len n False{-flush-} False
shoveString 0 (c:cs) rest
| c == '\n' = do
n' <- if nl == CRLF
then do
n1 <- writeCharBuf raw n '\r'
writeCharBuf raw n1 '\n'
else do
writeCharBuf raw n c
if line_buffered
then do
-- end of line, so write and flush
commitBuffer hdl raw len n' True{-flush-} False
shoveString 0 cs rest
else do
shoveString n' cs rest
| otherwise = do
n' <- writeCharBuf raw n c
shoveString n' cs rest
in
shoveString 0 s (if add_nl then "\n" else "")
-- -----------------------------------------------------------------------------
-- commitBuffer handle buf sz count flush release
--
-- Write the contents of the buffer 'buf' ('sz' bytes long, containing
-- 'count' bytes of data) to handle (handle must be block or line buffered).
commitBuffer
:: Handle -- handle to commit to
-> RawCharBuffer -> Int -- address and size (in bytes) of buffer
-> Int -- number of bytes of data in buffer
-> Bool -- True <=> flush the handle afterward
-> Bool -- release the buffer?
-> IO ()
commitBuffer hdl !raw !sz !count flush release =
wantWritableHandle "commitBuffer" hdl $ \h_@Handle__{..} -> do
debugIO ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
++ ", flush=" ++ show flush ++ ", release=" ++ show release)
writeCharBuffer h_ Buffer{ bufRaw=raw, bufState=WriteBuffer,
bufL=0, bufR=count, bufSize=sz }
when flush $ flushByteWriteBuffer h_
-- release the buffer if necessary
when release $ do
-- find size of current buffer
old_buf@Buffer{ bufSize=size } <- readIORef haCharBuffer
when (sz == size) $ do
spare_bufs <- readIORef haBuffers
writeIORef haBuffers (BufferListCons raw spare_bufs)
return ()
-- backwards compatibility; the text package uses this
commitBuffer' :: RawCharBuffer -> Int -> Int -> Bool -> Bool -> Handle__
-> IO CharBuffer
commitBuffer' raw sz@(I# _) count@(I# _) flush release h_@Handle__{..}
= do
debugIO ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
++ ", flush=" ++ show flush ++ ", release=" ++ show release)
let this_buf = Buffer{ bufRaw=raw, bufState=WriteBuffer,
bufL=0, bufR=count, bufSize=sz }
writeCharBuffer h_ this_buf
when flush $ flushByteWriteBuffer h_
-- release the buffer if necessary
when release $ do
-- find size of current buffer
old_buf@Buffer{ bufSize=size } <- readIORef haCharBuffer
when (sz == size) $ do
spare_bufs <- readIORef haBuffers
writeIORef haBuffers (BufferListCons raw spare_bufs)
return this_buf
-- ---------------------------------------------------------------------------
-- Reading/writing sequences of bytes.
-- ---------------------------------------------------------------------------
-- hPutBuf
-- | 'hPutBuf' @hdl buf count@ writes @count@ 8-bit bytes from the
-- buffer @buf@ to the handle @hdl@. It returns ().
--
-- 'hPutBuf' ignores the prevailing 'TextEncoding' and 'NewlineMode'
-- on the 'Handle', and writes the bytes directly to the underlying
-- file or device.
--
-- This operation may fail with:
--
-- * 'ResourceVanished' if the handle is a pipe or socket, and the
-- reading end is closed. (If this is a POSIX system, and the program
-- has not asked to ignore SIGPIPE, then a SIGPIPE may be delivered
-- instead, whose default action is to terminate the program).
hPutBuf :: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> IO ()
hPutBuf h ptr count = do _ <- hPutBuf' h ptr count True
return ()
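-- A minimal usage sketch, assuming the 'System.IO' re-export of 'hPutBuf' and
-- 'Foreign.Marshal.Array.withArray':
--
-- > import Data.Word (Word8)
-- > import Foreign.Marshal.Array (withArray)
-- > import System.IO (Handle, hPutBuf)
-- >
-- > writeBytes :: Handle -> [Word8] -> IO ()
-- > writeBytes h bytes =
-- >   withArray bytes $ \ptr ->
-- >     hPutBuf h ptr (length bytes)   -- bytes are written uninterpreted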
hPutBufNonBlocking
:: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> IO Int -- returns: number of bytes written
hPutBufNonBlocking h ptr count = hPutBuf' h ptr count False
hPutBuf':: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> Bool -- allow blocking?
-> IO Int
hPutBuf' handle ptr count can_block
| count == 0 = return 0
| count < 0 = illegalBufferSize handle "hPutBuf" count
| otherwise =
wantWritableHandle "hPutBuf" handle $
\ h_@Handle__{..} -> do
debugIO ("hPutBuf count=" ++ show count)
r <- bufWrite h_ (castPtr ptr) count can_block
-- we must flush if this Handle is set to NoBuffering. If
-- it is set to LineBuffering, be conservative and flush
-- anyway (we didn't check for newlines in the data).
case haBufferMode of
BlockBuffering _ -> do return ()
_line_or_no_buffering -> do flushWriteBuffer h_
return r
bufWrite :: Handle__-> Ptr Word8 -> Int -> Bool -> IO Int
bufWrite h_@Handle__{..} ptr count can_block =
seq count $ do -- strictness hack
old_buf@Buffer{ bufRaw=old_raw, bufR=w, bufSize=size }
<- readIORef haByteBuffer
-- enough room in handle buffer?
if (size - w > count)
-- There's enough room in the buffer:
-- just copy the data in and update bufR.
then do debugIO ("hPutBuf: copying to buffer, w=" ++ show w)
copyToRawBuffer old_raw w ptr count
writeIORef haByteBuffer old_buf{ bufR = w + count }
return count
-- else, we have to flush
else do debugIO "hPutBuf: flushing first"
old_buf' <- Buffered.flushWriteBuffer haDevice old_buf
-- TODO: we should do a non-blocking flush here
writeIORef haByteBuffer old_buf'
-- if we can fit in the buffer, then just loop
if count < size
then bufWrite h_ ptr count can_block
else if can_block
then do writeChunk h_ (castPtr ptr) count
return count
else writeChunkNonBlocking h_ (castPtr ptr) count
writeChunk :: Handle__ -> Ptr Word8 -> Int -> IO ()
writeChunk h_@Handle__{..} ptr bytes
| Just fd <- cast haDevice = RawIO.write (fd::FD) ptr bytes
| otherwise = error "Todo: hPutBuf"
writeChunkNonBlocking :: Handle__ -> Ptr Word8 -> Int -> IO Int
writeChunkNonBlocking h_@Handle__{..} ptr bytes
| Just fd <- cast haDevice = RawIO.writeNonBlocking (fd::FD) ptr bytes
| otherwise = error "Todo: hPutBuf"
-- ---------------------------------------------------------------------------
-- hGetBuf
-- | 'hGetBuf' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached or
-- @count@ 8-bit bytes have been read.
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBuf' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBuf' will behave as if EOF was reached.
--
-- 'hGetBuf' ignores the prevailing 'TextEncoding' and 'NewlineMode'
-- on the 'Handle', and reads bytes directly.
hGetBuf :: Handle -> Ptr a -> Int -> IO Int
hGetBuf h ptr count
| count == 0 = return 0
| count < 0 = illegalBufferSize h "hGetBuf" count
| otherwise =
wantReadableHandle_ "hGetBuf" h $ \ h_@Handle__{..} -> do
flushCharReadBuffer h_
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
<- readIORef haByteBuffer
if isEmptyBuffer buf
then bufReadEmpty h_ buf (castPtr ptr) 0 count
else bufReadNonEmpty h_ buf (castPtr ptr) 0 count
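-- A minimal usage sketch, assuming the 'System.IO' re-export of 'hGetBuf' and
-- 'Foreign.Marshal.Alloc.allocaBytes':
--
-- > import Foreign.Marshal.Alloc (allocaBytes)
-- > import System.IO (Handle, hGetBuf)
-- >
-- > readChunk :: Handle -> Int -> IO Int
-- > readChunk h n =
-- >   allocaBytes n $ \ptr -> do
-- >     got <- hGetBuf h ptr n   -- got < n means EOF arrived first
-- >     -- ... consume the got bytes starting at ptr ...
-- >     return got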
-- small reads go through the buffer, large reads are satisfied by
-- taking data first from the buffer and then direct from the file
-- descriptor.
bufReadNonEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNonEmpty h_@Handle__{..}
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
ptr !so_far !count
= do
let avail = w - r
if (count < avail)
then do
copyFromRawBuffer ptr raw r count
writeIORef haByteBuffer buf{ bufL = r + count }
return (so_far + count)
else do
copyFromRawBuffer ptr raw r avail
let buf' = buf{ bufR=0, bufL=0 }
writeIORef haByteBuffer buf'
let remaining = count - avail
so_far' = so_far + avail
ptr' = ptr `plusPtr` avail
if remaining == 0
then return so_far'
else bufReadEmpty h_ buf' ptr' so_far' remaining
bufReadEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadEmpty h_@Handle__{..}
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
ptr so_far count
| count > sz, Just fd <- cast haDevice = loop fd 0 count
| otherwise = do
(r,buf') <- Buffered.fillReadBuffer haDevice buf
if r == 0
then return so_far
else do writeIORef haByteBuffer buf'
bufReadNonEmpty h_ buf' ptr so_far count
where
loop :: FD -> Int -> Int -> IO Int
loop fd off bytes | bytes <= 0 = return (so_far + off)
loop fd off bytes = do
r <- RawIO.read (fd::FD) (ptr `plusPtr` off) bytes
if r == 0
then return (so_far + off)
else loop fd (off + r) (bytes - r)
-- ---------------------------------------------------------------------------
-- hGetBufSome
-- | 'hGetBufSome' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@. If there is any data available to read,
-- then 'hGetBufSome' returns it immediately; it only blocks if there
-- is no data to be read.
--
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBufSome' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufSome' will behave as if EOF was reached.
--
-- 'hGetBufSome' ignores the prevailing 'TextEncoding' and 'NewlineMode'
-- on the 'Handle', and reads bytes directly.
hGetBufSome :: Handle -> Ptr a -> Int -> IO Int
hGetBufSome h ptr count
| count == 0 = return 0
| count < 0 = illegalBufferSize h "hGetBufSome" count
| otherwise =
wantReadableHandle_ "hGetBufSome" h $ \ h_@Handle__{..} -> do
flushCharReadBuffer h_
buf@Buffer{ bufSize=sz } <- readIORef haByteBuffer
if isEmptyBuffer buf
then case count > sz of -- large read? optimize it with a little special case:
True | Just fd <- haFD h_ -> do RawIO.read fd (castPtr ptr) count
_ -> do (r,buf') <- Buffered.fillReadBuffer haDevice buf
if r == 0
then return 0
else do writeIORef haByteBuffer buf'
bufReadNBNonEmpty h_ buf' (castPtr ptr) 0 (min r count)
-- new count is (min r count), so
-- that bufReadNBNonEmpty will not
-- issue another read.
else
let count' = min count (bufferElems buf)
in bufReadNBNonEmpty h_ buf (castPtr ptr) 0 count'
-- | The file descriptor underlying the handle's device, if the device is an 'FD'.
haFD :: Handle__ -> Maybe FD
haFD h_@Handle__{..} = cast haDevice
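-- A minimal sketch of the difference from 'hGetBuf', assuming the 'System.IO'
-- re-export of 'hGetBufSome': the call returns as soon as some bytes are
-- available, which suits pipes and sockets:
--
-- > import Foreign.Marshal.Alloc (allocaBytes)
-- > import System.IO (Handle, hGetBufSome)
-- >
-- > readSome :: Handle -> IO Int
-- > readSome h =
-- >   allocaBytes 4096 $ \ptr ->
-- >     hGetBufSome h ptr 4096   -- blocks only if no data at all is ready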
-- | 'hGetBufNonBlocking' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached, or
-- @count@ 8-bit bytes have been read, or there is no more data available
-- to read immediately.
--
-- 'hGetBufNonBlocking' is identical to 'hGetBuf', except that it will
-- never block waiting for data to become available, instead it returns
-- only whatever data is available. To wait for data to arrive before
-- calling 'hGetBufNonBlocking', use 'hWaitForInput'.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufNonBlocking' will behave as if EOF was reached.
--
-- 'hGetBufNonBlocking' ignores the prevailing 'TextEncoding' and
-- 'NewlineMode' on the 'Handle', and reads bytes directly.
--
-- NOTE: on Windows, this function does not work correctly; it
-- behaves identically to 'hGetBuf'.
hGetBufNonBlocking :: Handle -> Ptr a -> Int -> IO Int
hGetBufNonBlocking h ptr count
| count == 0 = return 0
| count < 0 = illegalBufferSize h "hGetBufNonBlocking" count
| otherwise =
wantReadableHandle_ "hGetBufNonBlocking" h $ \ h_@Handle__{..} -> do
flushCharReadBuffer h_
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
<- readIORef haByteBuffer
if isEmptyBuffer buf
then bufReadNBEmpty h_ buf (castPtr ptr) 0 count
else bufReadNBNonEmpty h_ buf (castPtr ptr) 0 count
bufReadNBEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNBEmpty h_@Handle__{..}
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
ptr so_far count
| count > sz,
Just fd <- cast haDevice = do
m <- RawIO.readNonBlocking (fd::FD) ptr count
case m of
Nothing -> return so_far
Just n -> return (so_far + n)
| otherwise = do
buf <- readIORef haByteBuffer
(r,buf') <- Buffered.fillReadBuffer0 haDevice buf
case r of
Nothing -> return so_far
Just 0 -> return so_far
Just r -> do
writeIORef haByteBuffer buf'
bufReadNBNonEmpty h_ buf' ptr so_far (min count r)
-- NOTE: new count is min count r
-- so we will just copy the contents of the
-- buffer in the recursive call, and not
-- loop again.
bufReadNBNonEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNBNonEmpty h_@Handle__{..}
buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
ptr so_far count
= do
let avail = w - r
if (count < avail)
then do
copyFromRawBuffer ptr raw r count
writeIORef haByteBuffer buf{ bufL = r + count }
return (so_far + count)
else do
copyFromRawBuffer ptr raw r avail
let buf' = buf{ bufR=0, bufL=0 }
writeIORef haByteBuffer buf'
let remaining = count - avail
so_far' = so_far + avail
ptr' = ptr `plusPtr` avail
if remaining == 0
then return so_far'
else bufReadNBEmpty h_ buf' ptr' so_far' remaining
-- ---------------------------------------------------------------------------
-- memcpy wrappers
copyToRawBuffer :: RawBuffer e -> Int -> Ptr e -> Int -> IO ()
copyToRawBuffer raw off ptr bytes =
withRawBuffer raw $ \praw ->
do _ <- memcpy (praw `plusPtr` off) ptr (fromIntegral bytes)
return ()
copyFromRawBuffer :: Ptr e -> RawBuffer e -> Int -> Int -> IO ()
copyFromRawBuffer ptr raw off bytes =
withRawBuffer raw $ \praw ->
do _ <- memcpy ptr (praw `plusPtr` off) (fromIntegral bytes)
return ()
foreign import ccall unsafe "memcpy"
memcpy :: Ptr a -> Ptr a -> CSize -> IO (Ptr ())
-----------------------------------------------------------------------------
-- Internal Utils
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn sz =
ioException (IOError (Just handle)
InvalidArgument fn
("illegal buffer size " ++ showsPrec 9 sz [])
Nothing Nothing)
| bitemyapp/ghc | libraries/base/GHC/IO/Handle/Text.hs | bsd-3-clause | 38,280 | 0 | 26 | 12,018 | 7,734 | 3,947 | 3,787 | 560 | 7 |
module Utils where
import qualified Data.DList as DList
-- | Map the given partial function over the longest prefix on which it
-- succeeds: the first component collects the 'Just' results in order, the
-- second is the remaining suffix starting at the first element that maps to
-- 'Nothing'.
breakWith :: (a -> Maybe b) -> [a] -> ([b], [a])
breakWith f = go mempty
where
go accum (x:xs)
| Just y <- f x = go (accum `DList.snoc` y) xs
| otherwise = (DList.toList accum, x:xs)
go accum [] = (DList.toList accum, [])
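-- A small usage sketch (hypothetical helper, not used elsewhere):
--
-- > import Data.Char (digitToInt, isDigit)
-- >
-- > leadingDigits :: String -> ([Int], String)
-- > leadingDigits = breakWith (\c -> if isDigit c then Just (digitToInt c) else Nothing)
-- >
-- > -- leadingDigits "123abc" == ([1,2,3], "abc")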
| bgamari/diff-utils | src/Utils.hs | bsd-3-clause | 312 | 0 | 11 | 87 | 161 | 86 | 75 | 8 | 2 |
{-
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2001-2003
--
-- Access to system tools: gcc, cp, rm etc
--
-----------------------------------------------------------------------------
-}
{-# LANGUAGE CPP, MultiWayIf, ScopedTypeVariables #-}
module SysTools (
-- * Initialisation
initSysTools,
lazyInitLlvmConfig,
-- * Interface to system tools
module SysTools.Tasks,
module SysTools.Info,
linkDynLib,
copy,
copyWithHeader,
-- * General utilities
Option(..),
expandTopDir,
-- * Platform-specifics
libmLinkOpts,
-- * Mac OS X frameworks
getPkgFrameworkOpts,
getFrameworkOpts
) where
#include "HsVersions.h"
import GhcPrelude
import GHC.Settings
import Module
import Packages
import Outputable
import ErrUtils
import GHC.Platform
import DynFlags
import Control.Monad.Trans.Except (runExceptT)
import System.FilePath
import System.IO
import System.IO.Unsafe (unsafeInterleaveIO)
import SysTools.ExtraObj
import SysTools.Info
import SysTools.Tasks
import SysTools.BaseDir
import SysTools.Settings
{-
Note [How GHC finds toolchain utilities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
SysTools.initSysProgs figures out exactly where all the auxiliary programs
are, and initialises mutable variables to make it easy to call them.
To do this, it makes use of definitions in Config.hs, which is a Haskell
file containing variables whose value is figured out by the build system.
Config.hs contains two sorts of things
cGCC, The *names* of the programs
cCPP e.g. cGCC = gcc
cUNLIT cCPP = gcc -E
etc They do *not* include paths
cUNLIT_DIR The *path* to the directory containing unlit, split etc
cSPLIT_DIR *relative* to the root of the build tree,
for use when running *in-place* in a build tree (only)
---------------------------------------------
NOTES for an ALTERNATIVE scheme (i.e *not* what is currently implemented):
Another hare-brained scheme for simplifying the current tool location
nightmare in GHC: Simon originally suggested using another
configuration file along the lines of GCC's specs file - which is fine
except that it means adding code to read yet another configuration
file. What I didn't notice is that the current package.conf is
general enough to do this:
Package
{name = "tools", import_dirs = [], source_dirs = [],
library_dirs = [], hs_libraries = [], extra_libraries = [],
include_dirs = [], c_includes = [], package_deps = [],
extra_ghc_opts = ["-pgmc/usr/bin/gcc","-pgml${topdir}/bin/unlit", ... etc.],
extra_cc_opts = [], extra_ld_opts = []}
Which would have the advantage that we get to collect together in one
place the path-specific package stuff with the path-specific tool
stuff.
End of NOTES
---------------------------------------------
************************************************************************
* *
\subsection{Initialisation}
* *
************************************************************************
-}
-- Note [LLVM configuration]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- The `llvm-targets` and `llvm-passes` files are shipped with GHC and contain
-- information needed by the LLVM backend to invoke `llc` and `opt`.
-- Specifically:
--
-- * llvm-targets maps autoconf host triples to the corresponding LLVM
-- `data-layout` declarations. This information is extracted from clang using
-- the script in utils/llvm-targets/gen-data-layout.sh and should be updated
-- whenever we target a new version of LLVM.
--
-- * llvm-passes maps GHC optimization levels to sets of LLVM optimization
-- flags that GHC should pass to `opt`.
--
-- This information is contained in files rather the GHC source to allow users
-- to add new targets to GHC without having to recompile the compiler.
--
-- Since this information is only needed by the LLVM backend we load it lazily
-- with unsafeInterleaveIO. Consequently it is important that we lazily pattern
-- match on LlvmConfig until we actually need its contents.
lazyInitLlvmConfig :: String
-> IO LlvmConfig
lazyInitLlvmConfig top_dir
= unsafeInterleaveIO $ do -- see Note [LLVM configuration]
targets <- readAndParse "llvm-targets" mkLlvmTarget
passes <- readAndParse "llvm-passes" id
return $ LlvmConfig { llvmTargets = targets, llvmPasses = passes }
where
readAndParse name builder =
do let llvmConfigFile = top_dir </> name
llvmConfigStr <- readFile llvmConfigFile
case maybeReadFuzzy llvmConfigStr of
Just s -> return (fmap builder <$> s)
Nothing -> pgmError ("Can't parse " ++ show llvmConfigFile)
mkLlvmTarget :: (String, String, String) -> LlvmTarget
mkLlvmTarget (dl, cpu, attrs) = LlvmTarget dl cpu (words attrs)
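-- For intuition, an entry of the @llvm-targets@ file parses (via
-- 'maybeReadFuzzy') as a pair of a triple, roughly of the shape below; the
-- concrete strings shipped with GHC vary by release and are only indicative
-- here:
--
-- > ("x86_64-unknown-linux",
-- >   ("e-m:e-i64:64-f80:128-n8:16:32:64-S128", "x86-64", ""))
--
-- i.e. an autoconf triple paired with (data-layout, CPU, attributes), which
-- 'mkLlvmTarget' converts into an 'LlvmTarget'.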
initSysTools :: String -- TopDir path
-> IO Settings -- Set all the mutable variables above, holding
-- (a) the system programs
-- (b) the package-config file
-- (c) the GHC usage message
initSysTools top_dir = do
res <- runExceptT $ initSettings top_dir
case res of
Right a -> pure a
Left (SettingsError_MissingData msg) -> pgmError msg
Left (SettingsError_BadData msg) -> pgmError msg
{- Note [Windows stack usage]
See: #8870 (and #8834 for related info) and #12186
On Windows, occasionally we need to grow the stack. In order to do
this, we would normally just bump the stack pointer - but there's a
catch on Windows.
If the stack pointer is bumped by more than a single page, then the
pages between the initial pointer and the resulting location must be
properly committed by the Windows virtual memory subsystem. This is
only needed in the event we bump by more than one page (i.e. 4097 bytes
or more).
Windows compilers solve this by emitting a call to a special function
called _chkstk, which does this committing of the pages for you.
The reason this was causing a segfault is that the new code generator
tends to generate larger functions, so we needed more stack space in
GHC itself. In the x86 codegen, we needed approximately 12kb of stack
space in one go, which caused the process to segfault, as the
intervening pages were not committed.
GCC can emit such a check for us automatically but only when the flag
-fstack-check is used.
See https://gcc.gnu.org/onlinedocs/gnat_ugn/Stack-Overflow-Checking.html
for more information.
-}
copy :: DynFlags -> String -> FilePath -> FilePath -> IO ()
copy dflags purpose from to = copyWithHeader dflags purpose Nothing from to
copyWithHeader :: DynFlags -> String -> Maybe String -> FilePath -> FilePath
-> IO ()
copyWithHeader dflags purpose maybe_header from to = do
showPass dflags purpose
hout <- openBinaryFile to WriteMode
hin <- openBinaryFile from ReadMode
ls <- hGetContents hin -- inefficient, but it'll do for now. ToDo: speed up
maybe (return ()) (header hout) maybe_header
hPutStr hout ls
hClose hout
hClose hin
where
-- write the header string in UTF-8. The header is something like
-- {-# LINE "foo.hs" #-}
-- and we want to make sure a Unicode filename isn't mangled.
header h str = do
hSetEncoding h utf8
hPutStr h str
hSetBinaryMode h True
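-- A minimal sketch of the intended use of the header, following the comment
-- above (the file names here are placeholders):
--
-- > copyStubWithLine :: DynFlags -> FilePath -> FilePath -> IO ()
-- > copyStubWithLine dflags from to =
-- >   copyWithHeader dflags "Copying stub file"
-- >                  (Just ("{-# LINE 1 \"" ++ from ++ "\" #-}\n"))
-- >                  from to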
{-
************************************************************************
* *
\subsection{Support code}
* *
************************************************************************
-}
linkDynLib :: DynFlags -> [String] -> [InstalledUnitId] -> IO ()
linkDynLib dflags0 o_files dep_packages
= do
let -- This is a rather ugly hack to fix dynamically linked
-- GHC on Windows. If GHC is linked with -threaded, then
-- it links against libHSrts_thr. But if base is linked
-- against libHSrts, then both end up getting loaded,
-- and things go wrong. We therefore link the libraries
-- with the same RTS flags that we link GHC with.
dflags1 = if platformMisc_ghcThreaded $ platformMisc dflags0
then addWay' WayThreaded dflags0
else dflags0
dflags2 = if platformMisc_ghcDebugged $ platformMisc dflags1
then addWay' WayDebug dflags1
else dflags1
dflags = updateWays dflags2
verbFlags = getVerbFlags dflags
o_file = outputFile dflags
pkgs <- getPreloadPackagesAnd dflags dep_packages
let pkg_lib_paths = collectLibraryPaths dflags pkgs
let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
get_pkg_lib_path_opts l
| ( osElfTarget (platformOS (targetPlatform dflags)) ||
osMachOTarget (platformOS (targetPlatform dflags)) ) &&
dynLibLoader dflags == SystemDependent &&
WayDyn `elem` ways dflags
= ["-L" ++ l, "-Xlinker", "-rpath", "-Xlinker", l]
-- See Note [-Xlinker -rpath vs -Wl,-rpath]
| otherwise = ["-L" ++ l]
let lib_paths = libraryPaths dflags
let lib_path_opts = map ("-L"++) lib_paths
-- We don't want to link our dynamic libs against the RTS package,
-- because the RTS lib comes in several flavours and we want to be
-- able to pick the flavour when a binary is linked.
-- On Windows we need to link the RTS import lib as Windows does
-- not allow undefined symbols.
-- The RTS library path is still added to the library search path
-- above in case the RTS is being explicitly linked in (see #3807).
let platform = targetPlatform dflags
os = platformOS platform
pkgs_no_rts = case os of
OSMinGW32 ->
pkgs
_ ->
filter ((/= rtsUnitId) . packageConfigId) pkgs
let pkg_link_opts = let (package_hs_libs, extra_libs, other_flags) = collectLinkOpts dflags pkgs_no_rts
in package_hs_libs ++ extra_libs ++ other_flags
-- probably _stub.o files
-- and last temporary shared object file
let extra_ld_inputs = ldInputs dflags
-- frameworks
pkg_framework_opts <- getPkgFrameworkOpts dflags platform
(map unitId pkgs)
let framework_opts = getFrameworkOpts dflags platform
case os of
OSMinGW32 -> do
-------------------------------------------------------------
-- Making a DLL
-------------------------------------------------------------
let output_fn = case o_file of
Just s -> s
Nothing -> "HSdll.dll"
runLink dflags (
map Option verbFlags
++ [ Option "-o"
, FileOption "" output_fn
, Option "-shared"
] ++
[ FileOption "-Wl,--out-implib=" (output_fn ++ ".a")
| gopt Opt_SharedImplib dflags
]
++ map (FileOption "") o_files
-- Permit the linker to auto link _symbol to _imp_symbol
-- This lets us link against DLLs without needing an "import library"
++ [Option "-Wl,--enable-auto-import"]
++ extra_ld_inputs
++ map Option (
lib_path_opts
++ pkg_lib_path_opts
++ pkg_link_opts
))
_ | os == OSDarwin -> do
-------------------------------------------------------------------
-- Making a darwin dylib
-------------------------------------------------------------------
-- About the options used for Darwin:
-- -dynamiclib
-- Apple's way of saying -shared
-- -undefined dynamic_lookup:
-- Without these options, we'd have to specify the correct
-- dependencies for each of the dylibs. Note that we could
-- (and should) do without this for all libraries except
-- the RTS; all we need to do is to pass the correct
-- HSfoo_dyn.dylib files to the link command.
-- This feature requires Mac OS X 10.3 or later; there is
-- a similar feature, -flat_namespace -undefined suppress,
-- which works on earlier versions, but it has other
-- disadvantages.
-- -single_module
-- Build the dynamic library as a single "module", i.e. no
-- dynamic binding nonsense when referring to symbols from
-- within the library. The NCG assumes that this option is
-- specified (on i386, at least).
-- -install_name
-- Mac OS/X stores the path where a dynamic library is (to
-- be) installed in the library itself. It's called the
-- "install name" of the library. Then any library or
-- executable that links against it before it's installed
-- will search for it in its ultimate install location.
-- By default we set the install name to the absolute path
-- at build time, but it can be overridden by the
-- -dylib-install-name option passed to ghc. Cabal does
-- this.
-------------------------------------------------------------------
let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }
instName <- case dylibInstallName dflags of
Just n -> return n
Nothing -> return $ "@rpath" `combine` (takeFileName output_fn)
runLink dflags (
map Option verbFlags
++ [ Option "-dynamiclib"
, Option "-o"
, FileOption "" output_fn
]
++ map Option o_files
++ [ Option "-undefined",
Option "dynamic_lookup",
Option "-single_module" ]
++ (if platformArch platform == ArchX86_64
then [ ]
else [ Option "-Wl,-read_only_relocs,suppress" ])
++ [ Option "-install_name", Option instName ]
++ map Option lib_path_opts
++ extra_ld_inputs
++ map Option framework_opts
++ map Option pkg_lib_path_opts
++ map Option pkg_link_opts
++ map Option pkg_framework_opts
++ [ Option "-Wl,-dead_strip_dylibs" ]
)
_ -> do
-------------------------------------------------------------------
-- Making a DSO
-------------------------------------------------------------------
let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }
unregisterised = platformUnregisterised (targetPlatform dflags)
let bsymbolicFlag = -- we need symbolic linking to resolve
-- non-PIC intra-package-relocations for
-- performance (where symbolic linking works)
-- See Note [-Bsymbolic assumptions by GHC]
["-Wl,-Bsymbolic" | not unregisterised]
runLink dflags (
map Option verbFlags
++ libmLinkOpts
++ [ Option "-o"
, FileOption "" output_fn
]
++ map Option o_files
++ [ Option "-shared" ]
++ map Option bsymbolicFlag
-- Set the library soname. We use -h rather than -soname as
-- Solaris 10 doesn't support the latter:
++ [ Option ("-Wl,-h," ++ takeFileName output_fn) ]
++ extra_ld_inputs
++ map Option lib_path_opts
++ map Option pkg_lib_path_opts
++ map Option pkg_link_opts
)
-- | Some platforms require that we explicitly link against @libm@ if any
-- math-y things are used (which we assume to include all programs). See #14022.
libmLinkOpts :: [Option]
libmLinkOpts =
#if defined(HAVE_LIBM)
[Option "-lm"]
#else
[]
#endif
getPkgFrameworkOpts :: DynFlags -> Platform -> [InstalledUnitId] -> IO [String]
getPkgFrameworkOpts dflags platform dep_packages
| platformUsesFrameworks platform = do
pkg_framework_path_opts <- do
pkg_framework_paths <- getPackageFrameworkPath dflags dep_packages
return $ map ("-F" ++) pkg_framework_paths
pkg_framework_opts <- do
pkg_frameworks <- getPackageFrameworks dflags dep_packages
return $ concat [ ["-framework", fw] | fw <- pkg_frameworks ]
return (pkg_framework_path_opts ++ pkg_framework_opts)
| otherwise = return []
getFrameworkOpts :: DynFlags -> Platform -> [String]
getFrameworkOpts dflags platform
| platformUsesFrameworks platform = framework_path_opts ++ framework_opts
| otherwise = []
where
framework_paths = frameworkPaths dflags
framework_path_opts = map ("-F" ++) framework_paths
frameworks = cmdlineFrameworks dflags
-- reverse because they're added in reverse order from the cmd line:
framework_opts = concat [ ["-framework", fw]
| fw <- reverse frameworks ]
{-
Note [-Bsymbolic assumptions by GHC]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC has a few assumptions about interaction of relocations in NCG and linker:
1. -Bsymbolic resolves internal references when the shared library is linked,
which is important for performance.
2. When there is a reference to data in a shared library from the main program,
the runtime linker relocates the data object into the main program using an
R_*_COPY relocation.
3. If we used -Bsymbolic, then this results in multiple copies of the data
object, because some references have already been resolved to point to the
original instance. This is bad!
We work around [3.] for native compiled code by avoiding the generation of
R_*_COPY relocations.
Unregisterised compiler can't evade R_*_COPY relocations easily thus we disable
-Bsymbolic linking there.
See related tickets: #4210, #15338
-}
| sdiehl/ghc | compiler/main/SysTools.hs | bsd-3-clause | 18,898 | 0 | 28 | 5,674 | 2,111 | 1,096 | 1,015 | 195 | 11 |
{-# LANGUAGE QuasiQuotes #-}
module Test0 () where
import LiquidHaskell
[lq| type GeNum a N = {v: a | N <= v} |]
[lq| type PosInt = GeNum Int {0} |]
[lq| myabs :: Int -> PosInt |]
myabs x = if (x > 0) then x else (-x)
[lq| incr :: x:Int -> GeNum Int {x} |]
incr x = x + 1
| spinda/liquidhaskell | tests/gsoc15/working/pos/alias01.hs | bsd-3-clause | 283 | 0 | 7 | 75 | 79 | 50 | 29 | 9 | 2 |
{-
Gifcurry
(C) 2018 David Lettier
lettier.com
-}
{-# LANGUAGE
DuplicateRecordFields
#-}
module GuiRecords where
import Data.IORef
import Data.Int
import qualified GI.Gtk
import qualified Graphics.Rendering.Pango.Font as GRPF
import GI.Gst
data GuiComponents =
GuiComponents
{ window :: GI.Gtk.Window
, startTimeSpinButton :: GI.Gtk.SpinButton
, endTimeSpinButton :: GI.Gtk.SpinButton
, widthSpinButton :: GI.Gtk.SpinButton
, fpsSpinButton :: GI.Gtk.SpinButton
, colorCountSpinButton :: GI.Gtk.SpinButton
, leftCropSpinButton :: GI.Gtk.SpinButton
, rightCropSpinButton :: GI.Gtk.SpinButton
, topCropSpinButton :: GI.Gtk.SpinButton
, bottomCropSpinButton :: GI.Gtk.SpinButton
, inFileChooserButton :: GI.Gtk.Button
, inFileChooserDialogCancelButton :: GI.Gtk.Button
, inFileChooserDialogOpenButton :: GI.Gtk.Button
, outFileChooserDialogCancelButton :: GI.Gtk.Button
, outFileChooserDialogViewButton :: GI.Gtk.Button
, outFileChooserDialogSaveButton :: GI.Gtk.Button
, outFileButton :: GI.Gtk.Button
, textOverlaysAddButton :: GI.Gtk.Button
, textOverlaysOpenButton :: GI.Gtk.Button
, textOverlaysSaveButton :: GI.Gtk.Button
, textOverlaysRemoveAllButton :: GI.Gtk.Button
, confirmMessageDialogYesButton :: GI.Gtk.Button
, confirmMessageDialogNoButton :: GI.Gtk.Button
, aboutButton :: GI.Gtk.Button
, aboutDialogCloseButton :: GI.Gtk.Button
, giphyUploadButton :: GI.Gtk.Button
, imgurUploadButton :: GI.Gtk.Button
, outFileChooserDialogGifRadioButton :: GI.Gtk.RadioButton
, fileSizeToggleButton :: GI.Gtk.ToggleButton
, cropToggleButton :: GI.Gtk.ToggleButton
, textOverlaysToggleButton :: GI.Gtk.ToggleButton
, uploadToggleButton :: GI.Gtk.ToggleButton
, videoPreviewPauseToggleButton :: GI.Gtk.ToggleButton
, ditherToggleButton :: GI.Gtk.ToggleButton
, inFileChooserDialogLabel :: GI.Gtk.Label
, inFileChooserButtonLabel :: GI.Gtk.Label
, startTimeAdjustment :: GI.Gtk.Adjustment
, endTimeAdjustment :: GI.Gtk.Adjustment
, widthAdjustment :: GI.Gtk.Adjustment
, fpsAdjustment :: GI.Gtk.Adjustment
, colorCountAdjustment :: GI.Gtk.Adjustment
, aboutDialogLabel :: GI.Gtk.Label
, statusLabel :: GI.Gtk.Label
, sidebarControlsPreviewbox :: GI.Gtk.Box
, mainPreviewBox :: GI.Gtk.Box
, imagesPreviewBox :: GI.Gtk.Box
, videoPreviewBox :: GI.Gtk.Box
, videoPreviewOverlayChildBox :: GI.Gtk.Box
, cropSpinButtonsBox :: GI.Gtk.Box
, textOverlaysMainBox :: GI.Gtk.Box
, textOverlaysBox :: GI.Gtk.Box
, uploadBox :: GI.Gtk.Box
, fileSizeSpinButtonsGrid :: GI.Gtk.Grid
, videoPreviewDrawingArea :: GI.Gtk.DrawingArea
, timeSlicesDrawingArea :: GI.Gtk.DrawingArea
, firstFramePreviewImageDrawingArea :: GI.Gtk.DrawingArea
, lastFramePreviewImageDrawingArea :: GI.Gtk.DrawingArea
, inFileChooserButtonImage :: GI.Gtk.Image
, firstFrameImage :: GI.Gtk.Image
, lastFrameImage :: GI.Gtk.Image
, inFileChooserDialog :: GI.Gtk.Dialog
, outFileChooserDialog :: GI.Gtk.FileChooserDialog
, textOverlaysOpenDialog :: GI.Gtk.FileChooserDialog
, textOverlaysSaveDialog :: GI.Gtk.FileChooserDialog
, confirmMessageDialog :: GI.Gtk.MessageDialog
, aboutDialog :: GI.Gtk.Dialog
, inFileChooserWidget :: GI.Gtk.FileChooserWidget
, outFileChooserDialogGifFileFilter :: GI.Gtk.FileFilter
, outFileChooserDialogVideoFileFilter :: GI.Gtk.FileFilter
, maybeVideoPreviewWidget :: Maybe GI.Gtk.Widget
, maybePlaybinElement :: Maybe GI.Gst.Element
, temporaryDirectory :: FilePath
, textOverlaysRef :: IORef [GuiTextOverlayComponents]
, guiInFilePropertiesRef :: IORef GuiInFileProperties
, guiPreviewStateRef :: IORef GuiPreviewState
}
data GuiPreviewState =
GuiPreviewState
{ maybeInFilePath :: Maybe String
, maybeInFileLoadedAt :: Maybe Int
, maybeStartTime :: Maybe Double
, maybeEndTime :: Maybe Double
, maybeColorCount :: Maybe Double
, maybeDither :: Maybe Bool
, loopRunning :: Bool
}
data GuiInFileProperties =
GuiInFileProperties
{ inFileUri :: String
, inFileFps :: Double
, inFileDuration :: Double
, inFileWidth :: Double
, inFileHeight :: Double
, inFileLoadedAt :: Int
}
data GuiTextOverlayComponents =
GuiTextOverlayComponents
{ textOverlayId :: Int
, textOverlayBox :: GI.Gtk.Box
, textOverlayVisibilityBox :: GI.Gtk.Box
, textOverlayVisibilityToggleButton :: GI.Gtk.ToggleButton
, textOverlayHorizontalSpinButton :: GI.Gtk.SpinButton
, textOverlayVerticalSpinButton :: GI.Gtk.SpinButton
, textOverlayStartTimeSpinButton :: GI.Gtk.SpinButton
, textOverlayEndTimeSpinButton :: GI.Gtk.SpinButton
, textOverlayRotationSpinButton :: GI.Gtk.SpinButton
, textOverlayOutlineSizeSpinButton :: GI.Gtk.SpinButton
, textOverlayOutlineColorButton :: GI.Gtk.ColorButton
, textOverlayFillColorButton :: GI.Gtk.ColorButton
, textOverlayTextEntry :: GI.Gtk.Entry
, textOverlayFontButton :: GI.Gtk.FontButton
, textOverlayCloneButton :: GI.Gtk.Button
, textOverlayRemoveButton :: GI.Gtk.Button
, textOverlayOrderUpButton :: GI.Gtk.Button
, textOverlayOrderDownButton :: GI.Gtk.Button
}
data GuiTextOverlayData =
GuiTextOverlayData
{ textOverlayControlsVisible :: Bool
, textOverlayText :: String
, textOverlayHorizontal :: Double
, textOverlayVertical :: Double
, textOverlayStartTime :: Double
, textOverlayEndTime :: Double
, textOverlayRotation :: Int32
, textOverlayOutlineSize :: Int32
, textOverlayOutlineColor :: String
, textOverlayFillColor :: String
, textOverlayMaybeFontDesc :: Maybe GRPF.FontDescription
}
data GuiPreviewFunctionArgs =
GuiPreviewFunctionArgs
{ guiComponents :: GuiComponents
, inFilePath :: String
, startTime :: Double
, endTime :: Double
, colorCount :: Double
, inFileWidth :: Double
, inFileHeight :: Double
, dither :: Bool
, inFilePathChanged :: Bool
, inFileLoadedAtChanged :: Bool
, startTimeChanged :: Bool
, endTimeChanged :: Bool
, colorCountChanged :: Bool
, ditherChanged :: Bool
}
data GuiMakeFramePreviewFunctionArgs =
GuiMakeFramePreviewFunctionArgs
{ inFilePath :: String
, startTime :: Double
, endTime :: Double
, colorCount :: Double
, previewWidth :: Double
, dither :: Bool
, firstFrameImage :: GI.Gtk.Image
, lastFrameImage :: GI.Gtk.Image
, temporaryDirectory :: String
, window :: GI.Gtk.Window
}
data GuiSetOrResetFramePrevewFunctionArgs =
GuiSetOrResetFramePrevewFunctionArgs
{ inputValid :: Bool
, inFilePath :: String
, outFilePath :: String
, time :: Double
, colorCount :: Double
, previewWidth :: Double
, dither :: Bool
, image :: GI.Gtk.Image
, window :: GI.Gtk.Window
}
defaultGuiPreviewState :: GuiPreviewState
defaultGuiPreviewState =
GuiPreviewState
{ maybeInFilePath = Nothing
, maybeInFileLoadedAt = Nothing
, maybeStartTime = Nothing
, maybeEndTime = Nothing
, maybeColorCount = Nothing
, maybeDither = Nothing
, loopRunning = False
}
defaultGuiInFileProperties
:: GuiInFileProperties
defaultGuiInFileProperties =
GuiInFileProperties
{ inFileUri = ""
, inFileLoadedAt = 0
, inFileFps = 0.0
, inFileDuration = 0.0
, inFileWidth = 0.0
, inFileHeight = 0.0
}
| lettier/gifcurry | src/gui/GuiRecords.hs | bsd-3-clause | 9,187 | 0 | 10 | 3,110 | 1,437 | 937 | 500 | 195 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Sesyrel.FaultTree.Dynamic (
compileDynamicFaultTree
, logDynamicFactorInfo
, DynamicFactor(..)
, dynamicFactorExpr
) where
import Sesyrel.FaultTree.Base hiding (Variable)
import qualified Sesyrel.FaultTree.Base as F (Variable(..))
import Sesyrel.Expression
import Sesyrel.Texify
import Control.Monad (forM_)
import Control.Monad.Logger
import Data.Monoid ((<>))
import qualified Data.IntMap.Strict as IM (delete, lookup, singleton, empty)
import Data.Maybe (fromMaybe)
import Data.List (delete, sort)
--data DynamicFactor = DynamicFactor (Expr Rational, [F.Variable])
data DynamicFactor = DynamicFactor [F.Variable] (Expr Rational)
dynamicFactorExpr :: DynamicFactor -> Expr Rational
dynamicFactorExpr (DynamicFactor _ e) = e
instance Texifiable DynamicFactor where
texify' (DynamicFactor _ expr) = "$ " <> texify' expr <> " $ "
instance Factor DynamicFactor where
variables (DynamicFactor vs _) = vs
eliminate var (DynamicFactor vars expr) | not $ var `elem` vars = DynamicFactor vars expr
| otherwise = DynamicFactor vars' expr'
where
vars' = delete var vars
expr' = integrate expr (unVariable var) (Constant 0) (Constant plusInfinity)
times (DynamicFactor vs1 expr1) (DynamicFactor vs2 expr2) =
DynamicFactor (vs1 `unionVariables` vs2) $ productExpression [expr1, expr2]
one = DynamicFactor [] $ productExpression []
texifyVariableElimination (F.Variable var) (DynamicFactor _ expr) =
"$ " <> "\\int\\limits_0^{+\\infty} "
<> texify expr <> "\\textrm{dx}_{" <> texify var
<> "}$ "
productExpression :: Num a => [Expr a] -> Expr a
productExpression es = ExprN (Term (Atom 1 emptyBundle emptyBundle emptyBundle IM.empty) es)
logDynamicFactorInfo :: MonadLogger m => DynamicFactor -> [Double] -> m ()
logDynamicFactorInfo (DynamicFactor [F.Variable var] expr) points = do
let mttf = fromRational $ calcMttf var expr
distr = calcDistribution var expr
texifyPoint p v =
logInfoN ("\\\\ $ F(" <> texifyDoubleE 3 p <> ") = " <> texifyDoubleE 3 v <> " $\n")
logInfoN "\n\\subsection{Some information}\n\n"
logInfoN $ "$ F(x_{" <> texify var <> "}) = " <> texify distr <> "$ , $ MTTF = " <> texifyDoubleE 3 mttf <> " $\n"
let distr' = mapExprType fromRational distr
logInfoN "\nEvaluation of some points in distribution:\n"
forM_ points $ \p ->
texifyPoint p (evalExpr distr' (IM.singleton var p))
logInfoN "\n"
logDynamicFactorInfo _ _ = return ()
calcMttf :: (Eq a, Fractional a) => Int -> Expr a -> a
calcMttf var = sum . map mapTerm . toList
where
checkAtom (Atom _ ds us is expnt) =
nullBundle ds && nullBundle us && nullBundle is && all (== 0) (IM.delete var expnt)
mapTerm (Term a@(Atom k _ _ _ expnt) []) | checkAtom a =
k / (fromMaybe (error "calcMttf: lookup fail") (IM.lookup var expnt)) ^ (2 :: Integer)
| otherwise =
error "calcMttf: too complex expr"
mapTerm (Term _ _) = error "calcMttf: expr is not atomized"
calcDistribution :: (Ord a, Fractional a, Texifiable a, RealInfinite a) => Int -> Expr a -> Expr a
calcDistribution v e = substitute (-1) (Variable v) $ integrate e v (Constant 0) (Variable (-1))
distributionLambda :: Int -> a -> Expr a
distributionLambda variable lambda =
let expnt = IM.singleton variable lambda
in ExprN $ Term (Atom lambda emptyBundle emptyBundle emptyBundle expnt) []
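-- For intuition: 'distributionLambda' encodes the exponential density
-- f(x) = lambda * exp(-lambda * x), so 'calcMttf' applied to it yields
-- lambda / lambda^2 = 1 / lambda, the usual exponential MTTF. A hypothetical
-- check from within this module (these helpers are not exported):
--
-- > calcMttf 0 (distributionLambda 0 0.5) == (2.0 :: Double)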
{-
-- should not be used
distributionCspLambda :: (Num a, Ord a) => Int -> a -> Int -> Expr a
distributionCspLambda varB lambda varA =
let expnt = IM.fromList [(varA, lambda), (varB, -lambda)]
in ExprN $ Term (Atom lambda emptyBundle (makeSingle varB varA) expnt) []
-}
distributionAnd :: (Num a, Ord a) => Int -> Int -> Int -> Expr a
distributionAnd x a b =
let a1 = Atom 1 (makeSingle x b) (makeSingle b a) emptyBundle IM.empty
a2 = Atom 1 (makeSingle x a) (makeSingle a b) emptyBundle IM.empty
in ExprC (Term a1 []) (ExprN (Term a2 []))
distributionOr :: (Num a, Ord a) => Int -> Int -> Int -> Expr a
distributionOr x a b =
let a1 = Atom 1 (makeSingle x a) (makeSingle b a) emptyBundle IM.empty
a2 = Atom 1 (makeSingle x b) (makeSingle a b) emptyBundle IM.empty
in ExprC (Term a1 []) (ExprN (Term a2 []))
{-
-- should not be used
distributionPriorityAnd :: (Num a, Ord a) => Int -> Int -> Int -> Expr a
distributionPriorityAnd x a b =
let atom = Atom 1 (makeSingle x b) (makeSingle b a) IM.empty
in ExprN (Term atom [])
-}
distributionPriorityAndOr :: (Num a, Ord a) => Int -> Int -> Int -> Int -> Expr a
distributionPriorityAndOr x a b c =
let us1 = makeSingle b a `unionBundle` makeSingle c b
us2 = makeSingle b a `unionBundle` makeSingle b c
a1 = Atom 1 (makeSingle x b) us1 emptyBundle IM.empty
a2 = Atom 1 (makeSingle x c) us2 emptyBundle IM.empty
a3 = Atom 1 (makeSingle x c) (makeSingle a b) emptyBundle IM.empty
in fromList [Term a1 [], Term a2 [], Term a3 []]
distributionSwitch :: (Num a, Ord a) => Int -> Int -> Int -> Int -> Expr a
distributionSwitch x s a b =
let us1 = makeSingle s a `unionBundle` makeSingle b a
us2 = makeSingle s a `unionBundle` makeSingle a b
a1 = Atom 1 (makeSingle x b) us1 emptyBundle IM.empty
a2 = Atom 1 (makeSingle x a) us2 emptyBundle IM.empty
a3 = Atom 1 (makeSingle x a) (makeSingle a s) emptyBundle IM.empty
in fromList [Term a1 [], Term a2 [], Term a3 []]
compileDynamicFaultTree :: FaultTree Rational -> [DynamicFactor]
compileDynamicFaultTree (FaultTree ft) = map reNode ft
where
u = unVariable
reNode :: (F.Variable, FaultTreeNode Rational) -> DynamicFactor
reNode (x, FaultTreeLambda k) = DynamicFactor [x] $ distributionLambda (u x) k
reNode (x, FaultTreeAnd a b) = DynamicFactor (sort [x, a, b]) $ distributionAnd (u x) (u a) (u b)
reNode (x, FaultTreeOr a b) = DynamicFactor (sort [x, a, b]) $ distributionOr (u x) (u a) (u b)
reNode (x, FaultTreePriorityAndOr a b c) = DynamicFactor (sort [x, a, b, c]) $ distributionPriorityAndOr (u x) (u a) (u b) (u c)
reNode (x, FaultTreeSwitch s a b) = DynamicFactor (sort [x, s, a, b]) $ distributionSwitch (u x) (u s) (u a) (u b)
reNode (x, f) = error $ "compileDynamicFaultTree: unsupported node found " ++ show f ++ " for variable " ++ show x
| balodja/sesyrel | src/Sesyrel/FaultTree/Dynamic.hs | bsd-3-clause | 6,383 | 0 | 16 | 1,389 | 2,349 | 1,188 | 1,161 | 101 | 6 |
module Main where
import Client
import Server
import Player
import System.IO
import System.IO.Error hiding (catch)
import System.Environment (getArgs)
import Network
import Text.Printf
import Control.Concurrent.STM
import Control.Concurrent.Async
import Control.Exception hiding (handle)
import Control.Monad.Reader
import Prelude hiding (catch)
defaultPort :: Int
defaultPort = 4444
main :: IO ()
main = do
args <- getArgs
case args of
[] -> withSocketsDo $ do bracket (getSock defaultPort) (\s -> sClose s >> putStrLn "Socket closed.") useSock
args -> withSocketsDo $ do let port = read (head args)
bracket (getSock port) (\s -> sClose s >> putStrLn "Socket closed.") useSock
where getSock :: Int -> IO Socket
getSock port = do
sock <- listenOn (PortNumber (fromIntegral port))
printf "Listening on port %d\n" port
return sock
useSock sock = do
[n,e,s,w] <- mapM initialisePlayer ["north","east","south","west"]
race_ (manageConnections n e s w sock) (runServer n e s w)
manageConnections n e s w sock = do
h <- getConn
concurrently (catch (manageClient h)
(\e -> if isEOFError e then putStrLn "A connection closed unexpectedly." else ioError e))
(manageConnections n e s w sock)
return ()
where getConn = do
(h, host, port) <- accept sock
printf "Accepted connection from %s: %s\n" host (show port)
hSetBuffering h LineBuffering
hPutStrLn h "\n\n -- Welcome to H-chu!! -- \n\n\n"
return h
manageClient h = do
openSeats <- availableSeats n e s w
hPutStrLn h $ "The following seats are open: " ++ show openSeats
hPutStrLn h "Choose your seat, or type 'quit' to exit."
l <- hGetLine h
if l == "quit"
then hClose h >> putStrLn "Connection closed."
else case words l of
("north":_) -> tryLink h n
("south":_) -> tryLink h s
("east":_) -> tryLink h e
("west":_) -> tryLink h w
_ -> hPutStrLn h "That seat isn't available." >> manageClient h
tryLink :: Handle -> Player -> IO ()
tryLink h p = do
success <- atomically $ do pHandle <- readTVar $ handle p
case pHandle of
Nothing -> do writeTVar (handle p) (Just h)
return True
Just _ -> do return False
if success
then do hPutStrLn h $ "\nYou have joined the game, occupying the " ++ name p ++ " seat."
putStrLn $ "The " ++ name p ++ " seat is now occupied."
race_ (catch (runReaderT playerInterface p)
(\e -> if isEOFError e
then do putStrLn $ "Connection lost with " ++ name p ++ " seat."
atomically $ writeTVar (handle p) Nothing
else ioError e))
((atomically $ do mb <- readTVar (handle p)
case mb of
Nothing -> return ()
_ -> retry) >> (putStrLn $ "Terminated connection with " ++ name p ++ " seat."))
else hPutStrLn h "Sorry, that seat appears to be unavailable."
availableSeats n e s w = do
valid <- sequence $ map playerConn [n,e,s,w]
return $ concat $ zipWith (\b s -> if b then [] else [s]) valid ["north","east","south","west"]
playerConn :: Player -> IO Bool
playerConn p = do
pHandle <- atomically $ readTVar $ handle p
case pHandle of
Nothing -> return False
Just h -> return True
| thomasathorne/h-chu | src/Main.hs | bsd-3-clause | 3,638 | 0 | 21 | 1,204 | 1,183 | 580 | 603 | 85 | 7 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit (testCase)
import Tests.Syntax
import Tests.Codegen
import Tests.Codegen.Util(utilTestGroup)
tests :: TestTree
tests = testGroup "Compiler tests"
[ testGroup "AST"
[ localOption (QuickCheckMaxSize 6) $
testProperty "roundtrip" roundtripAST
, testGroup "Compare .bond and .json"
[ testCase "attributes" $ compareAST "attributes"
, testCase "basic types" $ compareAST "basic_types"
, testCase "bond_meta types" $ compareAST "bond_meta"
, testCase "complex types" $ compareAST "complex_types"
, testCase "default values" $ compareAST "defaults"
, testCase "empty" $ compareAST "empty"
, testCase "field modifiers" $ compareAST "field_modifiers"
, testCase "generics" $ compareAST "generics"
, testCase "inheritance" $ compareAST "inheritance"
, testCase "type aliases" $ compareAST "aliases"
, testCase "documentation example" $ compareAST "example"
, testCase "simple service syntax" $ compareAST "service"
, testCase "service attributes" $ compareAST "service_attributes"
, testCase "generic service" $ compareAST "generic_service"
, testCase "documentation example" $ compareAST "example"
, testCase "service inheritance" $ compareAST "service_inheritance"
]
]
, testGroup "SchemaDef"
[ verifySchemaDef "attributes" "Foo"
, verifySchemaDef "basic_types" "BasicTypes"
, verifySchemaDef "defaults" "Foo"
, verifySchemaDef "field_modifiers" "Foo"
, verifySchemaDef "inheritance" "Foo"
, verifySchemaDef "alias_key" "foo"
, verifySchemaDef "maybe_blob" "Foo"
, verifySchemaDef "nullable_alias" "foo"
, verifySchemaDef "schemadef" "AliasBase"
, verifySchemaDef "schemadef" "EnumDefault"
, verifySchemaDef "schemadef" "StringTree"
, verifySchemaDef "example" "SomeStruct"
]
, testGroup "Types"
[ testCase "type alias resolution" aliasResolution
]
, testGroup "Codegen Failures (Expect to see errors below check for OK or FAIL)"
[ testCase "Struct default value nothing" $ failBadSyntax "Should fail when default value of a struct field is 'nothing'" "struct_nothing"
, testCase "Enum no default value" $ failBadSyntax "Should fail when an enum field has no default value" "enum_no_default"
, testCase "Alias default value" $ failBadSyntax "Should fail when underlying default value is of the wrong type" "aliases_default"
, testCase "Out of range" $ failBadSyntax "Should fail, out of range for int16" "int_out_of_range"
, testCase "Duplicate method definition in service" $ failBadSyntax "Should fail, method name should be unique" "duplicate_service_method"
, testCase "Invalid service base: struct" $ failBadSyntax "Should fail, struct can't be used as service base" "service_invalid_base_struct"
, testCase "Invalid service base: type param" $ failBadSyntax "Should fail, type param can't be used as service base" "service_invalid_base_type_param"
]
, testGroup "Codegen"
[ utilTestGroup,
testGroup "C++"
[ verifyCppCodegen "attributes"
, verifyCppCodegen "basic_types"
, verifyCppCodegen "bond_meta"
, verifyCppCodegen "complex_types"
, verifyCppCodegen "defaults"
, verifyCppCodegen "empty"
, verifyCppCodegen "field_modifiers"
, verifyCppCodegen "generics"
, verifyCppCodegen "inheritance"
, verifyCppCodegen "aliases"
, verifyCppCodegen "alias_key"
, verifyCppCodegen "maybe_blob"
, verifyCodegen
[ "c++"
, "--enum-header"
]
"with_enum_header"
, verifyCodegen
[ "c++"
, "--allocator=arena"
]
"alias_with_allocator"
, verifyCodegen
[ "c++"
, "--allocator=arena"
, "--using=List=my::list<{0}, arena>"
, "--using=Vector=my::vector<{0}, arena>"
, "--using=Set=my::set<{0}, arena>"
, "--using=Map=my::map<{0}, {1}, arena>"
, "--using=String=my::string<arena>"
]
"custom_alias_with_allocator"
, verifyCodegen
[ "c++"
, "--allocator=arena"
, "--using=List=my::list<{0}>"
, "--using=Vector=my::vector<{0}>"
, "--using=Set=my::set<{0}>"
, "--using=Map=my::map<{0}, {1}>"
, "--using=String=my::string"
]
"custom_alias_without_allocator"
, testGroup "Apply"
[ verifyApplyCodegen
[ "c++"
, "--apply-attribute=DllExport"
]
"basic_types"
]
, testGroup "Exports"
[ verifyExportsCodegen
[ "c++"
, "--export-attribute=DllExport"
]
"service"
]
, verifyCodegen
[ "c++"
, "--namespace=tests=nsmapped"
]
"basic_types_nsmapped"
, testGroup "Comm"
[ verifyCppCommCodegen
[ "c++"
]
"service"
, verifyCppCommCodegen
[ "c++"
]
"generic_service"
, verifyCppCommCodegen
[ "c++"
]
"service_attributes"
]
, testGroup "Grpc"
[ verifyCppGrpcCodegen
[ "c++"
]
"service"
, verifyCppGrpcCodegen
[ "c++"
]
"generic_service"
, verifyCppGrpcCodegen
[ "c++"
]
"service_attributes"
]
]
, testGroup "C#"
[ verifyCsCodegen "attributes"
, verifyCsCodegen "basic_types"
, verifyCsCodegen "bond_meta"
, verifyCsCodegen "complex_types"
, verifyCsCodegen "defaults"
, verifyCsCodegen "empty"
, verifyCsCodegen "field_modifiers"
, verifyCsCodegen "generics"
, verifyCsCodegen "inheritance"
, verifyCsCodegen "aliases"
, verifyCodegen
[ "c#"
, "--using=time=System.DateTime"
]
"nullable_alias"
, verifyCodegen
[ "c#"
, "--namespace=tests=nsmapped"
]
"basic_types_nsmapped"
, testGroup "Comm"
[ verifyCsCommCodegen
[ "c#"
]
"service"
, verifyCsCommCodegen
[ "c#"
]
"generic_service"
, verifyCsCommCodegen
[ "c#"
]
"service_attributes"
]
]
, testGroup "Java"
[ verifyJavaCodegen "attributes"
, verifyJavaCodegen "basic_types"
, verifyJavaCodegen "bond_meta"
, verifyJavaCodegen "complex_types"
, verifyJavaCodegen "defaults"
, verifyJavaCodegen "empty"
, verifyJavaCodegen "field_modifiers"
, verifyJavaCodegen "generics"
, verifyJavaCodegen "inheritance"
, verifyJavaCodegen "aliases"
, verifyCodegen
[ "java"
, "--namespace=tests=nsmapped"
]
"basic_types_nsmapped"
]
]
]
main :: IO ()
main = defaultMain tests
| gencer/bond | compiler/tests/TestMain.hs | mit | 8,480 | 0 | 14 | 3,375 | 1,093 | 561 | 532 | 172 | 1 |
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
module PropertyBasedTesting where
import System.FilePath
import System.Process
import System.Directory
import System.Timeout
import Control.Monad.Reader
import Data.Maybe
import Data.List
import EvaluationMonad
import Data.RoseTree
import Util.RoseGen
import InputMonad
-- | Get the output from the class file `file`
solutionOutput :: Input -> FilePath -> EvalM String
solutionOutput (Input commandLineArgs stdin) file = do
let command = "java " ++ dropExtension file ++ " " ++ intercalate " " commandLineArgs
logMessage $ "Running the command: " ++ command
-- Timeout 1 second
m <- liftIO $ timeout 1000000 $ readCreateProcess (shell command) stdin
case m of
Nothing -> throw "Command timed out"
Just x -> return x
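-- For intuition, with the hypothetical values
-- @Input ["12", "7"] "3\n"@ and @file = "Sum.class"@ the command built above
-- is the string
--
-- > "java Sum 12 7"
--
-- run with @"3\n"@ fed to the process on stdin and a one-second timeout.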
-- | Get the output of the student solution
studentOutput :: FilePath -> Input -> EvalM (Maybe String)
studentOutput dir input = do
-- This is really inefficient and should be floated to the top level
ss <- liftIO $ listDirectory $ dir </> "student"
studentSolutionName <- case ss of
[] -> throw "Student solution missing"
(s:_) -> return s
catch (Just <$> (inTemporaryDirectory (dir </> "student") $ solutionOutput input studentSolutionName))
(\_ -> issue "Student test timeout" >> return Nothing)
-- | Get the output of every model solution
modelSolutionsOutputs :: FilePath -> Input -> EvalM [String]
modelSolutionsOutputs dir input = do
modelSolutions <- liftIO $ listDirectory (dir </> "model")
inTemporaryDirectory (dir </> "model") $ sequence $ solutionOutput input <$> modelSolutions
-- | Test the student solution in `dir </> "student/"` against
-- the solutions in `dir </> "model/"`
testSolutions :: FilePath -> Input -> EvalM (Maybe (String, String))
testSolutions dir input = do
modelOutputs <- modelSolutionsOutputs dir input
studO <- studentOutput dir input
return $ maybe Nothing (\s -> compareOutputs s modelOutputs) studO
compareOutputs :: String -> [String] -> Maybe (String, String)
compareOutputs _ [] = Nothing -- no model outputs, so nothing to disagree with
compareOutputs student (model:[])
  | student /= model = Just (student,model)
  | otherwise = Nothing
compareOutputs student (model:ms)
  | student /= model = compareOutputs student ms
  | otherwise = Nothing
-- | Perform the relevant tests on all class files in the directory
runPBT :: FilePath -> RoseGen Input -> EvalM Bool
runPBT dir generator = do
numTests <- numberOfTests <$> ask
logMessage $ "Testing student solution " ++ show numTests ++ " times"
runNumberOfTests numTests dir generator
-- | Shrink the failing input
shrink :: FilePath -> (Input, String, String) -> [RoseTree Input] -> EvalM ()
shrink dir (input, stud, mod) [] =
issue $
"Failed on input: " ++ show input ++ "\n"
++ "With\n"
++ "Student solution output: "
++ stud ++ "\n"
++ "Model solution output: "
++ mod ++ "\n"
shrink dir failing ((RoseTree input []):trees) = do
mFailing <- testSolutions dir input
case mFailing of
Nothing -> shrink dir failing trees
Just (stud, mod) -> do
issue $
"Failed on input: " ++ show input ++ "\n"
++ "With\n"
++ "Student solution output: "
++ stud ++ "\n"
++ "Model solution output: "
++ mod ++ "\n"
shrink dir failing (tree:trees) = do
res <- testSolutions dir (root tree)
case res of
Just (stud, mod) -> shrink dir (root tree, stud, mod) $ branches tree ++ trees
Nothing -> shrink dir failing trees
--Runs the specified number of tests
runNumberOfTests :: Int -> FilePath -> RoseGen Input -> EvalM Bool
runNumberOfTests 0 _ _ = comment "Student solution passed all tests" >> return True
runNumberOfTests numTests dir generator = do
input <- liftIO $ generate generator
failing <- testSolutions dir (root input)
case failing of
Just (stud, mod) -> shrink dir (root input, stud, mod) (branches input) >> return False
Nothing -> runNumberOfTests (numTests - 1) dir generator
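-- Illustrative usage sketch: a driver that might sit on top of 'runPBT'.
-- The runner @runEvalM'@ is an assumed name for whatever entry point
-- 'EvaluationMonad' provides, and the directory layout is made up; the real
-- API may differ.
--
-- @
-- driver :: RoseGen Input -> IO ()
-- driver inputGen = do
--   -- Inputs come as rose trees so that failing cases can be shrunk.
--   passed <- runEvalM' (runPBT "tests/assignment1" inputGen)
--   putStrLn (if passed then "Student solution passed" else "Found a failing input")
-- @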
| Centril/DATX02-17-26 | libsrc/PropertyBasedTesting.hs | gpl-2.0 | 4,812 | 0 | 21 | 1,040 | 1,178 | 584 | 594 | 83 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.DirectConnect.DescribeInterconnects
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns a list of interconnects owned by the AWS account.
--
-- If an interconnect ID is provided, it will only return this particular
-- interconnect.
--
-- <http://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeInterconnects.html>
module Network.AWS.DirectConnect.DescribeInterconnects
(
-- * Request
DescribeInterconnects
-- ** Request constructor
, describeInterconnects
-- ** Request lenses
, diInterconnectId
-- * Response
, DescribeInterconnectsResponse
-- ** Response constructor
, describeInterconnectsResponse
-- ** Response lenses
, dirInterconnects
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.DirectConnect.Types
import qualified GHC.Exts
newtype DescribeInterconnects = DescribeInterconnects
{ _diInterconnectId :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'DescribeInterconnects' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'diInterconnectId' @::@ 'Maybe' 'Text'
--
describeInterconnects :: DescribeInterconnects
describeInterconnects = DescribeInterconnects
{ _diInterconnectId = Nothing
}
diInterconnectId :: Lens' DescribeInterconnects (Maybe Text)
diInterconnectId = lens _diInterconnectId (\s a -> s { _diInterconnectId = a })
newtype DescribeInterconnectsResponse = DescribeInterconnectsResponse
{ _dirInterconnects :: List "interconnects" Interconnect
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeInterconnectsResponse where
type Item DescribeInterconnectsResponse = Interconnect
fromList = DescribeInterconnectsResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _dirInterconnects
-- | 'DescribeInterconnectsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dirInterconnects' @::@ ['Interconnect']
--
describeInterconnectsResponse :: DescribeInterconnectsResponse
describeInterconnectsResponse = DescribeInterconnectsResponse
{ _dirInterconnects = mempty
}
-- | A list of interconnects.
dirInterconnects :: Lens' DescribeInterconnectsResponse [Interconnect]
dirInterconnects = lens _dirInterconnects (\s a -> s { _dirInterconnects = a }) . _List
instance ToPath DescribeInterconnects where
toPath = const "/"
instance ToQuery DescribeInterconnects where
toQuery = const mempty
instance ToHeaders DescribeInterconnects
instance ToJSON DescribeInterconnects where
toJSON DescribeInterconnects{..} = object
[ "interconnectId" .= _diInterconnectId
]
instance AWSRequest DescribeInterconnects where
type Sv DescribeInterconnects = DirectConnect
type Rs DescribeInterconnects = DescribeInterconnectsResponse
request = post "DescribeInterconnects"
response = jsonResponse
instance FromJSON DescribeInterconnectsResponse where
parseJSON = withObject "DescribeInterconnectsResponse" $ \o -> DescribeInterconnectsResponse
<$> o .:? "interconnects" .!= mempty
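-- Illustrative usage sketch: asking for one particular interconnect and
-- reading the response list. Only names defined in this module are used,
-- plus the lens operators '&', '?~' and 'view', which may need an explicit
-- @import Control.Lens@ depending on what the SDK prelude re-exports. The
-- interconnect ID is made up.
--
-- @
-- req :: DescribeInterconnects
-- req = describeInterconnects & diInterconnectId ?~ "dxcon-abc123"
--
-- interconnectsOf :: DescribeInterconnectsResponse -> [Interconnect]
-- interconnectsOf = view dirInterconnects
-- @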
| kim/amazonka | amazonka-directconnect/gen/Network/AWS/DirectConnect/DescribeInterconnects.hs | mpl-2.0 | 4,133 | 0 | 10 | 776 | 507 | 306 | 201 | 59 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SES.Types.Product
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.SES.Types.Product where
import Network.AWS.Prelude
import Network.AWS.SES.Types.Sum
-- | Represents the body of the message. You can specify text, HTML, or both.
-- If you use both, then the message should display correctly in the widest
-- variety of email clients.
--
-- /See:/ 'body' smart constructor.
data Body = Body'
{ _bText :: !(Maybe Content)
, _bHTML :: !(Maybe Content)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Body' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bText'
--
-- * 'bHTML'
body
:: Body
body =
Body'
{ _bText = Nothing
, _bHTML = Nothing
}
-- | The content of the message, in text format. Use this for text-based
-- email clients, or clients on high-latency networks (such as mobile
-- devices).
bText :: Lens' Body (Maybe Content)
bText = lens _bText (\ s a -> s{_bText = a});
-- | The content of the message, in HTML format. Use this for email clients
-- that can process HTML. You can include clickable links, formatted text,
-- and much more in an HTML message.
bHTML :: Lens' Body (Maybe Content)
bHTML = lens _bHTML (\ s a -> s{_bHTML = a});
instance ToQuery Body where
toQuery Body'{..}
= mconcat ["Text" =: _bText, "Html" =: _bHTML]
-- | Represents textual data, plus an optional character set specification.
--
-- By default, the text must be 7-bit ASCII, due to the constraints of the
-- SMTP protocol. If the text must contain any other characters, then you
-- must also specify a character set. Examples include UTF-8, ISO-8859-1,
-- and Shift_JIS.
--
-- /See:/ 'content' smart constructor.
data Content = Content'
{ _cCharset :: !(Maybe Text)
, _cData :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Content' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cCharset'
--
-- * 'cData'
content
:: Text -- ^ 'cData'
-> Content
content pData_ =
Content'
{ _cCharset = Nothing
, _cData = pData_
}
-- | The character set of the content.
cCharset :: Lens' Content (Maybe Text)
cCharset = lens _cCharset (\ s a -> s{_cCharset = a});
-- | The textual data of the content.
cData :: Lens' Content Text
cData = lens _cData (\ s a -> s{_cData = a});
instance ToQuery Content where
toQuery Content'{..}
= mconcat ["Charset" =: _cCharset, "Data" =: _cData]
-- | Represents the destination of the message, consisting of To:, CC:, and
-- BCC: fields.
--
-- By default, the string must be 7-bit ASCII. If the text must contain any
-- other characters, then you must use MIME encoded-word syntax (RFC 2047)
-- instead of a literal string. MIME encoded-word syntax uses the following
-- form: '=?charset?encoding?encoded-text?='. For more information, see
-- <http://tools.ietf.org/html/rfc2047 RFC 2047>.
--
-- /See:/ 'destination' smart constructor.
data Destination = Destination'
{ _dBCCAddresses :: !(Maybe [Text])
, _dCCAddresses :: !(Maybe [Text])
, _dToAddresses :: !(Maybe [Text])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Destination' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dBCCAddresses'
--
-- * 'dCCAddresses'
--
-- * 'dToAddresses'
destination
:: Destination
destination =
Destination'
{ _dBCCAddresses = Nothing
, _dCCAddresses = Nothing
, _dToAddresses = Nothing
}
-- | The BCC: field(s) of the message.
dBCCAddresses :: Lens' Destination [Text]
dBCCAddresses = lens _dBCCAddresses (\ s a -> s{_dBCCAddresses = a}) . _Default . _Coerce;
-- | The CC: field(s) of the message.
dCCAddresses :: Lens' Destination [Text]
dCCAddresses = lens _dCCAddresses (\ s a -> s{_dCCAddresses = a}) . _Default . _Coerce;
-- | The To: field(s) of the message.
dToAddresses :: Lens' Destination [Text]
dToAddresses = lens _dToAddresses (\ s a -> s{_dToAddresses = a}) . _Default . _Coerce;
instance ToQuery Destination where
toQuery Destination'{..}
= mconcat
["BccAddresses" =:
toQuery (toQueryList "member" <$> _dBCCAddresses),
"CcAddresses" =:
toQuery (toQueryList "member" <$> _dCCAddresses),
"ToAddresses" =:
toQuery (toQueryList "member" <$> _dToAddresses)]
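-- Illustrative usage sketch: a 'Destination' with one To: address and one
-- BCC: address, built from the smart constructor and lenses above. The
-- addresses are made up; '&' and '.~' are the usual lens operators and may
-- need an explicit @import Control.Lens@.
--
-- @
-- dest :: Destination
-- dest = destination
--     & dToAddresses  .~ ["alice\@example.com"]
--     & dBCCAddresses .~ ["audit\@example.com"]
-- @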
-- | Represents the DKIM attributes of a verified email address or a domain.
--
-- /See:/ 'identityDkimAttributes' smart constructor.
data IdentityDkimAttributes = IdentityDkimAttributes'
{ _idaDkimTokens :: !(Maybe [Text])
, _idaDkimEnabled :: !Bool
, _idaDkimVerificationStatus :: !VerificationStatus
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'IdentityDkimAttributes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idaDkimTokens'
--
-- * 'idaDkimEnabled'
--
-- * 'idaDkimVerificationStatus'
identityDkimAttributes
:: Bool -- ^ 'idaDkimEnabled'
-> VerificationStatus -- ^ 'idaDkimVerificationStatus'
-> IdentityDkimAttributes
identityDkimAttributes pDkimEnabled_ pDkimVerificationStatus_ =
IdentityDkimAttributes'
{ _idaDkimTokens = Nothing
, _idaDkimEnabled = pDkimEnabled_
, _idaDkimVerificationStatus = pDkimVerificationStatus_
}
-- | A set of character strings that represent the domain\'s identity. Using
-- these tokens, you will need to create DNS CNAME records that point to
-- DKIM public keys hosted by Amazon SES. Amazon Web Services will
-- eventually detect that you have updated your DNS records; this detection
-- process may take up to 72 hours. Upon successful detection, Amazon SES
-- will be able to DKIM-sign email originating from that domain. (This only
-- applies to domain identities, not email address identities.)
--
-- For more information about creating DNS records using DKIM tokens, go to
-- the
-- <http://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html Amazon SES Developer Guide>.
idaDkimTokens :: Lens' IdentityDkimAttributes [Text]
idaDkimTokens = lens _idaDkimTokens (\ s a -> s{_idaDkimTokens = a}) . _Default . _Coerce;
-- | True if DKIM signing is enabled for email sent from the identity; false
-- otherwise.
idaDkimEnabled :: Lens' IdentityDkimAttributes Bool
idaDkimEnabled = lens _idaDkimEnabled (\ s a -> s{_idaDkimEnabled = a});
-- | Describes whether Amazon SES has successfully verified the DKIM DNS
-- records (tokens) published in the domain name\'s DNS. (This only applies
-- to domain identities, not email address identities.)
idaDkimVerificationStatus :: Lens' IdentityDkimAttributes VerificationStatus
idaDkimVerificationStatus = lens _idaDkimVerificationStatus (\ s a -> s{_idaDkimVerificationStatus = a});
instance FromXML IdentityDkimAttributes where
parseXML x
= IdentityDkimAttributes' <$>
(x .@? "DkimTokens" .!@ mempty >>=
may (parseXMLList "member"))
<*> (x .@ "DkimEnabled")
<*> (x .@ "DkimVerificationStatus")
-- | Represents the notification attributes of an identity, including whether
-- an identity has Amazon Simple Notification Service (Amazon SNS) topics
-- set for bounce, complaint, and\/or delivery notifications, and whether
-- feedback forwarding is enabled for bounce and complaint notifications.
--
-- /See:/ 'identityNotificationAttributes' smart constructor.
data IdentityNotificationAttributes = IdentityNotificationAttributes'
{ _inaBounceTopic :: !Text
, _inaComplaintTopic :: !Text
, _inaDeliveryTopic :: !Text
, _inaForwardingEnabled :: !Bool
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'IdentityNotificationAttributes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'inaBounceTopic'
--
-- * 'inaComplaintTopic'
--
-- * 'inaDeliveryTopic'
--
-- * 'inaForwardingEnabled'
identityNotificationAttributes
:: Text -- ^ 'inaBounceTopic'
-> Text -- ^ 'inaComplaintTopic'
-> Text -- ^ 'inaDeliveryTopic'
-> Bool -- ^ 'inaForwardingEnabled'
-> IdentityNotificationAttributes
identityNotificationAttributes pBounceTopic_ pComplaintTopic_ pDeliveryTopic_ pForwardingEnabled_ =
IdentityNotificationAttributes'
{ _inaBounceTopic = pBounceTopic_
, _inaComplaintTopic = pComplaintTopic_
, _inaDeliveryTopic = pDeliveryTopic_
, _inaForwardingEnabled = pForwardingEnabled_
}
-- | The Amazon Resource Name (ARN) of the Amazon SNS topic where Amazon SES
-- will publish bounce notifications.
inaBounceTopic :: Lens' IdentityNotificationAttributes Text
inaBounceTopic = lens _inaBounceTopic (\ s a -> s{_inaBounceTopic = a});
-- | The Amazon Resource Name (ARN) of the Amazon SNS topic where Amazon SES
-- will publish complaint notifications.
inaComplaintTopic :: Lens' IdentityNotificationAttributes Text
inaComplaintTopic = lens _inaComplaintTopic (\ s a -> s{_inaComplaintTopic = a});
-- | The Amazon Resource Name (ARN) of the Amazon SNS topic where Amazon SES
-- will publish delivery notifications.
inaDeliveryTopic :: Lens' IdentityNotificationAttributes Text
inaDeliveryTopic = lens _inaDeliveryTopic (\ s a -> s{_inaDeliveryTopic = a});
-- | Describes whether Amazon SES will forward bounce and complaint
-- notifications as email. 'true' indicates that Amazon SES will forward
-- bounce and complaint notifications as email, while 'false' indicates
-- that bounce and complaint notifications will be published only to the
-- specified bounce and complaint Amazon SNS topics.
inaForwardingEnabled :: Lens' IdentityNotificationAttributes Bool
inaForwardingEnabled = lens _inaForwardingEnabled (\ s a -> s{_inaForwardingEnabled = a});
instance FromXML IdentityNotificationAttributes where
parseXML x
= IdentityNotificationAttributes' <$>
(x .@ "BounceTopic") <*> (x .@ "ComplaintTopic") <*>
(x .@ "DeliveryTopic")
<*> (x .@ "ForwardingEnabled")
-- | Represents the verification attributes of a single identity.
--
-- /See:/ 'identityVerificationAttributes' smart constructor.
data IdentityVerificationAttributes = IdentityVerificationAttributes'
{ _ivaVerificationToken :: !(Maybe Text)
, _ivaVerificationStatus :: !VerificationStatus
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'IdentityVerificationAttributes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ivaVerificationToken'
--
-- * 'ivaVerificationStatus'
identityVerificationAttributes
:: VerificationStatus -- ^ 'ivaVerificationStatus'
-> IdentityVerificationAttributes
identityVerificationAttributes pVerificationStatus_ =
IdentityVerificationAttributes'
{ _ivaVerificationToken = Nothing
, _ivaVerificationStatus = pVerificationStatus_
}
-- | The verification token for a domain identity. Null for email address
-- identities.
ivaVerificationToken :: Lens' IdentityVerificationAttributes (Maybe Text)
ivaVerificationToken = lens _ivaVerificationToken (\ s a -> s{_ivaVerificationToken = a});
-- | The verification status of the identity: \"Pending\", \"Success\",
-- \"Failed\", or \"TemporaryFailure\".
ivaVerificationStatus :: Lens' IdentityVerificationAttributes VerificationStatus
ivaVerificationStatus = lens _ivaVerificationStatus (\ s a -> s{_ivaVerificationStatus = a});
instance FromXML IdentityVerificationAttributes where
parseXML x
= IdentityVerificationAttributes' <$>
(x .@? "VerificationToken") <*>
(x .@ "VerificationStatus")
-- | Represents the message to be sent, composed of a subject and a body.
--
-- /See:/ 'message' smart constructor.
data Message = Message'
{ _mSubject :: !Content
, _mBody :: !Body
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Message' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mSubject'
--
-- * 'mBody'
message
:: Content -- ^ 'mSubject'
-> Body -- ^ 'mBody'
-> Message
message pSubject_ pBody_ =
Message'
{ _mSubject = pSubject_
, _mBody = pBody_
}
-- | The subject of the message: A short summary of the content, which will
-- appear in the recipient\'s inbox.
mSubject :: Lens' Message Content
mSubject = lens _mSubject (\ s a -> s{_mSubject = a});
-- | The message body.
mBody :: Lens' Message Body
mBody = lens _mBody (\ s a -> s{_mBody = a});
instance ToQuery Message where
toQuery Message'{..}
= mconcat ["Subject" =: _mSubject, "Body" =: _mBody]
-- | Represents the raw data of the message.
--
-- /See:/ 'rawMessage' smart constructor.
newtype RawMessage = RawMessage'
{ _rmData :: Base64
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RawMessage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rmData'
rawMessage
:: ByteString -- ^ 'rmData'
-> RawMessage
rawMessage pData_ =
RawMessage'
{ _rmData = _Base64 # pData_
}
-- | The raw data of the message. The client must ensure that the message
-- format complies with Internet email standards regarding email header
-- fields, MIME types, MIME encoding, and base64 encoding (if necessary).
--
-- The To:, CC:, and BCC: headers in the raw message can contain a group
-- list.
--
-- If you are using 'SendRawEmail' with sending authorization, you can
-- include X-headers in the raw message to specify the \"Source,\"
-- \"From,\" and \"Return-Path\" addresses. For more information, see the
-- documentation for 'SendRawEmail'.
--
-- Do not include these X-headers in the DKIM signature, because they are
-- removed by Amazon SES before sending the email.
--
-- For more information, go to the
-- <http://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-raw.html Amazon SES Developer Guide>.
--
-- /Note:/ This 'Lens' automatically encodes and decodes Base64 data,
-- despite what the AWS documentation might say.
-- The underlying isomorphism will encode to Base64 representation during
-- serialisation, and decode from Base64 representation during deserialisation.
-- This 'Lens' accepts and returns only raw unencoded data.
rmData :: Lens' RawMessage ByteString
rmData = lens _rmData (\ s a -> s{_rmData = a}) . _Base64;
instance ToQuery RawMessage where
toQuery RawMessage'{..} = mconcat ["Data" =: _rmData]
-- | Represents sending statistics data. Each 'SendDataPoint' contains
-- statistics for a 15-minute period of sending activity.
--
-- /See:/ 'sendDataPoint' smart constructor.
data SendDataPoint = SendDataPoint'
{ _sdpRejects :: !(Maybe Integer)
, _sdpComplaints :: !(Maybe Integer)
, _sdpDeliveryAttempts :: !(Maybe Integer)
, _sdpBounces :: !(Maybe Integer)
, _sdpTimestamp :: !(Maybe ISO8601)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SendDataPoint' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdpRejects'
--
-- * 'sdpComplaints'
--
-- * 'sdpDeliveryAttempts'
--
-- * 'sdpBounces'
--
-- * 'sdpTimestamp'
sendDataPoint
:: SendDataPoint
sendDataPoint =
SendDataPoint'
{ _sdpRejects = Nothing
, _sdpComplaints = Nothing
, _sdpDeliveryAttempts = Nothing
, _sdpBounces = Nothing
, _sdpTimestamp = Nothing
}
-- | Number of emails rejected by Amazon SES.
sdpRejects :: Lens' SendDataPoint (Maybe Integer)
sdpRejects = lens _sdpRejects (\ s a -> s{_sdpRejects = a});
-- | Number of unwanted emails that were rejected by recipients.
sdpComplaints :: Lens' SendDataPoint (Maybe Integer)
sdpComplaints = lens _sdpComplaints (\ s a -> s{_sdpComplaints = a});
-- | Number of emails that have been enqueued for sending.
sdpDeliveryAttempts :: Lens' SendDataPoint (Maybe Integer)
sdpDeliveryAttempts = lens _sdpDeliveryAttempts (\ s a -> s{_sdpDeliveryAttempts = a});
-- | Number of emails that have bounced.
sdpBounces :: Lens' SendDataPoint (Maybe Integer)
sdpBounces = lens _sdpBounces (\ s a -> s{_sdpBounces = a});
-- | Time of the data point.
sdpTimestamp :: Lens' SendDataPoint (Maybe UTCTime)
sdpTimestamp = lens _sdpTimestamp (\ s a -> s{_sdpTimestamp = a}) . mapping _Time;
instance FromXML SendDataPoint where
parseXML x
= SendDataPoint' <$>
(x .@? "Rejects") <*> (x .@? "Complaints") <*>
(x .@? "DeliveryAttempts")
<*> (x .@? "Bounces")
<*> (x .@? "Timestamp")
| fmapfmapfmap/amazonka | amazonka-ses/gen/Network/AWS/SES/Types/Product.hs | mpl-2.0 | 17,621 | 0 | 13 | 3,397 | 2,662 | 1,569 | 1,093 | 260 | 1 |
{-#LANGUAGE NoImplicitPrelude #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE OverloadedLists #-}
{-#LANGUAGE TypeFamilies #-}
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE FlexibleInstances #-}
{-#LANGUAGE FlexibleContexts #-}
{-#LANGUAGE LambdaCase #-}
{-#LANGUAGE TupleSections #-}
{-#LANGUAGE DeriveGeneric #-}
{-#LANGUAGE TypeApplications #-}
-- | Backend spec types and parser
module Web.Sprinkles.Backends.Spec
(
-- * Defining backends
BackendSpec (..)
, backendSpecFromJSON
, makeBackendTypePathsAbsolute
, makeBackendSpecPathsAbsolute
, BackendType (..)
, FetchOrderField (..)
, FetchMode (..)
, AscDesc (..)
, FetchOrder (..)
, parseBackendURI
, Credentials (..)
, HttpMethod (..)
, HttpBackendOptions (..)
, CachePolicy (..)
, HasCachePolicy (..)
, ParserType (..)
, parserTypes
)
where
import Web.Sprinkles.Prelude
import Network.Mime (MimeType)
import Network.HTTP.Types ()
import Data.Aeson (FromJSON (..), Value (..), (.=), (.!=), (.:?), (.:))
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Types as JSON
import System.PosixCompat.Files
import Data.Default (Default (..))
import Web.Sprinkles.Cache
import qualified Data.Serialize as Cereal
import Data.Serialize (Serialize)
import Web.Sprinkles.Databases (DSN (..), sqlDriverFromID, ResultSetMode (..))
import Web.Sprinkles.Logger (LogLevel (..))
import Data.Expandable (ExpandableM (..), expand)
import GHC.Stack
-- | A type of backend.
data BackendType = HttpBackend Text HttpBackendOptions -- ^ Fetch data over HTTP(S)
| FileBackend Text -- ^ Read local files
| SqlBackend DSN Text [Text] -- ^ Query an SQL database
| SqlMultiBackend DSN ResultSetMode [(Text, [Text])] -- ^ Query an SQL database, multiple queries
| SubprocessBackend Text [Text] MimeType -- ^ Run a command in a subprocess
| RequestBodyBackend -- ^ Read the incoming request body
| LiteralBackend Value -- ^ Return literal data from the spec itself
deriving (Show, Generic)
makeBackendTypePathsAbsolute :: FilePath -> BackendType -> BackendType
makeBackendTypePathsAbsolute dir (FileBackend fn) = FileBackend (pack . (dir </>) . unpack $ fn)
makeBackendTypePathsAbsolute dir (SubprocessBackend cmd args ty) = SubprocessBackend (pack . (dir </>) . unpack $ cmd) args ty
makeBackendTypePathsAbsolute _ x = x
instance Serialize BackendType where
put (HttpBackend url options) = do
Cereal.put 'h'
Cereal.put (encodeUtf8 url)
Cereal.put options
put (FileBackend path) = do
Cereal.put 'f'
Cereal.put (encodeUtf8 path)
put (SqlBackend dsn query params) = do
Cereal.put 's'
Cereal.put dsn
Cereal.put (encodeUtf8 query)
Cereal.put (map encodeUtf8 params)
put (SqlMultiBackend dsn mode queries) = do
Cereal.put 'S'
Cereal.put dsn
Cereal.put mode
Cereal.put
[ (encodeUtf8 $ q, map encodeUtf8 p)
| (q, p) <- queries
]
put (SubprocessBackend cmd args t) = do
Cereal.put 'p'
Cereal.put (encodeUtf8 cmd)
Cereal.put (map encodeUtf8 args)
Cereal.put t
put RequestBodyBackend = Cereal.put 'b'
put (LiteralBackend b) = do
Cereal.put 'l'
Cereal.put (JSON.encode b)
get = Cereal.get >>= \case
'h' -> HttpBackend <$> (decodeUtf8 <$> Cereal.get) <*> Cereal.get
'f' -> FileBackend <$> (decodeUtf8 <$> Cereal.get)
's' -> SqlBackend <$>
Cereal.get <*>
(decodeUtf8 <$> Cereal.get) <*>
(map decodeUtf8 <$> Cereal.get)
'S' -> SqlMultiBackend <$>
Cereal.get <*>
Cereal.get <*>
(Cereal.get >>= \items -> return
[ ( decodeUtf8 q
, map decodeUtf8 p
)
| (q, p) <- items
])
'p' -> SubprocessBackend <$>
(decodeUtf8 <$> Cereal.get) <*>
(map decodeUtf8 <$> Cereal.get) <*>
Cereal.get
'b' -> return RequestBodyBackend
'l' -> LiteralBackend <$>
(fromMaybe JSON.Null . JSON.decode <$> Cereal.get)
x -> fail $ "Invalid backend type identifier: " <> show x
instance ExpandableM Text BackendType where
expandM f (HttpBackend t c) =
HttpBackend <$> f t <*> pure c
expandM f (FileBackend t) =
FileBackend <$> f t
expandM f (SqlBackend dsn query params) =
SqlBackend <$> expandM f dsn <*> pure query <*> expandM f params
expandM f (SqlMultiBackend dsn mode queries) =
SqlMultiBackend
<$> expandM f dsn
<*> pure mode
<*> ( forM queries $ \(query, params) ->
(query,) <$> expandM f params
)
expandM f (SubprocessBackend cmd args t) =
SubprocessBackend cmd <$> expandM f args <*> pure t
expandM _ RequestBodyBackend =
pure RequestBodyBackend
expandM f (LiteralBackend b) =
LiteralBackend <$> expandM f b
-- | A specification of a backend query.
data BackendSpec =
BackendSpec
{ bsType :: BackendType -- ^ Defines the data source
, bsFetchMode :: FetchMode -- ^ How many items to fetch, and in what shape
, bsOrder :: FetchOrder -- ^ How to order items
-- | If set, ignore reported MIME type and use this one instead.
, bsMimeTypeOverride :: Maybe MimeType
, bsCacheEnabled :: Bool
}
deriving (Show, Generic)
instance Serialize BackendSpec
makeBackendSpecPathsAbsolute :: FilePath -> BackendSpec -> BackendSpec
makeBackendSpecPathsAbsolute dir spec =
spec { bsType = makeBackendTypePathsAbsolute dir (bsType spec) }
instance ExpandableM Text BackendSpec where
expandM f (BackendSpec t m o mto ce) =
BackendSpec <$> expandM f t
<*> pure m
<*> pure o
<*> pure mto
<*> pure ce
-- | The JSON shape of a backend spec is:
--
-- @
-- {
-- // type: one of:
-- // - "http" (fetch over HTTP)
-- // - "https" (fetch over HTTPS)
-- // - "file" (load an individual file)
-- // - "glob" (resolve a glob and load all matching files)
-- // - "dir" (get a directory listing)
-- // - "sql" (query an SQL database)
-- // - "subprocess" (execute a subprocess and read its stdout)
-- // - "post" (get the request body; only for POST requests)
-- // - "literal" (return literal value as specified)
-- "type": type,
--
-- // fetch mode. One of:
-- - "one": Fetch exactly one item, as a scalar
-- - "all": Fetch all items, as a list
-- - n (numeric value): Fetch up to n items, as a list
-- "fetch": fetchMode,
--
-- // ordering. One of:
-- // - "arbitrary": do not reorder, use whatever the backend produces
-- // - "random": random-shuffle results
-- // - "shuffle": same as "random"
-- // - "name": order by name
-- // - "mtime": order by modification time
-- // The ordering can be preceded with a "+" or "-" sign to indicate
-- // ascending or descending ordering.
-- "order": ordering,
--
-- // The rest of the structure depends on the type.
--
-- // For "http" and "https":
-- // The HTTP(S) URI to load from
-- "uri": uri,
--
-- // For "file", "glob", "dir":
-- // The local file path or glob
-- "path": path
-- }
-- @
instance FromJSON BackendSpec where
parseJSON = backendSpecFromJSON
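-- Illustrative usage sketch: decoding a spec of the JSON shape documented
-- above. @JSON.parseMaybe@ comes from "Data.Aeson.Types" (imported
-- qualified as @JSON@ in this module); the path and ordering are made up.
--
-- @
-- spec :: Maybe BackendSpec
-- spec = JSON.parseMaybe backendSpecFromJSON $ JSON.object
--     [ "type"  .= ("file"                  :: Text)
--     , "path"  .= ("data/posts/*.markdown" :: Text)
--     , "fetch" .= ("all"                   :: Text)
--     , "order" .= ("-mtime"                :: Text)
--     ]
-- @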
-- | Read a backend spec from a JSON value.
backendSpecFromJSON :: HasCallStack => JSON.Value -> JSON.Parser BackendSpec
backendSpecFromJSON (String uri) =
parseBackendURI uri
backendSpecFromJSON (Object obj) = do
bsTypeStr <- obj .: "type"
(t, defFetchMode) <- case bsTypeStr :: Text of
"http" -> parseHttpBackendSpec
"https" -> parseHttpBackendSpec
"file" -> parseFileBackendSpec FetchOne
"glob" -> parseFileBackendSpec FetchAll
"dir" -> parseDirBackendSpec
"sql" -> parseSqlBackendSpec
"subprocess" -> parseSubprocessSpec
"post" -> return (RequestBodyBackend, FetchOne)
"literal" -> parseLiteralBackendSpec
x -> fail $ "Invalid backend specifier: " ++ show x
fetchMode <- obj .:? "fetch" .!= defFetchMode
fetchOrder <- obj .:? "order" .!= def
cacheEnabled <- obj .:? "cache-enabled" .!= True
mimeOverride <- fmap encodeUtf8 . (id @(Maybe Text)) <$> obj .:? "force-mime-type"
return $ BackendSpec t fetchMode fetchOrder mimeOverride cacheEnabled
where
parseHttpBackendSpec = do
t <- obj .: "uri"
return (HttpBackend t def, FetchOne)
parseFileBackendSpec m = do
path <- obj .: "path"
return (FileBackend path, m)
parseDirBackendSpec = do
path <- obj .: "path"
return (FileBackend (pack $ path </> "*"), FetchAll)
parseSqlBackendSpec = do
dsn <- obj .: "connection"
case lookup "queries" obj of
Nothing -> do
query <- obj .: "query"
params <- obj .:? "params" .!= []
return (SqlBackend dsn query params, FetchAll)
Just (Array queries') -> do
queries <- forM (toList queries') $ \case
String queryStr -> do
return (queryStr, [])
Object queriesObj -> do
query <- queriesObj .: "query"
params <- queriesObj .:? "params" .!= []
return (query, params)
Array queriesArr -> do
case toList queriesArr of
[] ->
fail "Invalid query object, empty array is not allowed"
[String queryStr] ->
return (queryStr, [])
[String queryStr, Array params] ->
(queryStr,) <$> mapM parseJSON (toList params)
(String queryStr:params) ->
(queryStr,) <$> mapM parseJSON params
x ->
fail "Invalid query object, first array element must be string"
x -> fail "Invalid query object, must be array, string, or object"
mode <- obj .:? "results" .!= ResultsMerge
return (SqlMultiBackend dsn mode queries, FetchAll)
Just x -> fail "Invalid queries object, must be array"
parseSubprocessSpec = do
rawCmd <- obj .: "cmd"
t <- fromString <$> (obj .:? "mime-type" .!= "text/plain")
case rawCmd of
String cmd -> return (SubprocessBackend cmd [] t, FetchOne)
Array v -> parseJSON rawCmd >>= \case
cmd:args -> return (SubprocessBackend cmd args t, FetchOne)
_ -> fail "Expected a command and a list of arguments"
x -> fail $ "Expected string or array, but found " ++ show x
parseLiteralBackendSpec = do
b <- obj .:? "body" .!= JSON.Null
return (LiteralBackend b, FetchOne)
backendSpecFromJSON x = fail $ "Invalid JSON value for BackendSpec: " <> show x <> ", expecting object or string"
-- | Parse a 'Text' into a 'BackendSpec'.
parseBackendURI :: MonadFail m => Text -> m BackendSpec
parseBackendURI t = do
let protocol = takeWhile (/= ':') t
path = drop (length protocol + 3) t
case protocol of
"http" ->
return $
BackendSpec
(HttpBackend t def)
FetchOne
def
Nothing
True
"https" ->
return $
BackendSpec
(HttpBackend t def)
FetchOne
def
Nothing
True
"dir" -> return $
BackendSpec (FileBackend (pack $ unpack path </> "*")) FetchAll def Nothing True
"glob" -> return $
BackendSpec (FileBackend path) FetchAll def Nothing True
"file" -> return $
BackendSpec (FileBackend path) FetchOne def Nothing True
"sql" -> do
be <- parseSqlBackendURI path
return $ BackendSpec be FetchAll def Nothing True
"post" ->
return $
BackendSpec
RequestBodyBackend
FetchOne def Nothing True
"literal" ->
return $
BackendSpec
(LiteralBackend $ JSON.String path)
FetchOne def Nothing True
_ -> fail $ "Unknown protocol: " <> show protocol
where
parseSqlBackendURI path = do
let driverID = takeWhile (/= ':') path
remainder = drop (length driverID + 1) path
details = takeWhile (/= ':') remainder
query = drop (length details + 1) remainder
driver <- either
(\msg ->
fail $ "Invalid driver: " ++ show driverID ++ "(" ++ msg ++ ")")
return
(sqlDriverFromID driverID)
return $ SqlBackend (DSN driver details) query []
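-- Illustrative usage sketch: the URI shorthand accepted above, instantiated
-- at 'Maybe' so that parse failures become 'Nothing'. The paths are made up,
-- and whether @sqlite@ is a recognised driver ID is up to 'sqlDriverFromID'.
--
-- @
-- fileSpec, globSpec, sqlSpec :: Maybe BackendSpec
-- fileSpec = parseBackendURI "file://data/about.markdown"
-- globSpec = parseBackendURI "glob://data/posts/*.markdown"
-- sqlSpec  = parseBackendURI "sql://sqlite:posts.db:SELECT * FROM posts"
-- @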
-- | How many items to fetch, and in what shape.
data FetchMode = FetchOne -- ^ Fetch only the first result
| FetchAll -- ^ Fetch all results
| FetchN Int -- ^ Fetch at most @n@ results, starting from the top
deriving (Show, Read, Eq, Generic)
instance Serialize FetchMode where
instance FromJSON FetchMode where
parseJSON (String "one") = return FetchOne
parseJSON (String "all") = return FetchAll
parseJSON (Number n) = return . FetchN . ceiling $ n
    parseJSON _ = fail "Invalid fetch mode (want 'one', 'all', or a number)"
-- | By which field should we order results?
data FetchOrderField = ArbitraryOrder -- ^ Do not impose any ordering at all
| RandomOrder -- ^ Shuffle randomly
| OrderByName -- ^ Order by reported name
| OrderByMTime -- ^ Order by modification time
deriving (Show, Read, Eq, Generic)
instance Serialize FetchOrderField where
instance Default FetchOrderField where
def = ArbitraryOrder
data AscDesc = Ascending | Descending
deriving (Show, Read, Eq, Generic)
instance Serialize AscDesc where
instance Default AscDesc where
def = Ascending
-- | How to order results.
data FetchOrder =
FetchOrder
{ fetchField :: FetchOrderField -- ^ By which field?
, fetchAscDesc :: AscDesc -- ^ Reverse ordering?
}
deriving (Show, Read, Eq, Generic)
instance Serialize FetchOrder where
instance Default FetchOrder where
def = FetchOrder def def
instance FromJSON FetchOrder where
parseJSON Null = return $ FetchOrder ArbitraryOrder Ascending
parseJSON (String str) = do
let (order, core) = case take 1 str of
"-" -> (Descending, drop 1 str)
"+" -> (Ascending, drop 1 str)
_ -> (Ascending, str)
field <- case core of
"arbitrary" -> return ArbitraryOrder
"random" -> return RandomOrder
"shuffle" -> return RandomOrder
"name" -> return OrderByName
"mtime" -> return OrderByMTime
x -> fail $ "Invalid order field: " ++ show x
return $ FetchOrder field order
parseJSON val = fail $ "Invalid fetch order specifier: " ++ show val
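-- Illustrative note on the parser above: the JSON string @"-mtime"@ denotes
-- @FetchOrder OrderByMTime Descending@, @"+name"@ denotes
-- @FetchOrder OrderByName Ascending@, and a bare @"shuffle"@ denotes
-- @FetchOrder RandomOrder Ascending@.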
-- | Credentials to pass to an external backend data source. Currently stubbed,
-- supporting only anonymous access.
data Credentials = AnonymousCredentials
deriving (Show, Generic)
instance Serialize Credentials where
data HttpMethod = GET | POST
deriving (Show, Generic)
instance Serialize HttpMethod where
data HttpBackendOptions =
HttpBackendOptions
{ httpCredentials :: Credentials
, httpHttpMethods :: HttpMethod
, httpAcceptedContentTypes :: [MimeType]
}
deriving (Show, Generic)
instance Serialize HttpBackendOptions where
instance FromJSON Credentials where
parseJSON Null = return AnonymousCredentials
parseJSON (String "anonymous") = return AnonymousCredentials
parseJSON _ = fail "Invalid credentials"
instance FromJSON HttpMethod where
parseJSON (String str) =
case toUpper str of
"GET" -> return GET
"POST" -> return POST
x -> fail $ "Unsupported request method: " <> show x
parseJSON _ = fail "Invalid request method, expected string"
instance FromJSON HttpBackendOptions where
parseJSON Null = return def
parseJSON (Object o) =
HttpBackendOptions
<$> (o .:? "credentials" .!= AnonymousCredentials)
<*> (o .:? "method" .!= GET)
<*> pure knownContentTypes
parseJSON x =
fail $ "Expected string or array, but found " ++ show x
instance Default HttpBackendOptions where
def = HttpBackendOptions
AnonymousCredentials
GET
knownContentTypes
data CachePolicy = CacheForever
| NoCaching
class HasCachePolicy a where
cachePolicy :: a -> CachePolicy
instance HasCachePolicy BackendSpec where
cachePolicy = cachePolicy . bsType
instance HasCachePolicy BackendType where
cachePolicy = \case
RequestBodyBackend -> NoCaching
_ -> CacheForever
data ParserType = ParserText
| ParserJSON
| ParserYAML
| ParserFormUrlencoded
| ParserMarkdown
| ParserCreole
| ParserTextile
| ParserRST
| ParserLaTeX
| ParserDocX
| ParserHtml
deriving (Show, Read)
-- | The parsers we know, by mime types.
parserTypes :: [([MimeType], ParserType)]
parserTypes =
[ ( [ "application/json", "text/json" ]
, ParserJSON
)
, ( [ "application/x-yaml"
, "text/x-yaml"
, "application/yaml"
, "text/yaml"
]
, ParserYAML
)
, ( [ "application/x-www-form-urlencoded"
]
, ParserFormUrlencoded
)
, ( [ "application/x-markdown"
, "text/x-markdown"
]
, ParserMarkdown
)
, ( [ "application/x-creole"
, "text/x-creole"
]
, ParserCreole
)
, ( [ "application/x-textile"
, "text/x-textile"
]
, ParserTextile
)
, ( [ "application/x-rst"
, "text/x-rst"
]
, ParserRST
)
, ( [ "application/x-latex"
, "text/x-latex"
, "application/x-tex"
, "text/x-tex"
]
, ParserLaTeX
)
, ( [ "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
]
, ParserDocX
)
, ( [ "text/plain" ]
, ParserText
)
, ( [ "application/html"
, "text/html"
]
, ParserHtml
)
]
-- | All the content types we know how to parse
knownContentTypes :: [MimeType]
knownContentTypes = concatMap fst parserTypes
| tdammers/templar | src/Web/Sprinkles/Backends/Spec.hs | bsd-3-clause | 19,599 | 0 | 27 | 6,568 | 4,246 | 2,238 | 2,008 | 418 | 22 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Monoid
import qualified Graphics.Vty as V
import qualified Brick.Main as M
import Brick.Widgets.Core
( Widget
, padAll
, str
)
import qualified Brick.Widgets.Dialog as D
import qualified Brick.Widgets.Center as C
import qualified Brick.AttrMap as A
import Brick.Util (on, bg)
import qualified Brick.Types as T
data Choice = Red | Blue | Green
deriving Show
drawUI :: D.Dialog Choice -> [Widget]
drawUI d = [ui]
where
ui = D.renderDialog d $ C.hCenter $ padAll 1 $ str "This is the dialog body."
appEvent :: D.Dialog Choice -> V.Event -> M.EventM (M.Next (D.Dialog Choice))
appEvent d ev =
case ev of
V.EvKey V.KEsc [] -> M.halt d
V.EvKey V.KEnter [] -> M.halt d
_ -> M.continue $ T.handleEvent ev d
initialState :: D.Dialog Choice
initialState = D.dialog "dialog" (Just "Title") (Just (0, choices)) 50
where
choices = [ ("Red", Red)
, ("Blue", Blue)
, ("Green", Green)
]
theMap :: A.AttrMap
theMap = A.attrMap V.defAttr
[ (D.dialogAttr, V.white `on` V.blue)
, (D.buttonAttr, V.black `on` V.white)
, (D.buttonSelectedAttr, bg V.yellow)
]
theApp :: M.App (D.Dialog Choice) V.Event
theApp =
M.App { M.appDraw = drawUI
, M.appChooseCursor = M.showFirstCursor
, M.appHandleEvent = appEvent
, M.appStartEvent = return
, M.appAttrMap = const theMap
, M.appLiftVtyEvent = id
}
main :: IO ()
main = do
d <- M.defaultMain theApp initialState
putStrLn $ "You chose: " <> show (D.dialogSelection d)
| FranklinChen/brick | programs/DialogDemo.hs | bsd-3-clause | 1,675 | 0 | 12 | 454 | 576 | 323 | 253 | 47 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Puzzles.Coins
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Solves the following puzzle:
--
-- @
-- You and a friend pass by a standard coin operated vending machine and you decide to get a candy bar.
-- The price is US $0.95, but after checking your pockets you only have a dollar (US $1) and the machine
-- only takes coins. You turn to your friend and have this conversation:
-- you: Hey, do you have change for a dollar?
-- friend: Let's see. I have 6 US coins but, although they add up to a US $1.15, I can't break a dollar.
-- you: Huh? Can you make change for half a dollar?
-- friend: No.
-- you: How about a quarter?
-- friend: Nope, and before you ask I can't make change for a dime or nickel either.
-- you: Really? and these six coins are all US government coins currently in production?
-- friend: Yes.
-- you: Well can you just put your coins into the vending machine and buy me a candy bar, and I'll pay you back?
-- friend: Sorry, I would like to but I can't with the coins I have.
-- What coins are your friend holding?
-- @
--
-- To be fair, the problem has no solution /mathematically/. But there is a solution when one takes into account that
-- vending machines typically do not take the 50 cent coins!
--
-----------------------------------------------------------------------------
module Data.SBV.Examples.Puzzles.Coins where
import Data.SBV
-- | We will represent coins with 16-bit words (more than enough precision for coins).
type Coin = SWord16
-- | Create a coin. The Int argument is just used for naming the coin. Note that
-- we constrain the value to be one of the valid U.S. coin values as we create it.
mkCoin :: Int -> Symbolic Coin
mkCoin i = do c <- exists $ 'c' : show i
constrain $ bAny (.== c) [1, 5, 10, 25, 50, 100]
return c
-- | Return all combinations of a sequence of values.
combinations :: [a] -> [[a]]
combinations coins = concat [combs i coins | i <- [1 .. length coins]]
where combs 0 _ = [[]]
combs _ [] = []
combs k (x:xs) = map (x:) (combs (k-1) xs) ++ combs k xs
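-- For example, @combinations [1,2,3]@ yields
-- @[[1],[2],[3],[1,2],[1,3],[2,3],[1,2,3]]@: every non-empty subsequence,
-- grouped by length.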
-- | Constraint 1: Cannot make change for a dollar.
c1 :: [Coin] -> SBool
c1 xs = sum xs ./= 100
-- | Constraint 2: Cannot make change for half a dollar.
c2 :: [Coin] -> SBool
c2 xs = sum xs ./= 50
-- | Constraint 3: Cannot make change for a quarter.
c3 :: [Coin] -> SBool
c3 xs = sum xs ./= 25
-- | Constraint 4: Cannot make change for a dime.
c4 :: [Coin] -> SBool
c4 xs = sum xs ./= 10
-- | Constraint 5: Cannot make change for a nickel.
c5 :: [Coin] -> SBool
c5 xs = sum xs ./= 5
-- | Constraint 6: Cannot buy the candy either. Here's where we need to have the extra knowledge
-- that the vending machines do not take 50 cent coins.
c6 :: [Coin] -> SBool
c6 xs = sum (map val xs) ./= 95
where val x = ite (x .== 50) 0 x
-- | Solve the puzzle. We have:
--
-- >>> puzzle
-- Satisfiable. Model:
-- c1 = 50 :: Word16
-- c2 = 25 :: Word16
-- c3 = 10 :: Word16
-- c4 = 10 :: Word16
-- c5 = 10 :: Word16
-- c6 = 10 :: Word16
--
-- i.e., your friend has 4 dimes, a quarter, and a half dollar.
puzzle :: IO SatResult
puzzle = sat $ do
cs <- mapM mkCoin [1..6]
-- Assert each of the constraints for all combinations that has
-- at least two coins (to make change)
mapM_ constrain [c s | s <- combinations cs, length s >= 2, c <- [c1, c2, c3, c4, c5, c6]]
-- the following constraint is not necessary for solving the puzzle
-- however, it makes sure that the solution comes in decreasing value of coins,
-- thus allowing the above test to succeed regardless of the solver used.
constrain $ bAnd $ zipWith (.>=) cs (tail cs)
-- assert that the sum must be 115 cents.
return $ sum cs .== 115
| josefs/sbv | Data/SBV/Examples/Puzzles/Coins.hs | bsd-3-clause | 3,989 | 0 | 12 | 949 | 623 | 353 | 270 | 31 | 3 |
-- | The functions in this module allow you to limit the total size of incoming request bodies.
--
-- Limiting incoming request body size helps protect your server against denial-of-service (DoS) attacks,
-- in which an attacker sends huge bodies to your server.
module Network.Wai.Middleware.RequestSizeLimit
(
-- * Middleware
requestSizeLimitMiddleware
-- * Constructing 'RequestSizeLimitSettings'
, defaultRequestSizeLimitSettings
-- * 'RequestSizeLimitSettings' and accessors
, RequestSizeLimitSettings
, setMaxLengthForRequest
, setOnLengthExceeded
) where
import Network.Wai
import Network.Wai.Request
import qualified Data.ByteString.Lazy as BSL
import Data.Word (Word64)
import Network.HTTP.Types.Status (requestEntityTooLarge413)
import qualified Data.ByteString.Lazy.Char8 as LS8
import Control.Exception (try, catch)
import Data.Monoid ((<>))
import Network.Wai.Middleware.RequestSizeLimit.Internal (RequestSizeLimitSettings(..), setMaxLengthForRequest, setOnLengthExceeded)
-- | Create a 'RequestSizeLimitSettings' with these settings:
--
-- * 2MB size limit for all requests
-- * When the limit is exceeded, return a plain text response describing the error, with a 413 status code.
--
-- @since 3.1.1
defaultRequestSizeLimitSettings :: RequestSizeLimitSettings
defaultRequestSizeLimitSettings = RequestSizeLimitSettings
{ maxLengthForRequest = \_req -> pure $ Just $ 2 * 1024 * 1024
, onLengthExceeded = \maxLen _app req sendResponse -> sendResponse (tooLargeResponse maxLen (requestBodyLength req))
}
-- | Middleware to limit request bodies to a certain size.
--
-- This uses 'requestSizeCheck' under the hood; see that function for details.
--
-- @since 3.1.1
requestSizeLimitMiddleware :: RequestSizeLimitSettings -> Middleware
requestSizeLimitMiddleware settings app req sendResponse = do
maybeMaxLen <- (maxLengthForRequest settings) req
case maybeMaxLen of
Nothing -> app req sendResponse
Just maxLen -> do
eitherSizeExceptionOrNewReq <- try (requestSizeCheck maxLen req)
case eitherSizeExceptionOrNewReq of
-- In the case of a known-length request, RequestSizeException will be thrown immediately
Left (RequestSizeException _maxLen) -> handleLengthExceeded maxLen
-- In the case of a chunked request (unknown length), RequestSizeException will be thrown during the processing of a body
Right newReq -> app newReq sendResponse `catch` \(RequestSizeException _maxLen) -> handleLengthExceeded maxLen
where
handleLengthExceeded maxLen = (onLengthExceeded settings) maxLen app req sendResponse
tooLargeResponse :: Word64 -> RequestBodyLength -> Response
tooLargeResponse maxLen bodyLen = responseLBS
requestEntityTooLarge413
[("Content-Type", "text/plain")]
(BSL.concat
[ "Request body too large to be processed. The maximum size is "
, (LS8.pack (show maxLen))
, " bytes; your request body was "
, case bodyLen of
KnownLength knownLen -> (LS8.pack (show knownLen)) <> " bytes."
ChunkedBody -> "split into chunks, whose total size is unknown, but exceeded the limit."
, " If you're the developer of this site, you can configure the maximum length with `requestSizeLimitMiddleware`."
])
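-- Illustrative usage sketch: wiring the middleware into a Warp application
-- with a 256 KiB limit. @run@ stands for Warp's
-- @Network.Wai.Handler.Warp.run@ and @app@ for any WAI 'Application'; the
-- example assumes the usual wai convention that the setter takes the new
-- callback first and the settings record second.
--
-- @
-- main :: IO ()
-- main = run 8080 (requestSizeLimitMiddleware settings app)
--   where
--     settings = setMaxLengthForRequest
--                    (\ _req -> pure (Just (256 * 1024)))
--                    defaultRequestSizeLimitSettings
-- @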
| sordina/wai | wai-extra/Network/Wai/Middleware/RequestSizeLimit.hs | bsd-2-clause | 3,370 | 0 | 18 | 658 | 499 | 284 | 215 | 43 | 3 |
{-# LANGUAGE DeriveDataTypeable, UndecidableInstances #-}
{-# LANGUAGE CPP #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- The IO Monad with an environment
--
-- The environment is passed around as a Reader monad but
-- as its in the IO monad, mutable references can be used
-- for updating state.
--
module IOEnv (
IOEnv, -- Instance of Monad
-- Monad utilities
module MonadUtils,
-- Errors
failM, failWithM,
IOEnvFailure(..),
-- Getting at the environment
getEnv, setEnv, updEnv,
runIOEnv, unsafeInterleaveM, uninterruptibleMaskM_,
tryM, tryAllM, tryMostM, fixM,
-- I/O operations
IORef, newMutVar, readMutVar, writeMutVar, updMutVar,
atomicUpdMutVar, atomicUpdMutVar'
) where
import DynFlags
import Exception
import Module
import Panic
import Data.IORef ( IORef, newIORef, readIORef, writeIORef, modifyIORef,
atomicModifyIORef, atomicModifyIORef' )
import Data.Typeable
import System.IO.Unsafe ( unsafeInterleaveIO )
import System.IO ( fixIO )
import Control.Monad
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
import MonadUtils
import Control.Applicative (Alternative(..))
----------------------------------------------------------------------
-- Defining the monad type
----------------------------------------------------------------------
newtype IOEnv env a = IOEnv (env -> IO a)
unIOEnv :: IOEnv env a -> (env -> IO a)
unIOEnv (IOEnv m) = m
instance Monad (IOEnv m) where
(>>=) = thenM
(>>) = (*>)
return = pure
fail _ = failM -- Ignore the string
#if __GLASGOW_HASKELL__ > 710
instance MonadFail.MonadFail (IOEnv m) where
fail _ = failM -- Ignore the string
#endif
instance Applicative (IOEnv m) where
pure = returnM
IOEnv f <*> IOEnv x = IOEnv (\ env -> f env <*> x env )
(*>) = thenM_
instance Functor (IOEnv m) where
fmap f (IOEnv m) = IOEnv (\ env -> fmap f (m env))
returnM :: a -> IOEnv env a
returnM a = IOEnv (\ _ -> return a)
thenM :: IOEnv env a -> (a -> IOEnv env b) -> IOEnv env b
thenM (IOEnv m) f = IOEnv (\ env -> do { r <- m env ;
unIOEnv (f r) env })
thenM_ :: IOEnv env a -> IOEnv env b -> IOEnv env b
thenM_ (IOEnv m) f = IOEnv (\ env -> do { _ <- m env ; unIOEnv f env })
failM :: IOEnv env a
failM = IOEnv (\ _ -> throwIO IOEnvFailure)
failWithM :: String -> IOEnv env a
failWithM s = IOEnv (\ _ -> ioError (userError s))
data IOEnvFailure = IOEnvFailure
deriving Typeable
instance Show IOEnvFailure where
show IOEnvFailure = "IOEnv failure"
instance Exception IOEnvFailure
instance ExceptionMonad (IOEnv a) where
gcatch act handle =
IOEnv $ \s -> unIOEnv act s `gcatch` \e -> unIOEnv (handle e) s
gmask f =
IOEnv $ \s -> gmask $ \io_restore ->
let
g_restore (IOEnv m) = IOEnv $ \s -> io_restore (m s)
in
unIOEnv (f g_restore) s
instance ContainsDynFlags env => HasDynFlags (IOEnv env) where
getDynFlags = do env <- getEnv
return $ extractDynFlags env
instance ContainsModule env => HasModule (IOEnv env) where
getModule = do env <- getEnv
return $ extractModule env
----------------------------------------------------------------------
-- Fundamental combinators specific to the monad
----------------------------------------------------------------------
---------------------------
runIOEnv :: env -> IOEnv env a -> IO a
runIOEnv env (IOEnv m) = m env
---------------------------
{-# NOINLINE fixM #-}
-- Aargh! Not inlining fixM alleviates a space leak problem.
-- Normally fixM is used with a lazy tuple match: if the optimiser is
-- shown the definition of fixM, it occasionally transforms the code
-- in such a way that the code generator doesn't spot the selector
-- thunks. Sigh.
fixM :: (a -> IOEnv env a) -> IOEnv env a
fixM f = IOEnv (\ env -> fixIO (\ r -> unIOEnv (f r) env))
---------------------------
tryM :: IOEnv env r -> IOEnv env (Either IOEnvFailure r)
-- Reflect UserError exceptions (only) into IOEnv monad
-- Other exceptions are not caught; they are simply propagated as exns
--
-- The idea is that errors in the program being compiled will give rise
-- to UserErrors. But, say, pattern-match failures in GHC itself should
-- not be caught here, else they'll be reported as errors in the program
-- begin compiled!
tryM (IOEnv thing) = IOEnv (\ env -> tryIOEnvFailure (thing env))
tryIOEnvFailure :: IO a -> IO (Either IOEnvFailure a)
tryIOEnvFailure = try
-- XXX We shouldn't be catching everything, e.g. timeouts
tryAllM :: IOEnv env r -> IOEnv env (Either SomeException r)
-- Catch *all* exceptions
-- This is used when running a Template-Haskell splice, when
-- even a pattern-match failure is a programmer error
tryAllM (IOEnv thing) = IOEnv (\ env -> try (thing env))
tryMostM :: IOEnv env r -> IOEnv env (Either SomeException r)
tryMostM (IOEnv thing) = IOEnv (\ env -> tryMost (thing env))
---------------------------
unsafeInterleaveM :: IOEnv env a -> IOEnv env a
unsafeInterleaveM (IOEnv m) = IOEnv (\ env -> unsafeInterleaveIO (m env))
uninterruptibleMaskM_ :: IOEnv env a -> IOEnv env a
uninterruptibleMaskM_ (IOEnv m) = IOEnv (\ env -> uninterruptibleMask_ (m env))
----------------------------------------------------------------------
-- Alternative/MonadPlus
----------------------------------------------------------------------
instance MonadPlus IO => Alternative (IOEnv env) where
empty = mzero
(<|>) = mplus
-- For use if the user has imported Control.Monad.Error from MTL
-- Requires UndecidableInstances
instance MonadPlus IO => MonadPlus (IOEnv env) where
mzero = IOEnv (const mzero)
m `mplus` n = IOEnv (\env -> unIOEnv m env `mplus` unIOEnv n env)
----------------------------------------------------------------------
-- Accessing input/output
----------------------------------------------------------------------
instance MonadIO (IOEnv env) where
liftIO io = IOEnv (\ _ -> io)
newMutVar :: a -> IOEnv env (IORef a)
newMutVar val = liftIO (newIORef val)
writeMutVar :: IORef a -> a -> IOEnv env ()
writeMutVar var val = liftIO (writeIORef var val)
readMutVar :: IORef a -> IOEnv env a
readMutVar var = liftIO (readIORef var)
updMutVar :: IORef a -> (a -> a) -> IOEnv env ()
updMutVar var upd = liftIO (modifyIORef var upd)
-- | Atomically update the reference. Does not force the evaluation of the
-- new variable contents. For strict update, use 'atomicUpdMutVar''.
atomicUpdMutVar :: IORef a -> (a -> (a, b)) -> IOEnv env b
atomicUpdMutVar var upd = liftIO (atomicModifyIORef var upd)
-- | Strict variant of 'atomicUpdMutVar'.
atomicUpdMutVar' :: IORef a -> (a -> (a, b)) -> IOEnv env b
atomicUpdMutVar' var upd = liftIO (atomicModifyIORef' var upd)
----------------------------------------------------------------------
-- Accessing the environment
----------------------------------------------------------------------
getEnv :: IOEnv env env
{-# INLINE getEnv #-}
getEnv = IOEnv (\ env -> return env)
-- | Perform a computation with a different environment
setEnv :: env' -> IOEnv env' a -> IOEnv env a
{-# INLINE setEnv #-}
setEnv new_env (IOEnv m) = IOEnv (\ _ -> m new_env)
-- | Perform a computation with an altered environment
updEnv :: (env -> env') -> IOEnv env' a -> IOEnv env a
{-# INLINE updEnv #-}
updEnv upd (IOEnv m) = IOEnv (\ env -> m (upd env))
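-- Illustrative usage sketch (names made up): a tiny environment holding a
-- mutable counter, threaded through 'IOEnv' with the combinators above.
--
-- @
-- data CounterEnv = CounterEnv { ce_count :: IORef Int }
--
-- tick :: IOEnv CounterEnv Int
-- tick = do env <- getEnv
--           updMutVar (ce_count env) (+1)
--           readMutVar (ce_count env)
--
-- demo :: IO Int   -- returns 2
-- demo = do ref <- newIORef 0
--           runIOEnv (CounterEnv ref) (tick >> tick)
-- @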
| GaloisInc/halvm-ghc | compiler/utils/IOEnv.hs | bsd-3-clause | 7,617 | 0 | 18 | 1,632 | 1,998 | 1,062 | 936 | -1 | -1 |
module Dwarf.Types
( -- * Dwarf information
DwarfInfo(..)
, pprDwarfInfo
, pprAbbrevDecls
-- * Dwarf address range table
, DwarfARange(..)
, pprDwarfARanges
-- * Dwarf frame
, DwarfFrame(..), DwarfFrameProc(..), DwarfFrameBlock(..)
, pprDwarfFrame
-- * Utilities
, pprByte
, pprHalf
, pprData4'
, pprDwWord
, pprWord
, pprLEBWord
, pprLEBInt
, wordAlign
, sectionOffset
)
where
import GhcPrelude
import Debug
import CLabel
import CmmExpr ( GlobalReg(..) )
import Encoding
import FastString
import Outputable
import Platform
import Unique
import Reg
import SrcLoc
import Util
import Dwarf.Constants
import qualified Control.Monad.Trans.State.Strict as S
import Control.Monad (zipWithM, join)
import Data.Bits
import qualified Data.Map as Map
import Data.Word
import Data.Char
import CodeGen.Platform
-- | Individual dwarf records. Each one will be encoded as an entry in
-- the @.debug_info@ section.
data DwarfInfo
= DwarfCompileUnit { dwChildren :: [DwarfInfo]
, dwName :: String
, dwProducer :: String
, dwCompDir :: String
, dwLowLabel :: CLabel
, dwHighLabel :: CLabel
, dwLineLabel :: LitString }
| DwarfSubprogram { dwChildren :: [DwarfInfo]
, dwName :: String
, dwLabel :: CLabel
, dwParent :: Maybe CLabel
-- ^ label of DIE belonging to the parent tick
}
| DwarfBlock { dwChildren :: [DwarfInfo]
, dwLabel :: CLabel
, dwMarker :: Maybe CLabel
}
| DwarfSrcNote { dwSrcSpan :: RealSrcSpan
}
-- | Abbreviation codes used for encoding above records in the
-- @.debug_info@ section.
data DwarfAbbrev
= DwAbbrNull -- ^ Pseudo, used for marking the end of lists
| DwAbbrCompileUnit
| DwAbbrSubprogram
| DwAbbrSubprogramWithParent
| DwAbbrBlockWithoutCode
| DwAbbrBlock
| DwAbbrGhcSrcNote
deriving (Eq, Enum)
-- | Generate assembly for the given abbreviation code
pprAbbrev :: DwarfAbbrev -> SDoc
pprAbbrev = pprLEBWord . fromIntegral . fromEnum
-- | Abbreviation declaration. This explains the binary encoding we
-- use for representing 'DwarfInfo'. Be aware that this must be updated
-- along with 'pprDwarfInfo'.
pprAbbrevDecls :: Bool -> SDoc
pprAbbrevDecls haveDebugLine =
let mkAbbrev abbr tag chld flds =
let fld (tag, form) = pprLEBWord tag $$ pprLEBWord form
in pprAbbrev abbr $$ pprLEBWord tag $$ pprByte chld $$
vcat (map fld flds) $$ pprByte 0 $$ pprByte 0
-- These are shared between DwAbbrSubprogram and
-- DwAbbrSubprogramWithParent
subprogramAttrs =
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_MIPS_linkage_name, dW_FORM_string)
, (dW_AT_external, dW_FORM_flag)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
, (dW_AT_frame_base, dW_FORM_block1)
]
in dwarfAbbrevSection $$
ptext dwarfAbbrevLabel <> colon $$
mkAbbrev DwAbbrCompileUnit dW_TAG_compile_unit dW_CHILDREN_yes
([(dW_AT_name, dW_FORM_string)
, (dW_AT_producer, dW_FORM_string)
, (dW_AT_language, dW_FORM_data4)
, (dW_AT_comp_dir, dW_FORM_string)
, (dW_AT_use_UTF8, dW_FORM_flag_present) -- not represented in body
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
] ++
(if haveDebugLine
then [ (dW_AT_stmt_list, dW_FORM_data4) ]
else [])) $$
mkAbbrev DwAbbrSubprogram dW_TAG_subprogram dW_CHILDREN_yes
subprogramAttrs $$
mkAbbrev DwAbbrSubprogramWithParent dW_TAG_subprogram dW_CHILDREN_yes
(subprogramAttrs ++ [(dW_AT_ghc_tick_parent, dW_FORM_ref_addr)]) $$
mkAbbrev DwAbbrBlockWithoutCode dW_TAG_lexical_block dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
] $$
mkAbbrev DwAbbrBlock dW_TAG_lexical_block dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
] $$
mkAbbrev DwAbbrGhcSrcNote dW_TAG_ghc_src_note dW_CHILDREN_no
[ (dW_AT_ghc_span_file, dW_FORM_string)
, (dW_AT_ghc_span_start_line, dW_FORM_data4)
, (dW_AT_ghc_span_start_col, dW_FORM_data2)
, (dW_AT_ghc_span_end_line, dW_FORM_data4)
, (dW_AT_ghc_span_end_col, dW_FORM_data2)
] $$
pprByte 0
-- | Generate assembly for DWARF data
pprDwarfInfo :: Bool -> DwarfInfo -> SDoc
pprDwarfInfo haveSrc d
= case d of
DwarfCompileUnit {} -> hasChildren
DwarfSubprogram {} -> hasChildren
DwarfBlock {} -> hasChildren
DwarfSrcNote {} -> noChildren
where
hasChildren =
pprDwarfInfoOpen haveSrc d $$
vcat (map (pprDwarfInfo haveSrc) (dwChildren d)) $$
pprDwarfInfoClose
noChildren = pprDwarfInfoOpen haveSrc d
-- | Prints assembler data corresponding to DWARF info records. Note
-- that the binary format of this is parameterized in @abbrevDecls@ and
-- has to be kept in synch.
pprDwarfInfoOpen :: Bool -> DwarfInfo -> SDoc
pprDwarfInfoOpen haveSrc (DwarfCompileUnit _ name producer compDir lowLabel
highLabel lineLbl) =
pprAbbrev DwAbbrCompileUnit
$$ pprString name
$$ pprString producer
$$ pprData4 dW_LANG_Haskell
$$ pprString compDir
$$ pprWord (ppr lowLabel)
$$ pprWord (ppr highLabel)
$$ if haveSrc
then sectionOffset (ptext lineLbl) (ptext dwarfLineLabel)
else empty
pprDwarfInfoOpen _ (DwarfSubprogram _ name label
parent) = sdocWithDynFlags $ \df ->
ppr (mkAsmTempDieLabel label) <> colon
$$ pprAbbrev abbrev
$$ pprString name
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprFlag (externallyVisibleCLabel label)
$$ pprWord (ppr label)
$$ pprWord (ppr $ mkAsmTempEndLabel label)
$$ pprByte 1
$$ pprByte dW_OP_call_frame_cfa
$$ parentValue
where
abbrev = case parent of Nothing -> DwAbbrSubprogram
Just _ -> DwAbbrSubprogramWithParent
parentValue = maybe empty pprParentDie parent
pprParentDie sym = sectionOffset (ppr sym) (ptext dwarfInfoLabel)
pprDwarfInfoOpen _ (DwarfBlock _ label Nothing) = sdocWithDynFlags $ \df ->
ppr (mkAsmTempDieLabel label) <> colon
$$ pprAbbrev DwAbbrBlockWithoutCode
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
pprDwarfInfoOpen _ (DwarfBlock _ label (Just marker)) = sdocWithDynFlags $ \df ->
ppr (mkAsmTempDieLabel label) <> colon
$$ pprAbbrev DwAbbrBlock
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprWord (ppr marker)
$$ pprWord (ppr $ mkAsmTempEndLabel marker)
pprDwarfInfoOpen _ (DwarfSrcNote ss) =
pprAbbrev DwAbbrGhcSrcNote
$$ pprString' (ftext $ srcSpanFile ss)
$$ pprData4 (fromIntegral $ srcSpanStartLine ss)
$$ pprHalf (fromIntegral $ srcSpanStartCol ss)
$$ pprData4 (fromIntegral $ srcSpanEndLine ss)
$$ pprHalf (fromIntegral $ srcSpanEndCol ss)
-- | Close a DWARF info record with children
pprDwarfInfoClose :: SDoc
pprDwarfInfoClose = pprAbbrev DwAbbrNull
-- | A DWARF address range. This is used by the debugger to quickly locate
-- which compilation unit a given address belongs to. This type assumes
-- a non-segmented address-space.
data DwarfARange
= DwarfARange
{ dwArngStartLabel :: CLabel
, dwArngEndLabel :: CLabel
}
-- | Print assembler directives corresponding to a DWARF @.debug_aranges@
-- address table entry.
pprDwarfARanges :: [DwarfARange] -> Unique -> SDoc
pprDwarfARanges arngs unitU = sdocWithPlatform $ \plat ->
let wordSize = platformWordSize plat
paddingSize = 4 :: Int
-- header is 12 bytes long.
-- entry is 8 bytes (32-bit platform) or 16 bytes (64-bit platform).
-- pad such that first entry begins at multiple of entry size.
pad n = vcat $ replicate n $ pprByte 0
initialLength = 8 + paddingSize + 2*2*wordSize
in pprDwWord (ppr initialLength)
$$ pprHalf 2
$$ sectionOffset (ppr $ mkAsmTempLabel $ unitU)
(ptext dwarfInfoLabel)
$$ pprByte (fromIntegral wordSize)
$$ pprByte 0
$$ pad paddingSize
-- body
$$ vcat (map pprDwarfARange arngs)
-- terminus
$$ pprWord (char '0')
$$ pprWord (char '0')
pprDwarfARange :: DwarfARange -> SDoc
pprDwarfARange arng = pprWord (ppr $ dwArngStartLabel arng) $$ pprWord length
where
length = ppr (dwArngEndLabel arng)
<> char '-' <> ppr (dwArngStartLabel arng)
-- | Information about unwind instructions for a procedure. This
-- corresponds to a "Common Information Entry" (CIE) in DWARF.
data DwarfFrame
= DwarfFrame
{ dwCieLabel :: CLabel
, dwCieInit :: UnwindTable
, dwCieProcs :: [DwarfFrameProc]
}
-- | Unwind instructions for an individual procedure. Corresponds to a
-- "Frame Description Entry" (FDE) in DWARF.
data DwarfFrameProc
= DwarfFrameProc
{ dwFdeProc :: CLabel
, dwFdeHasInfo :: Bool
, dwFdeBlocks :: [DwarfFrameBlock]
-- ^ List of blocks. Order must match asm!
}
-- | Unwind instructions for a block. Will become part of the
-- containing FDE.
data DwarfFrameBlock
= DwarfFrameBlock
{ dwFdeBlkHasInfo :: Bool
, dwFdeUnwind :: [UnwindPoint]
-- ^ these unwind points must occur in the same order as they occur
-- in the block
}
instance Outputable DwarfFrameBlock where
ppr (DwarfFrameBlock hasInfo unwinds) = braces $ ppr hasInfo <+> ppr unwinds
-- | Header for the @.debug_frame@ section. Here we emit the "Common
-- Information Entry" record that etablishes general call frame
-- parameters and the default stack layout.
pprDwarfFrame :: DwarfFrame -> SDoc
pprDwarfFrame DwarfFrame{dwCieLabel=cieLabel,dwCieInit=cieInit,dwCieProcs=procs}
= sdocWithPlatform $ \plat ->
let cieStartLabel= mkAsmTempDerivedLabel cieLabel (fsLit "_start")
cieEndLabel = mkAsmTempEndLabel cieLabel
length = ppr cieEndLabel <> char '-' <> ppr cieStartLabel
spReg = dwarfGlobalRegNo plat Sp
retReg = dwarfReturnRegNo plat
wordSize = platformWordSize plat
pprInit :: (GlobalReg, Maybe UnwindExpr) -> SDoc
pprInit (g, uw) = pprSetUnwind plat g (Nothing, uw)
         -- Preserve C stack pointer: This is necessary to override the default
-- unwinding behavior of setting $sp = CFA.
preserveSp = case platformArch plat of
ArchX86 -> pprByte dW_CFA_same_value $$ pprLEBWord 4
ArchX86_64 -> pprByte dW_CFA_same_value $$ pprLEBWord 7
_ -> empty
in vcat [ ppr cieLabel <> colon
, pprData4' length -- Length of CIE
, ppr cieStartLabel <> colon
, pprData4' (text "-1")
-- Common Information Entry marker (-1 = 0xf..f)
, pprByte 3 -- CIE version (we require DWARF 3)
, pprByte 0 -- Augmentation (none)
, pprByte 1 -- Code offset multiplicator
, pprByte (128-fromIntegral wordSize)
-- Data offset multiplicator
-- (stacks grow down => "-w" in signed LEB128)
, pprByte retReg -- virtual register holding return address
] $$
-- Initial unwind table
vcat (map pprInit $ Map.toList cieInit) $$
vcat [ -- RET = *CFA
pprByte (dW_CFA_offset+retReg)
, pprByte 0
-- Preserve C stack pointer
, preserveSp
-- Sp' = CFA
-- (we need to set this manually as our (STG) Sp register is
-- often not the architecture's default stack register)
, pprByte dW_CFA_val_offset
, pprLEBWord (fromIntegral spReg)
, pprLEBWord 0
] $$
wordAlign $$
ppr cieEndLabel <> colon $$
-- Procedure unwind tables
vcat (map (pprFrameProc cieLabel cieInit) procs)
-- | Writes a "Frame Description Entry" for a procedure. This consists
-- mainly of referencing the CIE and writing state machine
-- instructions to describe how the frame base (CFA) changes.
pprFrameProc :: CLabel -> UnwindTable -> DwarfFrameProc -> SDoc
pprFrameProc frameLbl initUw (DwarfFrameProc procLbl hasInfo blocks)
= let fdeLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde")
fdeEndLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde_end")
procEnd = mkAsmTempEndLabel procLbl
ifInfo str = if hasInfo then text str else empty
-- see [Note: Info Offset]
in vcat [ whenPprDebug $ text "# Unwinding for" <+> ppr procLbl <> colon
, pprData4' (ppr fdeEndLabel <> char '-' <> ppr fdeLabel)
, ppr fdeLabel <> colon
, pprData4' (ppr frameLbl <> char '-' <>
ptext dwarfFrameLabel) -- Reference to CIE
, pprWord (ppr procLbl <> ifInfo "-1") -- Code pointer
, pprWord (ppr procEnd <> char '-' <>
ppr procLbl <> ifInfo "+1") -- Block byte length
] $$
vcat (S.evalState (mapM pprFrameBlock blocks) initUw) $$
wordAlign $$
ppr fdeEndLabel <> colon
-- | Generates unwind information for a block. We only generate
-- instructions where unwind information actually changes. This small
-- optimisations saves a lot of space, as subsequent blocks often have
-- the same unwind information.
pprFrameBlock :: DwarfFrameBlock -> S.State UnwindTable SDoc
pprFrameBlock (DwarfFrameBlock hasInfo uws0) =
vcat <$> zipWithM pprFrameDecl (True : repeat False) uws0
where
pprFrameDecl :: Bool -> UnwindPoint -> S.State UnwindTable SDoc
pprFrameDecl firstDecl (UnwindPoint lbl uws) = S.state $ \oldUws ->
let -- Did a register's unwind expression change?
isChanged :: GlobalReg -> Maybe UnwindExpr
-> Maybe (Maybe UnwindExpr, Maybe UnwindExpr)
isChanged g new
-- the value didn't change
| Just new == old = Nothing
-- the value was and still is undefined
| Nothing <- old
, Nothing <- new = Nothing
-- the value changed
| otherwise = Just (join old, new)
where
old = Map.lookup g oldUws
changed = Map.toList $ Map.mapMaybeWithKey isChanged uws
in if oldUws == uws
then (empty, oldUws)
else let -- see [Note: Info Offset]
needsOffset = firstDecl && hasInfo
lblDoc = ppr lbl <>
if needsOffset then text "-1" else empty
doc = sdocWithPlatform $ \plat ->
pprByte dW_CFA_set_loc $$ pprWord lblDoc $$
vcat (map (uncurry $ pprSetUnwind plat) changed)
in (doc, uws)
-- Note [Info Offset]
--
-- GDB was pretty much written with C-like programs in mind, and as a
-- result they assume that once you have a return address, it is a
-- good idea to look at (PC-1) to unwind further - as that's where the
-- "call" instruction is supposed to be.
--
-- Now on one hand, code generated by GHC looks nothing like what GDB
-- expects, and in fact going up from a return pointer is guaranteed
-- to land us inside an info table! On the other hand, that actually
-- gives us some wiggle room, as we expect IP to never *actually* end
-- up inside the info table, so we can "cheat" by putting whatever GDB
-- expects to see there. This is probably pretty safe, as GDB cannot
-- assume (PC-1) to be a valid code pointer in the first place - and I
-- have seen no code trying to correct this.
--
-- Note that this will not prevent GDB from failing to look-up the
-- correct function name for the frame, as that uses the symbol table,
-- which we can not manipulate as easily.
-- | Get DWARF register ID for a given GlobalReg
dwarfGlobalRegNo :: Platform -> GlobalReg -> Word8
dwarfGlobalRegNo p UnwindReturnReg = dwarfReturnRegNo p
dwarfGlobalRegNo p reg = maybe 0 (dwarfRegNo p . RegReal) $ globalRegMaybe p reg
-- | Generate code for setting the unwind information for a register,
-- optimized using its known old value in the table. Note that "Sp" is
-- special: We see it as synonym for the CFA.
pprSetUnwind :: Platform
-> GlobalReg
-- ^ the register to produce an unwinding table entry for
-> (Maybe UnwindExpr, Maybe UnwindExpr)
-- ^ the old and new values of the register
-> SDoc
pprSetUnwind plat g (_, Nothing)
= pprUndefUnwind plat g
pprSetUnwind _ Sp (Just (UwReg s _), Just (UwReg s' o')) | s == s'
= if o' >= 0
then pprByte dW_CFA_def_cfa_offset $$ pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_offset_sf $$ pprLEBInt o'
pprSetUnwind plat Sp (_, Just (UwReg s' o'))
= if o' >= 0
then pprByte dW_CFA_def_cfa $$
pprLEBRegNo plat s' $$
pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_sf $$
pprLEBRegNo plat s' $$
pprLEBInt o'
pprSetUnwind _ Sp (_, Just uw)
= pprByte dW_CFA_def_cfa_expression $$ pprUnwindExpr False uw
pprSetUnwind plat g (_, Just (UwDeref (UwReg Sp o)))
| o < 0 && ((-o) `mod` platformWordSize plat) == 0 -- expected case
= pprByte (dW_CFA_offset + dwarfGlobalRegNo plat g) $$
pprLEBWord (fromIntegral ((-o) `div` platformWordSize plat))
| otherwise
= pprByte dW_CFA_offset_extended_sf $$
pprLEBRegNo plat g $$
pprLEBInt o
pprSetUnwind plat g (_, Just (UwDeref uw))
= pprByte dW_CFA_expression $$
pprLEBRegNo plat g $$
pprUnwindExpr True uw
pprSetUnwind plat g (_, Just (UwReg g' 0))
| g == g'
= pprByte dW_CFA_same_value $$
pprLEBRegNo plat g
pprSetUnwind plat g (_, Just uw)
= pprByte dW_CFA_val_expression $$
pprLEBRegNo plat g $$
pprUnwindExpr True uw
-- | Print the register number of the given 'GlobalReg' as an unsigned LEB128
-- encoded number.
pprLEBRegNo :: Platform -> GlobalReg -> SDoc
pprLEBRegNo plat = pprLEBWord . fromIntegral . dwarfGlobalRegNo plat
-- | Generates a DWARF expression for the given unwind expression. If
-- @spIsCFA@ is true, we see @Sp@ as the frame base CFA where it gets
-- mentioned.
pprUnwindExpr :: Bool -> UnwindExpr -> SDoc
pprUnwindExpr spIsCFA expr
= sdocWithPlatform $ \plat ->
let pprE (UwConst i)
| i >= 0 && i < 32 = pprByte (dW_OP_lit0 + fromIntegral i)
| otherwise = pprByte dW_OP_consts $$ pprLEBInt i -- lazy...
pprE (UwReg Sp i) | spIsCFA
= if i == 0
then pprByte dW_OP_call_frame_cfa
else pprE (UwPlus (UwReg Sp 0) (UwConst i))
pprE (UwReg g i) = pprByte (dW_OP_breg0+dwarfGlobalRegNo plat g) $$
pprLEBInt i
pprE (UwDeref u) = pprE u $$ pprByte dW_OP_deref
pprE (UwLabel l) = pprByte dW_OP_addr $$ pprWord (ppr l)
pprE (UwPlus u1 u2) = pprE u1 $$ pprE u2 $$ pprByte dW_OP_plus
pprE (UwMinus u1 u2) = pprE u1 $$ pprE u2 $$ pprByte dW_OP_minus
pprE (UwTimes u1 u2) = pprE u1 $$ pprE u2 $$ pprByte dW_OP_mul
in text "\t.uleb128 1f-.-1" $$ -- DW_FORM_block length
pprE expr $$
text "1:"
-- | Generate code for re-setting the unwind information for a
-- register to @undefined@
pprUndefUnwind :: Platform -> GlobalReg -> SDoc
pprUndefUnwind plat g = pprByte dW_CFA_undefined $$
pprLEBRegNo plat g
-- | Align assembly at (machine) word boundary
wordAlign :: SDoc
wordAlign = sdocWithPlatform $ \plat ->
text "\t.align " <> case platformOS plat of
OSDarwin -> case platformWordSize plat of
8 -> text "3"
4 -> text "2"
_other -> error "wordAlign: Unsupported word size!"
_other -> ppr (platformWordSize plat)
-- | Assembly for a single byte of constant DWARF data
pprByte :: Word8 -> SDoc
pprByte x = text "\t.byte " <> ppr (fromIntegral x :: Word)
-- | Assembly for a two-byte constant integer
pprHalf :: Word16 -> SDoc
pprHalf x = text "\t.short" <+> ppr (fromIntegral x :: Word)
-- | Assembly for a constant DWARF flag
pprFlag :: Bool -> SDoc
pprFlag f = pprByte (if f then 0xff else 0x00)
-- | Assembly for 4 bytes of dynamic DWARF data
pprData4' :: SDoc -> SDoc
pprData4' x = text "\t.long " <> x
-- | Assembly for 4 bytes of constant DWARF data
pprData4 :: Word -> SDoc
pprData4 = pprData4' . ppr
-- | Assembly for a DWARF word of dynamic data. This means 32 bit, as
-- we are generating 32 bit DWARF.
pprDwWord :: SDoc -> SDoc
pprDwWord = pprData4'
-- | Assembly for a machine word of dynamic data. Depends on the
-- architecture we are currently generating code for.
pprWord :: SDoc -> SDoc
pprWord s = (<> s) . sdocWithPlatform $ \plat ->
case platformWordSize plat of
4 -> text "\t.long "
8 -> text "\t.quad "
n -> panic $ "pprWord: Unsupported target platform word length " ++
show n ++ "!"
-- | Prints a number in "little endian base 128" format. The idea is
-- to optimize for small numbers by stopping once all further bytes
-- would be 0. The highest bit in every byte signals whether there
-- are further bytes to read.
pprLEBWord :: Word -> SDoc
pprLEBWord x | x < 128 = pprByte (fromIntegral x)
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBWord (x `shiftR` 7)
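-- For example (a sketch, not part of the original module): the standard
-- DWARF test value 624485 encodes to the three bytes 0xE5 0x8E 0x26, so
-- @pprLEBWord 624485@ emits three ".byte" directives (229, 142 and 38).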
-- | Same as @pprLEBWord@, but for a signed number
pprLEBInt :: Int -> SDoc
pprLEBInt x | x >= -64 && x < 64
= pprByte (fromIntegral (x .&. 127))
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBInt (x `shiftR` 7)
-- | Generates a dynamic null-terminated string. If required the
-- caller needs to make sure that the string is escaped properly.
pprString' :: SDoc -> SDoc
pprString' str = text "\t.asciz \"" <> str <> char '"'
-- | Generate a string constant. We take care to escape the string.
pprString :: String -> SDoc
pprString str
= pprString' $ hcat $ map escapeChar $
if str `lengthIs` utf8EncodedLength str
then str
else map (chr . fromIntegral) $ bytesFS $ mkFastString str
-- | Escape a single non-unicode character
escapeChar :: Char -> SDoc
escapeChar '\\' = text "\\\\"
escapeChar '\"' = text "\\\""
escapeChar '\n' = text "\\n"
escapeChar c
| isAscii c && isPrint c && c /= '?' -- prevents trigraph warnings
= char c
| otherwise
= char '\\' <> char (intToDigit (ch `div` 64)) <>
char (intToDigit ((ch `div` 8) `mod` 8)) <>
char (intToDigit (ch `mod` 8))
where ch = ord c
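-- For example (a sketch, not part of the original module): @escapeChar '\t'@
-- yields the octal escape "\011", since a tab is ASCII but not printable.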
-- | Generate an offset into another section. This is tricky because
-- this is handled differently depending on platform: Mac OS expects
-- us to calculate the offset using assembler arithmetic. Linux expects
-- us to just reference the target directly, and will figure out on
-- its own that we actually need an offset. Finally, Windows has
-- a special directive to refer to relative offsets. Fun.
sectionOffset :: SDoc -> SDoc -> SDoc
sectionOffset target section = sdocWithPlatform $ \plat ->
case platformOS plat of
OSDarwin -> pprDwWord (target <> char '-' <> section)
OSMinGW32 -> text "\t.secrel32 " <> target
_other -> pprDwWord target
| ezyang/ghc | compiler/nativeGen/Dwarf/Types.hs | bsd-3-clause | 23,682 | 0 | 24 | 6,419 | 5,112 | 2,678 | 2,434 | 422 | 9 |
-- | This module handles loading data from disk.
module HLearn.Data.LoadData
where
import SubHask
import SubHask.Algebra.Array
import SubHask.Algebra.Container
import SubHask.Algebra.Parallel
import SubHask.Compatibility.ByteString
import SubHask.Compatibility.Cassava
import SubHask.Compatibility.Containers
import SubHask.TemplateHaskell.Deriving
import HLearn.History.Timing
import HLearn.Models.Distributions
import qualified Prelude as P
import Prelude (asTypeOf,unzip,head,take,drop,zipWith)
import Control.Monad.ST
import qualified Data.List as L
import Data.Maybe
import System.Directory
import System.IO
--------------------------------------------------------------------------------
{-
FIXME:
This code was written a long time ago to assist with the Cover Tree ICML paper.
It needs to be updated to use the new subhask interface.
This should be an easy project.
-- | This loads files in the format used by the BagOfWords UCI dataset.
-- See: https://archive.ics.uci.edu/ml/machine-learning-databases/bag-of-words/readme.txt
loadBagOfWords :: FilePath -> IO (BArray (Map' Int Float))
loadBagOfWords filepath = do
hin <- openFile filepath ReadMode
numdp :: Int <- liftM read $ hGetLine hin
numdim :: Int <- liftM read $ hGetLine hin
numlines :: Int <- liftM read $ hGetLine hin
ret <- VGM.replicate numdp zero
forM [0..numlines-1] $ \i -> do
line <- hGetLine hin
let [dp,dim,val] :: [Int] = map read $ L.words line
curdp <- VGM.read ret (dp-1)
VGM.write ret (dp-1) $ insertAt dim (fromIntegral val) curdp
hClose hin
VG.unsafeFreeze ret
-}
-- | Loads a dataset of strings in the unix words file format (i.e. one word per line).
-- This format is also used by the UCI Bag Of Words dataset.
-- See: https://archive.ics.uci.edu/ml/machine-learning-databases/bag-of-words/readme.txt
loadWords :: (Monoid dp, Elem dp~Char, Eq dp, Constructible dp) => FilePath -> IO (BArray dp)
loadWords filepath = do
hin <- openFile filepath ReadMode
contents <- hGetContents hin
return $ fromList $ map fromList $ L.lines contents
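-- A usage sketch (illustrative only; the path below is just an example):
--
-- > ws <- loadWords "/usr/share/dict/words" :: IO (BArray String)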
--------------------------------------------------------------------------------
-- | Returns all files in a subdirectory (and all descendant directories).
-- Unlike "getDirectoryContents", this function prepends the directory's path to each filename.
-- This is important so that we can tell where in the hierarchy the file is located.
--
-- FIXME:
-- This is relatively untested.
-- It probably has bugs related to weird symbolic links.
getDirectoryContentsRecursive :: FilePath -> IO [FilePath]
getDirectoryContentsRecursive = fmap toList . go
where
go :: FilePath -> IO (Seq FilePath)
go dirpath = do
files <- getDirectoryContents dirpath
fmap concat $ forM files $ \file -> case file of
'.':_ -> return empty
_ -> do
let file' = dirpath++"/"++file
isdir <- doesDirectoryExist file'
contents <- if isdir
then go file'
else return empty
return $ file' `cons` contents
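-- For example (illustrative only): @getDirectoryContentsRecursive "datasets"@
-- might return @["datasets/faces/img0.csv", "datasets/faces/img1.csv", ...]@;
-- every returned path keeps the "datasets/" prefix, and hidden files are skipped.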
-- | A generic method for loading data points.
-- Each file in a directory hierarchy corresponds to a single data point.
--
-- The label assigned to the data point is simply the name of the file.
-- This means each data point will have a distinct label.
-- For typical supervised learning tasks, you will want to prune these
-- labels afterwards (for example, by reducing each file path to the name
-- of its containing directory).
loadDirectory ::
( Eq a
, NFData a
) => Maybe Int -- ^ maximum number of datapoints to load; Nothing for unlimitted
-> (FilePath -> IO a) -- ^ function to load an individual file
-> (FilePath -> Bool) -- ^ function to filter out invalid filenames
-> (a -> Bool) -- ^ function to filter out malformed results
-> FilePath -- ^ directory to load data from
    -> IO (BArray (Labeled' a FilePath)) -- ^ the loaded data points, labeled by their file paths
loadDirectory numdp loadFile validFilepath validResult dirpath = {-# SCC loadDirectory #-} do
files <- timeIO "getDirectoryContentsRecursive" $ do
xs <- getDirectoryContentsRecursive dirpath
let takedp = case numdp of
Nothing -> id
Just n -> fmap (L.take n)
return $ takedp $ L.filter validFilepath xs
results <- timeIO "loadDirectory" $ do
xs <- forM files $ \filepath -> do
res <- loadFile filepath
return $ Labeled' res filepath
return $ L.filter (validResult . xLabeled') xs
putStrLn $ " numdp: " ++ show (length files)
return $ fromList results
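-- A usage sketch (illustrative only; the loader, filters and directory are
-- all hypothetical choices by the caller):
--
-- > xs <- loadDirectory (Just 1000) P.readFile (L.isSuffixOf ".txt") (const True) "corpus"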
-- | Load a CSV file containing numeric attributes.
{-# INLINABLE loadCSV #-}
loadCSV ::
( NFData a
, FromRecord a
, FiniteModule a
, Eq a
, Show (Scalar a)
) => FilePath -> IO (BArray a)
loadCSV filepath = do
bs <- timeIO ("loading ["++filepath++"]") $ readFileByteString filepath
let rse = decode NoHeader bs
time "parsing csv file" rse
rs <- case rse of
Right rs -> return rs
Left str -> error $ "failed to parse CSV file " ++ filepath ++ ": " ++ L.take 1000 str
putStrLn " dataset info:"
putStrLn $ " num dp: " ++ show ( size rs )
putStrLn $ " numdim: " ++ show ( dim $ rs!0 )
putStrLn ""
return rs
-- | FIXME: this should be combined with the CSV function above
loadCSVLabeled' ::
( NFData x
, FromRecord x
, FiniteModule x
, Eq x
, Show (Scalar x)
, Read (Scalar x)
) => Int -- ^ column of csv file containing the label
-> FilePath -- ^ path to csv file
-> IO (BArray (Labeled' x (Lexical String)))
loadCSVLabeled' col filepath = do
bs <- timeIO ("loading ["++filepath++"]") $ readFileByteString filepath
let rse = decode NoHeader bs
time "parsing csv file" rse
rs :: BArray (BArray String) <- case rse of
Right rs -> return rs
Left str -> error $ "failed to parse CSV file " ++ filepath ++ ": " ++ L.take 1000 str
let ret = fromList $ map go $ toList rs
putStrLn " dataset info:"
putStrLn $ " num dp: " ++ show ( size ret )
putStrLn $ " numdim: " ++ show ( dim $ xLabeled' $ ret!0 )
putStrLn ""
return ret
where
go arr = Labeled' x y
where
y = Lexical $ arr!col
x = unsafeToModule $ map read $ take (col) arrlist ++ drop (col+1) arrlist
arrlist = toList arr
-------------------------------------------------------------------------------
-- data preprocessing
--
-- FIXME:
-- Find a better location for all this code.
-- | Uses an efficient 1-pass algorithm to calculate the mean and variance.
-- This is much faster than the 2-pass algorithms on large datasets,
-- but has (slightly) worse numeric stability.
--
-- See http://www.cs.berkeley.edu/~mhoemmen/cs194/Tutorials/variance.pdf for details.
{-# INLINE meanAndVarianceInOnePass #-}
meanAndVarianceInOnePass :: (Foldable xs, Field (Elem xs)) => xs -> (Elem xs, Elem xs)
meanAndVarianceInOnePass ys =
{-# SCC meanAndVarianceInOnePass #-}
case uncons ys of
Nothing -> error "meanAndVarianceInOnePass on empty container"
Just (x,xs) -> (\(k,m,v) -> (m,v/(k-1))) $ foldl' go (2,x,0) xs
where
go (k,mk,qk) x = (k+1,mk',qk')
where
mk'=mk+(x-mk)/k
qk'=qk+(k-1)*(x-mk)*(x-mk)/k
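-- A worked example (a sketch, not part of the original module):
--
-- > meanAndVarianceInOnePass ([1,2,3,4] :: [Double]) == (2.5, 1.25)
--
-- (the divisor is the full element count here, since the fold finishes with
-- k = n+1 and divides the accumulated sum of squares by k-1 = n).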
-- | A wrapper around "meanAndVarianceInOnePass"
{-# INLINE varianceInOnePass #-}
varianceInOnePass :: (Foldable xs, Field (Elem xs)) => xs -> Elem xs
varianceInOnePass = snd . meanAndVarianceInOnePass
-- | Calculate the variance of each column, then sort so that the highest variance is first.
-- This can be useful for preprocessing data.
--
-- NOTE:
-- The git history has a lot of versions of this function with different levels of efficiency.
-- I need to write a blog post about how all the subtle haskellisms affect the runtime.
{-# INLINABLE mkShuffleMap #-}
mkShuffleMap :: forall v.
( FiniteModule v
, VectorSpace v
, Unboxable v
, Unboxable (Scalar v)
, Eq v
, Elem (SetElem v (Elem v)) ~ Elem v
, Elem (SetElem v (Scalar (Elem v))) ~ Scalar (Elem v)
, IxContainer (SetElem v (Elem v))
) => BArray v -> UArray Int
mkShuffleMap vs = {-# SCC mkShuffleMap #-} if size vs==0
then error "mkShuffleMap: called on empty array"
else runST ( do
-- FIXME:
-- @smalldata@ should be a random subsample of the data.
-- The size should also depend on the dimension.
let smalldata = P.take 1000 $ toList vs
-- let variances = fromList
-- $ values
-- $ varianceInOnePass
-- $ VG.map Componentwise vs
-- :: BArray (Scalar v)
let variances
= imap (\i _ -> varianceInOnePass $ (imap (\_ -> (!i)) smalldata))
$ values
$ vs!0
:: [Scalar v]
return
$ fromList
$ map fst
$ L.sortBy (\(_,v1) (_,v2) -> compare v2 v1)
$ imap (,)
$ variances
)
-- | apply the shufflemap to the data set to get a better ordering of the data
{-# INLINABLE apShuffleMap #-}
apShuffleMap :: forall v. FiniteModule v => UArray Int -> v -> v
apShuffleMap vmap v = unsafeToModule xs
where
xs :: [Scalar v]
xs = generate1 (size vmap) $ \i -> v!(vmap!i)
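-- A usage sketch (illustrative only; 'dps' is a hypothetical BArray of data
-- points, and mapping over BArray is assumed to be available):
--
-- > let vmap = mkShuffleMap dps
-- > in fmap (apShuffleMap vmap) dps
--
-- which permutes the coordinates of every point so that the highest-variance
-- coordinate comes first.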
{-# INLINABLE generate1 #-}
generate1 :: (Monoid v, Constructible v) => Int -> (Int -> Elem v) -> v
generate1 n f = if n <= 0
then zero
else fromList1N n (f 0) (map f [1..n-1])
{-
FIXME:
All this needs to be reimplemented using the subhask interface.
This requires fixing some of the features of subhask's linear algebra system.
-- | translate a dataset so the mean is zero
{-# INLINABLE meanCenter #-}
meanCenter ::
( VG.Vector v1 (v2 a)
, VG.Vector v2 a
, Real a
) => v1 (v2 a) -> v1 (v2 a)
meanCenter dps = {-# SCC meanCenter #-} VG.map (\v -> VG.zipWith (-) v meanV) dps
where
meanV = {-# SCC meanV #-} VG.map (/ fromIntegral (VG.length dps)) $ VG.foldl1' (VG.zipWith (+)) dps
-- | rotates the data using the PCA transform
{-# INLINABLE rotatePCA #-}
rotatePCA ::
( VG.Vector container dp
, VG.Vector container [Float]
, VG.Vector v a
, dp ~ v a
, Show a
, a ~ Float
) => container dp -> container dp
rotatePCA dps' = {-# SCC rotatePCA #-} VG.map rotate dps
where
-- rotate dp = VG.convert $ LA.single $ eigm LA.<> LA.double (VG.convert dp :: VS.Vector Float)
rotate dp = {-# SCC convert #-} VG.convert $ LA.single $ (LA.trans eigm) LA.<> LA.double (VG.convert dp :: VS.Vector Float)
dps = meanCenter dps'
(eigv,eigm) = {-# SCC eigSH #-} LA.eigSH $ LA.double gramMatrix
-- gramMatrix = {-# SCC gramMatrix #-} gramMatrix_ $ map VG.convert $ VG.toList dps
-- gramMatrix = {-# SCC gramMatrix #-} LA.trans tmpm LA.<> tmpm
-- where
-- tmpm = LA.fromLists (VG.toList $ VG.map VG.toList dps)
gramMatrix = {-# SCC gramMatrix #-} foldl1' (P.+)
[ let dp' = VG.convert dp in LA.asColumn dp' LA.<> LA.asRow dp' | dp <- VG.toList dps ]
gramMatrix_ :: (Ring a, Storable a) => [VS.Vector a] -> LA.Matrix a
gramMatrix_ xs = runST ( do
let dim = VG.length (head xs)
m <- LA.newMatrix 0 dim dim
forM_ xs $ \x -> do
forM_ [0..dim-1] $ \i -> do
forM_ [0..dim-1] $ \j -> do
mij <- LA.unsafeReadMatrix m i j
LA.unsafeWriteMatrix m i j $ mij + (x `VG.unsafeIndex` i)*(x `VG.unsafeIndex` j)
LA.unsafeFreezeMatrix m
)
{-# INLINABLE rotatePCADouble #-}
-- | rotates the data using the PCA transform
rotatePCADouble ::
( VG.Vector container (v Double)
, VG.Vector container [Double]
, VG.Vector v Double
) => container (v Double) -> container (v Double)
rotatePCADouble dps' = VG.map rotate dps
where
rotate dp = VG.convert $ (LA.trans eigm) LA.<> (VG.convert dp :: VS.Vector Double)
dps = meanCenter dps'
(eigv,eigm) = LA.eigSH gramMatrix
gramMatrix = LA.trans tmpm LA.<> tmpm
where
tmpm = LA.fromLists (VG.toList $ VG.map VG.toList dps)
-}
| mikeizbicki/HLearn | src/HLearn/Data/LoadData.hs | bsd-3-clause | 12,398 | 0 | 24 | 3,387 | 2,149 | 1,107 | 1,042 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Passive Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/pscanrules/src/main/javahelp/org/zaproxy/zap/extension/pscanrules/resources/help_ko_KR/helpset_ko_KR.hs | apache-2.0 | 979 | 78 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>TreeTools</title>
<maps>
<homeID>treetools</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/treetools/src/main/javahelp/help_id_ID/helpset_id_ID.hs | apache-2.0 | 960 | 77 | 66 | 155 | 404 | 205 | 199 | -1 | -1 |
import Test.Cabal.Prelude
-- Test build-tool-depends between two packages
main = cabalTest $ do
cabal "new-build" ["client"]
| mydaum/cabal | cabal-testsuite/PackageTests/BuildToolDepends/setup.test.hs | bsd-3-clause | 129 | 0 | 9 | 20 | 28 | 15 | 13 | 3 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UnboxedSums #-}
module T12478_5 where
import Language.Haskell.TH
foo :: $(conT (unboxedSumTypeName 2) `appT` conT ''() `appT` conT ''())
-> $(conT (unboxedSumTypeName 2) `appT` conT ''() `appT` conT ''())
foo $(conP (unboxedSumDataName 1 2) [conP '() []])
= $(conE (unboxedSumDataName 2 2) `appE` conE '())
foo $(conP (unboxedSumDataName 2 2) [conP '() []])
= $(conE (unboxedSumDataName 2 2) `appE` conE '())
foo2 :: (# () | () #)
-> $(conT (unboxedSumTypeName 2) `appT` conT ''() `appT` conT ''())
foo2 (# () | #) = $(conE (unboxedSumDataName 2 2) `appE` conE '())
foo2 $(conP (unboxedSumDataName 2 2) [conP '() []]) = (# | () #)
| olsner/ghc | testsuite/tests/th/T12478_5.hs | bsd-3-clause | 693 | 0 | 12 | 125 | 369 | 199 | 170 | 14 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Tc251_Help where
class Cls a where
type Fam a :: *
type Fam a = Maybe a
| wxwxwwxxx/ghc | testsuite/tests/typecheck/should_compile/Tc251_Help.hs | bsd-3-clause | 118 | 0 | 7 | 31 | 33 | 19 | 14 | 5 | 0 |
{-# LANGUAGE TypeFamilies, FunctionalDependencies, RankNTypes, MultiParamTypeClasses #-}
module T4254 where
class FD a b | a -> b where
op :: a -> b;
op = undefined
instance FD Int Bool
ok1 :: forall a b. (a~Int,FD a b) => a -> b
ok1 = op
-- Should be OK: op has the right type
ok2 :: forall a b. (a~Int,FD a b,b~Bool) => a -> Bool
ok2 = op
-- Should be OK: needs the b~Bool
fails :: forall a b. (a~Int,FD a b) => a -> Bool
fails = op
-- Could fail: no proof that b~Bool
-- But can also succeed; it's not a *wanted* constraint
| wxwxwwxxx/ghc | testsuite/tests/indexed-types/should_fail/T4254.hs | bsd-3-clause | 551 | 0 | 8 | 133 | 175 | 100 | 75 | 12 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module T9071_2 where
newtype Mu f = Mu (f (Mu f))
newtype K1 a b = K1 a
newtype F1 a = F1 (Mu (K1 a)) deriving Functor
| gcampax/ghc | testsuite/tests/deriving/should_fail/T9071_2.hs | bsd-3-clause | 152 | 0 | 9 | 34 | 60 | 37 | 23 | 5 | 0 |
module Math where
import Foreign.C
floor' :: Double -> Double
floor' = realToFrac . c_floor . realToFrac
{-# INLINE floor' #-}
round' :: Double -> Double
round' = realToFrac . c_round . realToFrac
{-# INLINE round' #-}
ceil' :: Double -> Double
ceil' = realToFrac . c_ceil . realToFrac
{-# INLINE ceil' #-}
log1p :: Double -> Double
log1p = realToFrac . c_log1p . realToFrac
{-# INLINE log1p #-}
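-- For example (illustrative): for tiny @x@ the naive @log (1 + x)@ loses all
-- precision once @1 + x@ rounds to 1, whereas @log1p 1e-20@ still returns a
-- value close to 1e-20.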
foreign import ccall unsafe "math.h floor" c_floor :: CDouble -> CDouble
foreign import ccall unsafe "math.h ceil" c_ceil :: CDouble -> CDouble
foreign import ccall unsafe "math.h round" c_round :: CDouble -> CDouble
foreign import ccall unsafe "math.h log1p" c_log1p :: CDouble -> CDouble
| glguy/CookieCalculator | src/Math.hs | isc | 695 | 0 | 6 | 122 | 173 | 98 | 75 | 18 | 1 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module Main where
import Control.Auto
import Control.Auto.Blip
import Control.Auto.Blip.Internal
import Control.Auto.Collection
import Control.Auto.Core
import Control.Auto.Effects
import Control.Auto.Interval
import Control.Auto.Process.Random
import Control.Auto.Run
import Control.Lens
import Control.Monad.Fix
import Control.Monad.Random
import Control.Monad.Reader hiding (forM_, mapM_)
import Control.Monad.Writer hiding ((<>), forM_, mapM_)
import Data.Foldable
import Data.IntMap.Strict (IntMap, Key)
import Data.List (sortBy)
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Ord
import Data.Serialize
import Debug.Trace
import GHC.Generics hiding (to)
import Linear hiding (ei, trace)
import Prelude hiding ((.), id, elem, any, sequence, concatMap, sum, concat, sequence_, mapM_)
import System.Console.ANSI
import System.IO
import Util
import qualified Data.IntMap.Strict as IM
import qualified Data.Map.Strict as M
-- | Types for commands, entities, inputs, outputs, etc.
-- direction
data Dir = DUp | DRight | DDown | DLeft
deriving (Show, Eq, Enum, Ord, Read, Generic)
-- an action to perform
data Action = Sword
| Bow
| Bomb
| Wall
deriving (Show, Eq, Enum, Ord, Read, Generic)
-- an item to use
data Item = Potion
deriving (Show, Eq, Enum, Ord, Read, Generic)
-- something you can pick up
data Pickup = PUArrows
| PUGunpowder
| PUCement
| PUPotion Double
| PUGold Int
deriving (Show, Eq, Ord, Read, Generic)
-- a command from the outside world/interface
data Cmd = CMove Dir
| CAct Action Dir
| CUse Item
| CNop
deriving (Show, Eq, Ord, Read, Generic)
-- a way an Entity can respond to the world
data EntResp = ERAtk Double Point -- attack with damage at relative position
| ERShoot Double Int Dir -- shoot with damage and range in direction
| ERBomb Dir -- bomb in direction
| ERBuild Dir -- build in direction
| ERFire Double Int Point -- start a fire with damage and duration in relative position
| ERMonster Char Double Double Point -- create a monster with sprite with health
-- and damage and absolute position
| ERItem Pickup Point -- place an item with pickup at absolute position
| ERGive Key Pickup -- give an entity with key/id a pickup
deriving (Show, Eq, Ord, Read, Generic)
-- communications an Entity can receive, from another
data EntComm = ECAtk Double -- attack with damage
| ECGive Pickup -- give pickup
deriving (Show, Eq, Ord, Read, Generic)
-- an entity existing on the map
data Entity = EPlayer
| EBomb
| EWall
| EFire
| EMonster Char
| EItem Pickup
deriving (Show, Eq, Ord, Read, Generic)
-- input for an Entity auto
data EntityInput = EI { _eiPos :: Point -- new position
, _eiComm :: [(Key, EntComm)] -- communications, from id's
, _eiWorld :: EntityMap -- a map of the world
} deriving (Show, Eq, Ord, Read, Generic)
-- output for an Entity auto
data EntityOutput a = EO { _eoData :: Maybe a -- extra data; Nothing if none.
, _eoPos :: Point -- position to move from
, _eoMove :: Point -- move
, _eoEntity :: Entity -- the entity
, _eoReact :: Map Entity Double -- "how this would react" when
-- encountering various entities;
-- how much damage it would attack with
, _eoResps :: Maybe [EntResp] -- lists of responses to the world.
-- Nothing if *dead*
} deriving (Show, Eq, Ord, Read, Generic)
-- output type from the player to the gui/frontend
data PlayerOut = PO { _poMessages :: [OutMessage] -- status messages
, _poHealth :: Double -- health
, _poInventory :: Inventory -- inventory
, _poKills :: Int -- kill count
} deriving (Show, Eq, Ord, Read, Generic)
-- player inventory, for the purpose of PlayerOut rendering. not actually used
-- for the actual inventory updating of the player itself.
data Inventory = Inv { _invArrows :: Int
, _invGunpowder :: Int
, _invCement :: Int
, _invGold :: Int
} deriving (Show, Eq, Ord, Read, Generic)
-- a status message to the outside world
data OutMessage = OMAtk Entity Entity Double -- attack from to damage
| OMShot Entity Entity Double -- shot from to damage
| OMMiss Entity -- shot missed by entity
| OMDeath Entity -- entity dies
| OMPickup Entity Pickup -- entity picked up picup
deriving (Show, Eq, Ord, Read, Generic)
type Point = V2 Int
type GameMap = Map Point [Entity]
type EntityMap = IntMap (Point, Entity)
instance Serialize EntResp
instance Serialize EntComm
instance Serialize Dir
instance Serialize Pickup
instance Serialize Entity
instance Serialize EntityInput
instance Serialize a => Serialize (EntityOutput a)
instance Serialize Cmd
instance Serialize Item
instance Serialize Action
instance Serialize Inventory
instance Serialize PlayerOut
instance Serialize OutMessage
instance Semigroup EntityInput where
EI p0 c0 w0 <> EI p1 c1 w1 = EI (p0 `v` p1) (c0 ++ c1) (w0 <> w1) -- watch out, is (<>) right here?
where
v y (V2 (-1) (-1)) = y -- yeah this might not work
v _ y = y
instance Monoid EntityInput where
mempty = EI (V2 (-1) (-1)) mempty mempty
mappend = (<>)
instance Semigroup Cmd where
x <> CNop = x
_ <> x = x
instance Monoid Cmd where
mempty = CNop
mappend x CNop = x
mappend _ x = x
instance Semigroup PlayerOut where
PO m1 h1 i1 k1 <> PO m2 h2 i2 k2 = PO (m1 ++ m2) (h1 <#> h2) (i1 <> i2) (k1 <#> k2)
where
x <#> (-1) = x
_ <#> y = y
instance Monoid PlayerOut where
mempty = PO [] (-1) mempty (-1)
mappend = (<>)
instance Semigroup Inventory where
Inv a1 g1 c1 r1 <> Inv a2 g2 c2 r2 = Inv (a1 <#> a2) (g1 <#> g2) (c1 <#> c2) (r1 <#> r2)
where
x <#> (-1) = x
_ <#> y = y
instance Monoid Inventory where
mempty = Inv (-1) (-1) (-1) (-1)
mappend = (<>)
makePrisms ''Cmd
makePrisms ''Pickup
makePrisms ''Item
makePrisms ''EntResp
makePrisms ''EntComm
makePrisms ''Entity
makeLenses ''EntityInput
makeLenses ''EntityOutput
makeLenses ''PlayerOut
makeLenses ''Inventory
-- | Utility functions
mapSize :: V2 Int
mapSize = V2 70 20
startPos :: V2 Int
startPos = (`div` 2) <$> mapSize
initialPO :: PlayerOut
initialPO = PO [] initialHealth initialInv 0
initialInv :: Inventory
initialInv = Inv 50 10 30 0
initialHealth :: Double
initialHealth = 50
dirToV2 :: Dir -> V2 Int
dirToV2 dir = case dir of
DUp -> V2 0 1
DRight -> V2 1 0
DDown -> V2 0 (-1)
DLeft -> V2 (-1) 0
v2ToDir :: V2 Int -> Maybe Dir
v2ToDir v2 = case v2 of
V2 0 1 -> Just DUp
V2 1 0 -> Just DRight
V2 0 (-1) -> Just DDown
V2 (-1) 0 -> Just DLeft
_ -> Nothing
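-- A small sanity check (not part of the original module): these two are
-- inverses on unit steps, i.e.
--
-- > v2ToDir (dirToV2 d) == Just d
--
-- holds for every direction d.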
-- | Entity `Auto`s
--
bomb :: Monad m
=> Dir
-> Interval m EntityInput (EntityOutput a)
bomb dir = proc ei -> do
-- move constantly
motion <- fromInterval zero . onFor 8 . pure (dirToV2 dir) -< ()
-- damage received
let damage = sumOf (eiComm . traverse . _2 . _ECAtk) ei
-- trigger: explosion from damage; fuse: explosion from timeout
trigger <- became (<= 0) . sumFrom 2 -< negate damage
fuse <- inB 10 -< 0
-- explode when either `trigger` or `fuse` emit
let explode = explodes <$ (fuse `mergeL` trigger)
explosion <- fromBlips [] -< explode
-- act like the EntityOutput until explosion; then just be on for 1.
before -?> lmap fst (onFor 1) -< (EO Nothing (_eiPos ei) motion EBomb M.empty (Just explosion), explode)
where
explodes = do
x <- [-3..3]
y <- [-3..3]
let r = sqrt (fromIntegral x**2 + fromIntegral y**2) :: Double
guard $ r <= 3
let dur | r < 1 = 2
| r < 2 = 1
| otherwise = 1
str | r < 1 = 16
| r < 2 = 8
| r < 3 = 2
| otherwise = 1
return $ ERFire str dur (V2 x y)
-- immediately just attack everything and die.
fire :: Monad m
=> Double
-> Int
-> Interval m EntityInput (EntityOutput a)
fire str dur = lmap (\ei -> EO Nothing (_eiPos ei) zero EFire M.empty (Just [ERAtk str zero])) (onFor dur)
-- just sit there and do nothing.
wall :: Monad m
=> Auto m EntityInput (EntityOutput a)
wall = arr $ \ei -> EO Nothing (_eiPos ei) zero EWall M.empty (Just [])
-- sit there and do nothing, but when the player steps on you, send them an
-- `ERGive` response.
itemPu :: Monad m => Pickup -> Point -> Interval m EntityInput (EntityOutput (Double, a))
itemPu pu p0 = proc ei -> do
pos <- onFor 1 . pure p0 <|!> id -< _eiPos ei -- ignore first ei
let pPos = preview (eiWorld . ix (-1) . _1) ei
pickedB <- emitOn (uncurry (==)) -< (Just pos, pPos)
picked <- fromBlips [] -< [ERGive (-1) pu] <$ pickedB
let eOut = EO Nothing pos zero (EItem pu) M.empty (Just picked)
naturalDeath <- inB 200 -< ()
before -?> dead -< (eOut, (() <$ pickedB) <> naturalDeath)
where
dead = lmap fst (onFor 1) -?> lmap (set eoResps Nothing . fst) (onFor 1)
-- take an 'Auto' that never dies, and imbues it with health and death.
-- teaches an 'Auto' how to die.
withHealth :: MonadWriter ([OutMessage], Sum Int) m
=> Double
-> Auto m EntityInput (EntityOutput (Double, a))
-> Interval m EntityInput (EntityOutput (Double, a))
withHealth h0 entA = proc ei -> do
eOut <- entA -< ei
let damage = sumOf (eiComm . traverse . _2 . _ECAtk) ei
health <- sumFrom h0 -< negate damage
-- set the EntityOutput data field to be its health
let eOut' = set (eoData . _Just . _1) (max 0 health) eOut
die <- became (<= 0) -< health
-- send a mesage if a monster dies
if has (eoEntity . _EMonster) eOut
then arrMB tell -< ([OMDeath (_eoEntity eOut)], 1) <$ die
else never -< ()
-- send a message if the player dies
if has (eoEntity . _EPlayer) eOut
then arrMB (tell . (,mempty)) -< [OMDeath (_eoEntity eOut)] <$ die
else never -< ()
before -?> dead -< (eOut' , die)
where
dead = lmap (set eoResps Nothing . fst) (onFor 1)
-- the player. move around, send out attacks, pick up recharges, drain
-- inventory....
player :: MonadReader Cmd m -- environment is the current command
=> Auto m EntityInput (EntityOutput (Double, Inventory))
player = proc (EI p comm _) -> do
inp <- effect ask -< ()
move <- fromBlips zero
. modifyBlips dirToV2
. emitJusts (preview _CMove) -< inp
resps <- fromBlipsWith [] (:[])
. modifyBlips toResp
. emitJusts (preview _CAct) -< inp
arrowUsage <- emitJusts $ preview (traverse . _ERShoot) -< resps
gunpowderUsage <- emitJusts $ preview (traverse . _ERBomb) -< resps
cementUsage <- emitJusts $ preview (traverse . _ERBuild) -< resps
getArrow <- emitOn (> 0) -< length . toListOf (traverse . _2 . _ECGive . _PUArrows) $ comm
getGunpowder <- emitOn (> 0) -< length . toListOf (traverse . _2 . _ECGive . _PUGunpowder) $ comm
getCement <- emitOn (> 0) -< length . toListOf (traverse . _2 . _ECGive . _PUCement) $ comm
arrows <- scanPos (_invArrows initialInv) -< merge (+) ((-1) <$ arrowUsage) (15 <$ getArrow)
gunpowder <- scanPos (_invGunpowder initialInv) -< merge (+) ((-1) <$ gunpowderUsage) ( 5 <$ getGunpowder)
cement <- scanPos (_invCement initialInv) -< merge (+) ((-1) <$ cementUsage) (15 <$ getCement)
gold <- sumFrom 0 -< sumOf (traverse . _2 . _ECGive . _PUGold) comm
let resps' = filter (enough arrows gunpowder cement) resps
id -< EO (Just (initialHealth, Inv arrows gunpowder cement gold)) p move EPlayer atkMap (Just resps')
where
toResp :: (Action, Dir) -> EntResp
toResp (u,d) = case u of
Sword -> ERAtk 4 (dirToV2 d)
Bow -> ERShoot 1 15 d
Bomb -> ERBomb d
Wall -> ERBuild d
atkMap = M.fromList . map (,4) $ [EWall, EMonster 'Z', EBomb]
scanPos = scanB (\x y -> max 0 (x + y))
enough ar gp cm resp = case resp of
ERAtk {} -> True
ERShoot {} -> ar > 0
ERBomb {} -> gp > 0
ERBuild {} -> cm > 0
_ -> True
-- move towards the player if it exists, or move around randomly if not.
monster :: MonadRandom m
=> Char
-> Double
-> Auto m EntityInput (EntityOutput a)
monster c damg = proc ei -> do
let pPos = ei ^? eiWorld . traverse . filtered (has (_2 . _EPlayer)) . _1
mPos = _eiPos ei
delta = (^-^ mPos) <$> pPos
moves = flip fmap delta $ \(V2 dx dy) ->
let adx = abs dx
ady = abs dy
in (V2 (signum dx) 0 <$ guard (adx /= 0))
<|> (V2 0 (signum dy) <$ guard (ady /= 0))
move <- during (arrM uniform) -< moves
wander <- effect (dirToV2 `liftM` uniform [DUp ..]) -< ()
let move' = fromMaybe wander move
id -< EO Nothing mPos move' (EMonster c) atkMap (Just [])
where
atkMap = M.fromList . map (,damg) $ [EPlayer, EWall, EBomb]
-- the main game loop
game :: MonadFix m
=> StdGen
-> Auto m Cmd (PlayerOut, GameMap)
game g = proc inp -> do
    -- run game', get the outputs, count kills, save the last output,
-- output to the client.
(((eo, _), gm), (msgs, newKills)) <- game' -< inp
kills <- sumFrom 0 -< getSum newKills
lastEoDat <- holdJusts
<|!> pure (initialHealth, initialInv) -< _eoData =<< eo
let (hlth, inv) = lastEoDat
let po = PO msgs hlth inv kills
id -< (po, gm)
where
-- run the Writer and the Random over 'bracketA playerA worldA'
-- "bracketA" runs player, then world, then player, so that the player
-- gets a chance to "clean up".
-- bracketA :: Auto m (Either a b) c -> Auto m c b -> Auto m a c runs
-- the first on the `a` Right input, feeds the `c` into the second,
-- runs the `b` output onto the first's Left channel, and outputs the
-- final `c`.
game' = runWriterA (sealRandomStd (bracketA playerA worldA) g)
playerA :: (MonadFix m, MonadWriter ([OutMessage], Sum Int) m)
=> Auto m (Either Cmd EntityInput)
( ( Maybe (EntityOutput (Double, Inventory))
, IntMap EntityInput
)
, GameMap
)
-- manage the player input and wrap the `player` Auto
playerA = proc inp -> do
-- last received world is the last world received from `Right`
lastWorld <- holdWith IM.empty . emitJusts (preview (_Right . eiWorld)) -< inp
rec lastPos <- delay startPos -< currPos
-- new entity input for player
let ei = set eiPos lastPos . either (const mempty) id $ inp
-- run it through player', with the input
pEo <- player' -< (ei, either id (const CNop) inp)
-- generate the resulting entity inputs for everyone else, and
-- messages
let (pEis, msgs) = IM.mapAccumWithKey (mkEntIns lastWorld) IM.empty $ maybe IM.empty (IM.singleton (-1)) pEo
        -- keep the current position; move when the player inputs ask
-- the player to move
currPos <- holdWith startPos . emitJusts (preview (ix (-1) . eiPos)) -< pEis
-- log the messages; messages are ([OutMessage], Sum Int) (kill count)
arrM (tell . (,mempty)) -< toListOf (traverse . traverse) msgs
let outEo = set (_Just . eoPos) currPos pEo
outEi = IM.delete (-1) pEis
outGm = either (const M.empty) (mkGMap lastPos . _eiWorld) inp
id -< ((outEo, outEi), outGm)
where
-- imbue position, health, and take an extra parameter as the
-- Reader environment
player' = runReaderA . booster startPos . withHealth initialHealth $ player
mkGMap p = M.fromListWith (<>)
. IM.elems
. (fmap . second) (:[])
. IM.insert (-1) (p, EPlayer)
-- the rest of the world
worldA :: (MonadFix m, MonadWriter ([OutMessage], Sum Int) m, MonadRandom m)
=> Auto m ( ( Maybe (EntityOutput (Double, a))
, IntMap EntityInput
), GameMap
)
EntityInput
worldA = proc ((pEo, pEis), _) -> do
-- make things... monsters and items
mkMonsters <- makeMonsters 25 -< ()
mkItems <- makeItems 15 -< ()
-- run all of the entities on all of the inputs, using dynMapF
rec entOuts <- dynMapF makeEntity mempty -< ( -- inputs from player and inputs from entities
IM.unionWith (<>) pEis entInsD'
-- make-new-entity events from everywhere
, newEntsBAll <> mkMonsters <> mkItems
)
-- only alive
let entOutsAlive = IM.filter (has (eoResps . _Just)) entOuts
-- alive + player entity output
entOutsFull = maybe entOutsAlive (\po -> IM.insert (-1) po entOutsAlive) pEo
-- map of all locations and entities
entMap = (_eoPos &&& _eoEntity) <$> entOutsFull
-- generate new entity inputs from the entity outputs
(entIns,msgs) = IM.mapAccumWithKey (mkEntIns entMap) IM.empty entOutsAlive
-- update entity maps
entMap' = maybe id (\po -> IM.insert (-1) (_eoPos po, EPlayer)) pEo
. flip IM.mapMaybeWithKey entIns $ \k ei -> do
eo <- IM.lookup k entOutsFull
return (_eiPos ei, _eoEntity eo)
entIns' = flip IM.mapWithKey entIns $ \k -> set eiWorld (IM.delete k entMap')
-- new entities, to send in as blip stream
newEnts = toList entOutsAlive >>= \(EO _ p _ _ _ ers) -> maybe [] (map (p,)) ers
-- EntResps from player
plrEResps = toListOf (_Just . eoResps . _Just . traverse) pEo
plrEResps' = case pEo of
Nothing -> []
Just po -> (_eoPos po,) <$> plrEResps
-- emit all non-empty newEnts, from "last cycle"
newEntsB <- lagBlips . emitOn (not . null) -< newEnts
-- all entity inputs from last cycle, to send into `entOuts`
entInsD <- delay IM.empty -< entIns'
-- add in the player entity to the world maps
let entInsD' = case pEo of
Just po -> over (traverse . eiWorld) (IM.insert (-1) (_eoPos po, EPlayer)) entInsD
Nothing -> entInsD
playerB <- emitOn (not . null) -< plrEResps'
let newEntsBAll = newEntsB <> playerB
-- write messages to log; messages are ([OutMessage], Sum Int) (kill count)
arrM (tell . (,mempty)) -< toListOf (traverse . traverse) msgs
id -< set eiWorld (IM.delete (-1) entMap') . IM.findWithDefault mempty (-1) $ entIns'
where
makeMonsters :: MonadRandom m => Int -> Auto m a (Blip [(Point, EntResp)])
makeMonsters n = onFor 500 . perBlip makeMonster . every n
--> makeMonsters ((n * 3) `div` 4)
makeMonster :: MonadRandom m => Auto m a [(Point, EntResp)]
makeMonster = liftA2 (\x y -> [(zero, ERMonster 'Z' 5 5 (shift (V2 x y)))])
(effect (getRandomR (0, view _x mapSize `div` 2)))
(effect (getRandomR (0, view _y mapSize `div` 2)))
where
shift = liftA2 (\m x -> (x - (m `div` 4)) `mod` m) mapSize
makeItems :: MonadRandom m => Double -> Auto m a (Blip [(Point, EntResp)])
makeItems r = perBlip makeItem . bernoulliMR (1/r)
where
makeItem = liftA3 (\x y i -> [(zero, ERItem i (shift (V2 x y)))])
(effect (getRandomR (0, 2 * view _x mapSize `div` 3)))
(effect (getRandomR (0, 2 * view _y mapSize `div` 3)))
(effect randomItem)
shift = liftA2 (\m x -> (x + (m `div` 6))) mapSize
randomItem = do
x <- fromList [ (PUArrows, 1.5)
, (PUGunpowder, 1)
, (PUCement, 1)
, (PUPotion 0, 1)
, (PUGold 0, 1)
]
case x of
PUGold _ -> PUGold `liftM` getRandomR (5,15)
PUPotion _ -> PUPotion `liftM` getRandomR (10,40)
_ -> return x
-- start off at give position
booster p0 a = (onFor 1 . arr (set (_Just . eoPos) p0) --> id) . a
-- generating entity inputs from entity outputs of last round. kinda
-- complicated, but this is the beef of the game logic, having every
-- entity communicate with every other one. run using
-- `IM.mapAccumWithKey`
mkEntIns :: EntityMap -- world map
-> IntMap EntityInput -- current "output" map, in-progress
-> Key -- key of this processed entity
-> EntityOutput a -- entity output of this processed entity
-> (IntMap EntityInput, [OutMessage]) -- updated "output" map, and also communications
mkEntIns em eis k (EO _ pos0 mv e react (Just resps)) = (IM.insertWith (<>) k res withGives, messages)
where
em' = IM.delete k em
pos1 = pos0 ^+^ mv
oldCols = IM.mapMaybe (\(p,e') -> e' <$ guard (p == pos1)) em'
newCols = flip IM.mapMaybeWithKey eis $ \k' ei -> do
guard (_eiPos ei == pos1)
snd <$> IM.lookup k' em'
allCols = oldCols <> newCols
pos2 | any isBlocking allCols = pos0
| otherwise = clamp pos1 -- could be short circuited here, really...
colAtks = flip IM.mapMaybe allCols $ \e' -> do
d <- M.lookup e' react
return (over eiComm ((k, ECAtk d):) mempty, [OMAtk e e' d])
respAtks = IM.unionsWith (<>) . flip mapMaybe resps $ \r ->
case r of
ERAtk a _ ->
let placed = place pos2 r
oldHits = snd <$> IM.filter (\(p,_) -> placed == p) em'
newHits = flip IM.mapMaybeWithKey eis $ \k' ei -> do
guard (placed == _eiPos ei)
snd <$> IM.lookup k' em
allHits = oldHits <> newHits
in Just $ (\e' -> (set eiComm [(k, ECAtk a)] mempty, [OMAtk e e' a])) <$> allHits
ERShoot a rg d -> -- TODO: drop when miss
let rg' = fromIntegral rg
oldHits = flip IM.mapMaybe em' $ \(p, e') -> do
guard $ arrowHit e'
dst <- aligned pos2 p d
dst <$ guard (dst <= rg')
newHits = flip IM.mapMaybeWithKey eis $ \k' ei -> do
guard $ arrowHit (snd (em IM.! k'))
dst <- aligned pos2 (_eiPos ei) d
dst <$ guard (dst <= rg')
allHits = oldHits <> newHits
minHit = fst . minimumBy (comparing snd) $ IM.toList allHits
in Just $ if IM.null allHits
then IM.singleton k (mempty, [OMMiss e])
else IM.singleton minHit (set eiComm [(k, ECAtk a)] mempty, [OMShot e (snd (em IM.! minHit)) a])
_ ->
Nothing
respGives = IM.unionsWith (<>) . flip mapMaybe resps $ \r ->
case r of
ERGive k' pu -> Just $ IM.singleton k' (set eiComm [(k, ECGive pu)] mempty, [OMPickup (snd (em IM.! k')) pu])
_ -> Nothing
allAtks = colAtks <> respAtks
messages = toListOf (traverse . traverse)
$ IM.unionWith (<>) (snd <$> allAtks) (snd <$> respGives)
withAtks = IM.unionWith (<>) (fst <$> IM.delete k allAtks) eis
withGives = IM.unionWith (<>) (fst <$> respGives) withAtks
res = EI pos2 [] em'
isBlocking ent = case ent of
EPlayer -> True
EWall -> True
EBomb -> True
EFire -> False
EMonster _ -> True
EItem _ -> False
aligned :: Point -> Point -> Dir -> Maybe Double
aligned p0 p1 dir = norm r <$ guard (abs (dotted - 1) < 0.001)
where
r = fmap fromIntegral (p1 - p0) :: V2 Double
rUnit = normalize r
dotted = rUnit `dot` fmap fromIntegral (dirToV2 dir)
arrowHit :: Entity -> Bool
arrowHit ent = case ent of
EPlayer -> True
EWall -> False
EBomb -> True
EFire -> False
EMonster _ -> True
EItem _ -> False
mkEntIns _ eis _ _ = (eis, [])
clamp = liftA3 (\mn mx -> max mn . min mx) (V2 0 0) mapSize
-- make entity from EntResp
makeEntity :: (MonadRandom m, MonadWriter ([OutMessage], Sum Int) m)
=> (Point, EntResp)
-> Interval m EntityInput (EntityOutput (Double, a))
makeEntity (p, er) = case er of
ERBomb dir -> booster placed $ bomb dir
ERBuild {} -> booster placed . withHealth 50 $ wall
ERMonster c h d _ -> booster placed . withHealth h $ monster c d
ERFire s d _ -> booster placed $ fire s d
ERItem pu pos -> itemPu pu pos
ERAtk {} -> off
ERShoot {} -> off
ERGive {} -> off
where
placed = place p er
-- where to place entity, given initial point and resp?
place :: Point -> EntResp -> Point
place p er = case er of
ERAtk _ disp -> p ^+^ disp
ERBomb {} -> p
ERBuild dir -> p ^+^ dirToV2 dir
ERShoot _ _ dir -> p ^+^ dirToV2 dir
ERFire _ _ d -> p ^+^ d
ERMonster _ _ _ p' -> p'
ERItem _ p' -> p'
ERGive {} -> zero
-- handle command stream
handleCmd :: (Serialize b, Monoid b, Monad m)
=> Auto m Cmd b
-> Auto m (Maybe Cmd) b
handleCmd a0 = holdWith mempty . perBlip a0 . onJusts
-- render the board
renderBoard :: (PlayerOut, GameMap) -> String
renderBoard (PO msgs ph (Inv ar gp cm gd) k, mp) =
unlines . concat $ [ map renderOM msgs
, "You dead!" <$ guard (ph <= 0)
, ["[1] Sword\t[2] Bow (" ++ show ar ++ ")\t[3] Bomb (" ++ show gp ++ ")\t[4] Wall (" ++ show cm ++ ")"]
, mapOut
, ["Health: " ++ show (round ph :: Int) ++ "\tKills: " ++ show k ++ "\tGold: " ++ show gd]
]
where
renderOM om = case om of
OMAtk e1 e2 d -> [entChr e1] ++ " attacked " ++ [entChr e2] ++ " for " ++ show d ++ " HP"
OMShot e1 e2 d -> [entChr e1] ++ " shot " ++ [entChr e2] ++ " for " ++ show d ++ " HP"
OMMiss e1 -> "Shot from " ++ [entChr e1] ++ " missed!"
OMDeath e1 -> [entChr e1] ++ " died"
OMPickup e1 pu -> [entChr e1] ++ " picked up " ++ showPu pu
mapOut = reverse [[ charAt x y | x <- [0..xMax] ] | y <- [0..yMax]]
charAt x y = fromMaybe '.' $ do
es <- M.lookup (V2 x y) mp
let es' | ph > 0 = es
| otherwise = filter (/= EPlayer) es
fmap entChr . listToMaybe . sortBy (comparing entPri) $ es'
xMax = view _x mapSize
yMax = view _y mapSize
entChr e = case e of
EPlayer -> '@'
EBomb -> 'o'
EWall -> '#'
EFire -> '"'
EMonster c -> c
EItem pu -> puChr pu
entPri e = case e of
EPlayer -> 0 :: Int
EFire -> 1
EMonster _ -> 2
EBomb -> 4
EItem _ -> 5
EWall -> 6
puChr pu = case pu of
PUArrows -> '>'
PUGunpowder -> '%'
PUCement -> '='
PUPotion _ -> '?'
PUGold _ -> '*'
showPu pu = case pu of
PUArrows -> "arrows"
PUGunpowder -> "gunpowder"
PUCement -> "cement"
PUPotion _ -> "an unimplemented potion"
PUGold amt -> show amt ++ " gold"
-- primitive command parser
parseCmd :: Auto m Char (Blip (Maybe Cmd))
parseCmd = go Nothing
where
go Nothing = mkAuto_ $ \x -> case x of
'h' -> (Blip (Just (CMove DLeft )) , go Nothing )
'j' -> (Blip (Just (CMove DDown )) , go Nothing )
'k' -> (Blip (Just (CMove DUp )) , go Nothing )
'l' -> (Blip (Just (CMove DRight)) , go Nothing )
'5' -> (Blip (Just (CUse Potion )) , go Nothing )
' ' -> (Blip (Just CNop) , go Nothing )
'1' -> (NoBlip , go (Just Sword))
'2' -> (NoBlip , go (Just Bow ))
'3' -> (NoBlip , go (Just Bomb ))
'4' -> (NoBlip , go (Just Wall ))
_ -> (Blip Nothing , go Nothing )
go (Just u) = mkAuto_ $ \x -> case x of
'h' -> (Blip (Just (CAct u DLeft )), go Nothing )
'j' -> (Blip (Just (CAct u DDown )), go Nothing )
'k' -> (Blip (Just (CAct u DUp )), go Nothing )
'l' -> (Blip (Just (CAct u DRight)), go Nothing )
_ -> (Blip Nothing , go Nothing )
main :: IO ()
main = do
g <- newStdGen
hSetBuffering stdin NoBuffering
renderStdout (initialPO, M.singleton startPos [EPlayer])
_ <- runM generalize getChar process $ hold
. perBlip (handleCmd (game g))
. parseCmd
return ()
where
renderStdout mp = do
clearScreen
putStrLn ""
putStrLn (renderBoard mp)
process mp' = do
mapM_ renderStdout mp'
Just <$> getChar
-- turn Identity into IO
generalize :: Monad m => Identity a -> m a
generalize = return . runIdentity
| mstksg/auto-examples | src/Experimental/Survive.hs | mit | 33,328 | 8 | 27 | 13,025 | 9,950 | 5,136 | 4,814 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module XmppDaemon.Stun where
import Control.Applicative
import Control.Concurrent.STM
import qualified Control.Exception as Ex
import Control.Monad
import qualified Data.ByteString as BS
import qualified Data.IP as IP
import Data.Maybe
import Data.Monoid
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.Typeable (Typeable)
import Data.XML.Pickle
import Data.XML.Types
import Network
import Network.DNS
import Network.Socket
import Network.Stun
import Network.Xmpp
import System.Log.Logger
data GetStunError = GetStunError String
deriving (Show, Typeable)
instance Ex.Exception GetStunError
getReflAddr :: HostName -> Maybe PortNumber -> IO (Either Text.Text Text.Text)
getReflAddr stunServer mbPn = do
rs <- makeResolvSeed defaultResolvConf
r <- withResolver rs $ \resolver -> do
let domain = Text.encodeUtf8 $ Text.pack stunServer
srvDomain = "_stun._udp." <> domain <> "."
srv' <- fromEither =<< lookupSRV resolver srvDomain
srv <- if (null srv')
then do
infoM "Pontarius.Xmpp.Daemon.Stun" $
"Did not find any SRV entries for " ++ show srvDomain
return [(fromMaybe 3478 mbPn, domain)]
else
return $ (\(_,_,p,dm) -> (fromIntegral p,dm)) <$> srv'
tryAll srv $ \(p, dom) -> do
infoM "Pontarius.Xmpp.Daemon.Stun" $
"Looking up " ++ show dom
as <- fromEither =<< lookupA resolver dom
aaaas <- fromEither =<< lookupAAAA resolver dom
let addrs = (toSockAddr p <$> as) ++ (toSockAddr6 p <$> aaaas)
case addrs of
[] -> return $ Left (GetStunError "Neither A nor AAAA record \
\returned a result")
addrs' -> tryAll addrs' $ \addr -> do
infoM "Pontarius.Xmpp.Daemon.Stun" $
"Sending STUN request to " ++ show addr
findMappedAddress addr 0 []
return $ mapLeft (Text.pack . show) $ for r $ \x -> Text.pack $ case fst x of
SockAddrInet{} -> takeWhile (/= ':') $ show r
SockAddrInet6{} -> tail . takeWhile (/= ']') $ show r
r' -> show r'
where
tryAll :: Show e => [a] -> (a -> IO (Either e b)) -> IO (Either GetStunError b)
tryAll [] f = return . Left . GetStunError
$ "Error: Could not connect to host " ++ stunServer
tryAll (x:xs) f = do
res <- f x
case res of
Right r -> return $ Right r
Left e -> do
errorM "Pontarius.Xmpp.Daemon.Stun" $ "error: " ++ (show e)
tryAll xs f
fromEither (Left e) = do
errorM "Pontarius.Xmpp.Daemon.Stun" $ "DNS error: " ++ (show e)
return []
fromEither (Right r) = return r
toSockAddr p ipv4 = SockAddrInet p (IP.toHostAddress ipv4)
toSockAddr6 p ipv6 = SockAddrInet6 p 0 (IP.toHostAddress6 ipv6) 0
for = flip fmap
mapLeft f (Left l) = Left $ f l
mapLeft _ (Right r) = Right r
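-- Hedged usage sketch (not part of the original module): the STUN server
-- hostname below is a placeholder. When no port is supplied and no SRV
-- record is found, getReflAddr falls back to the default STUN port 3478.
exampleReflAddr :: IO ()
exampleReflAddr = do
    res <- getReflAddr "stun.example.org" Nothing
    case res of
        Left err -> putStrLn $ "lookup failed: " ++ Text.unpack err
        Right addr -> putStrLn $ "reflexive address: " ++ Text.unpack addr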
getAddrE :: Element
getAddrE = Element "{org.pontarius.xmpp.daemon}get-refl-addr" [] []
xpAddrString :: PU Element (Either Text.Text Text.Text)
xpAddrString = xpRoot . xpUnliftElems $
xpElemNodes "{org.pontarius.xmpp.daemon}refl-addr"
(xpEither (xpElemNodes "{org.pontarius.xmpp.daemon}error"
(xpContent xpText))
(xpElemNodes "{org.pontarius.xmpp.daemon}addr"
(xpContent xpText)))
stunHandler :: HostName
-> Maybe PortNumber
-> Session
-> (Jid -> IO Bool)
-> IO ()
stunHandler host mbPort session policy = do
eitherChan <- listenIQChan Get "org.pontarius.xmpp.daemon" session
case eitherChan of
Left _ -> return ()
Right chan -> forever $ do
request <- atomically $ readTChan chan
pol <- case iqRequestFrom $ iqRequestBody request of
Nothing -> return False
Just j -> policy j
case pol of
True -> do
mbReflAddr <- getReflAddr host mbPort
answerIQ request . Right . Just
$ pickle xpAddrString mbReflAddr
return ()
False -> do
answerIQ request . Left $ mkStanzaError ServiceUnavailable
return ()
getAddr :: Session -> Jid -> IO (Either Text.Text Text.Text)
getAddr session recipient = do
res <- sendIQ' (Just (5*10^6)) (Just recipient) Get Nothing getAddrE session
case res of
Left (IQSendError e) -> return . Left $ "Could not send request "
`Text.append` Text.pack (show e)
Left IQTimeOut -> return $ Left "Request timed out"
Right (IQResponseError e) -> return . Left $ "Received XMPP error"
`Text.append` Text.pack (show e)
Right (IQResponseResult r) -> case iqResultPayload r of
Nothing -> return . Left $ "Response did not contain data"
Just r -> case unpickle xpAddrString r of
Left _ -> return . Left $ "Response data is invalid"
Right r -> return r
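-- Hedged usage sketch (this helper is not part of the original module):
-- the remote counterpart of the local STUN query above: ask a peer's daemon,
-- over an XMPP IQ, for the address it observes, and print the outcome.
printPeerAddr :: Session -> Jid -> IO ()
printPeerAddr session recipient = do
    res <- getAddr session recipient
    case res of
        Left err -> putStrLn $ "error: " ++ Text.unpack err
        Right addr -> putStrLn $ "reflexive address: " ++ Text.unpack addr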
| Philonous/xmpp-daemon | source/XmppDaemon/Stun.hs | mit | 5,711 | 0 | 25 | 2,019 | 1,603 | 790 | 813 | 122 | 9 |
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module Stackage.Stats
( printStats
) where
import Stackage.Prelude
import Data.Yaml (decodeFileEither)
printStats :: FilePath -- ^ YAML build plan file
-> IO ()
printStats fp = do
bp <- decodeFileEither fp >>= either throwIO return
let core = length $ siCorePackages $ bpSystemInfo bp
pkgs = length $ bpPackages bp
maintainers = length $ asSet $ flip foldMap (bpPackages bp)
$ maybe
mempty
singletonSet
. pcMaintainer . ppConstraints
putStrLn $ "Core packages: " ++ tshow core
putStrLn $ "Non-core packages: " ++ tshow pkgs
putStrLn $ "Total packages: " ++ tshow (core + pkgs)
putStrLn $ "Unique maintainers: " ++ tshow maintainers
| fpco/stackage-curator | src/Stackage/Stats.hs | mit | 817 | 0 | 16 | 238 | 205 | 100 | 105 | 20 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{- |
Module : Orville.PostgreSQL.Expr.Name.TableName
Copyright : Flipstone Technology Partners 2016-2021
License : MIT
-}
module Orville.PostgreSQL.Internal.Expr.Name.TableName
( TableName,
tableName,
)
where
import Orville.PostgreSQL.Internal.Expr.Name.Identifier (Identifier, IdentifierExpression, identifier)
import qualified Orville.PostgreSQL.Internal.RawSql as RawSql
newtype TableName
= TableName Identifier
deriving (RawSql.SqlExpression, IdentifierExpression)
tableName :: String -> TableName
tableName =
TableName . identifier
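-- A minimal illustration (not part of the original module): building a
-- 'TableName' for a hypothetical "users" table. Any quoting or escaping is
-- left to the underlying 'Identifier' machinery when the SQL is rendered.
usersTableName :: TableName
usersTableName = tableName "users"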
| flipstone/orville | orville-postgresql-libpq/src/Orville/PostgreSQL/Internal/Expr/Name/TableName.hs | mit | 604 | 0 | 7 | 77 | 90 | 60 | 30 | 12 | 1 |
module Network.Skype.Protocol.ChatMessage where
import Data.Typeable (Typeable)
import Network.Skype.Protocol.Chat
import Network.Skype.Protocol.Types
data ChatMessageProperty
-- | Time when the message was sent (UNIX timestamp).
= ChatMessageTimestamp Timestamp
-- | Skypename of the originator of the chatmessage.
| ChatMessageFromHandle UserID
-- | Displayed name of the originator of the chatmessage.
| ChatMessageFromDisplayName UserDisplayName
-- | Message type
| ChatMessageType ChatMessageType
-- | Message status
| ChatMessageStatus ChatMessageStatus
-- | Used with LEFT type message
| ChatMessageLeaveReason (Maybe ChatMessageLeaveReason)
-- | Chat that includes the message
| ChatMessageChatName ChatID
-- | People added to chat
| ChatMessageUsers [UserID]
-- | TRUE|FALSE
| ChatMessageIsEditable Bool
-- | Identity of the last user who edited the message.
| ChatMessageEditedBy UserID
-- | UNIX timestamp of the last edit.
| ChatMessageEditedTimestamp Timestamp
-- | Numeric field that contains chat options bitmap in system messages that
-- get sent out when a change is made to chat options (messages where TYPE is
-- SETOPTIONS). In normal messages the value of this field is 0.
| ChatMessageOptions ChatOption
-- | Used in system messages that get sent when a public chat administrator
-- has promoted or demoted a chat member. The TYPE property of such messages
-- is set to SETROLE. In these messages the value of this field is set to the
-- new assigned role of the promoted or demoted chat member. In normal
-- messages the value of this property is set to UNKNOWN.
| ChatMessageRole ChatRole
-- | Message body
| ChatMessageBody ChatMessageBody
-- | The message is seen and will be removed from missed messages list. The
-- UI sets this automatically if auto-popup is enabled for the user.
| ChatMessageSeen
deriving (Eq, Show, Typeable)
data ChatMessageType
-- | Change of chattopic
= ChatMessageTypeSetTopic
-- | IM
| ChatMessageTypeSaid
-- | Invited someone to chat.
| ChatMessageTypeAddedMembers
-- | Chat participant has seen other members.
| ChatMessageTypeSawMembers
-- | Chat to multiple people is created.
| ChatMessageTypeCreatedChatWith
-- | Someone left chat.
-- Can also be a notification if somebody cannot be added to chat.
| ChatMessageTypeLeft
-- | System message that is sent or received when one user sends contacts to
-- another. Added in protocol 7.
| ChatMessageTypePostedContacts
-- | messages of this type are generated locally, during synchronization, when
-- a user enters a chat and it becomes apparent that it is impossible to
-- update user's chat history completely. Chat history is kept only up to
-- maximum of 400 messages or 2 weeks. When a user has been offline past that
-- limit, GAP_IN_CHAT notification is generated. Added in protocol 7.
| ChatMessageTypeGapInChat
-- | System messages that are sent when a chat member gets promoted or
-- demoted.
| ChatMessageTypeSetRole
-- | System messages that are sent when a chat member gets kicked
| ChatMessageTypeKicked
-- | System messages that are sent when a chat member gets banned.
| ChatMessageTypeKickBanned
-- | System messages that are sent when chat options are changed.
| ChatMessageTypeSetOptions
-- | System messages that are sent when a chat member has changed the public
-- chat topic picture. Added in protocol 7.
| ChatMessageTypeSetPicture
-- | System messages that are sent when chat guidelines are changed.
| ChatMessageTypeSetGuideLines
-- | notification message that gets sent in a public chat with
-- JOINERS_BECOME_APPLICANTS options, when a new user joins the chat.
| ChatMessageTypeJoinedAsApplicant
| ChatMessageTypeEmoted
-- | Unknown message type, possibly due to connecting to Skype with older
-- protocol.
| ChatMessageTypeUnkown
deriving (Eq, Show, Typeable)
data ChatMessageStatus
-- | Message is being sent
= ChatMessageStatusSending
-- | Message was sent
| ChatMessageStatusSent
-- | Message has been received
| ChatMessageStatusReceived
-- | Message has been read
| ChatMessageStatusRead
deriving (Eq, Show, Typeable)
data ChatMessageLeaveReason
-- | User was not found
= ChatMessageLeaveReasonUserNotFound
-- | User has an older Skype version and cannot join multichat
| ChatMessageLeaveReasonUserIncapable
-- | Recipient accepts messages from contacts only and sender is not in
-- his/her contact list
| ChatMessageLeaveReasonAdderMustBeFriend
-- | Recipient accepts messages from authorized users only and sender is not
-- authorized
| ChatMessageLeaveReasonAdderMustBeAuthorized
-- | Participant left chat
| ChatMessageLeaveReasonUnsubscribe
deriving (Eq, Show, Typeable)
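-- Hedged illustration (this predicate is not part of the original protocol
-- module): client code typically branches on 'ChatMessageStatus', e.g. to
-- decide whether a received message still needs to be marked as seen.
needsMarkingSeen :: ChatMessageStatus -> Bool
needsMarkingSeen ChatMessageStatusReceived = True
needsMarkingSeen _ = False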
| emonkak/skype4hs | src/Network/Skype/Protocol/ChatMessage.hs | mit | 4,864 | 0 | 8 | 940 | 327 | 222 | 105 | 53 | 0 |
{-# Language TemplateHaskell, QuasiQuotes, FlexibleContexts #-}
module Main where
import Language.F2
import Language.F2.Util
import System.IO (hFlush, stdout)
import Control.Arrow (first, second, (>>>))
import Control.Monad.State
import Text.Peggy (defaultDelimiter, peggy, parseString)
[peggy|
space :: ()
= [ \r\n\t] { () } / comment { () }
delimiter :: ()
= [()\[\]<>;:,.+*/<>=:^~#$-'|&] { () }
comment :: ()
= '{-' (space / (!"-}" . { () }))* '-}' { () }
top :: (String, String)
= expr !.
expr :: (String, String)
= defExpr
/ otherExpr { ("it", $1) }
defExpr :: (String, String)
= "def" name "=" .+ { ($1, $2) }
/ "def" "(" op ")" "=" .+ { ($1, $2) }
otherExpr :: String
= .+
name ::: String
= !"fun" !"in" !"let" !"rec" !"if" !"then" !"else" [a-z_] [a-zA-Z0-9~']* { $1 : $2 }
/ [~]+ { $1 }
op ::: String
= [.+\-*/<>^~#$|&] [.+\-*/<>^~#$&|=:]* { $1 : $2 }
/ [=:] [.+\-*/<>^~#$&|=:]+ { $1 : $2 }
|]
helloStr
= " ____ _____\n" ++
" // //\n" ++
" //-- ---- \n" ++
"// //___ version " ++ version ++ "\n"
main = do
putStrLn $ helloStr
runStateT mainloop (1, preludeEnv)
mainloop :: StateT (Int, Env) IO ()
mainloop = do
(n, env) <- get
lift $ putStr $ "(" ++ show n ++ ")# "
lift $ hFlush stdout
line <- lift $ getLine
if line == ":q" then do
lift $ putStrLn "\n See you!!"
else if line == ":v" then do
lift $ putStrLn $ " version " ++ version
modify (first (+ 1))
mainloop
else do
case parseString top "<source>" line of
Left e -> lift $ putStrLn $ " parse error : " ++ showParseError e
Right (name, x) -> case exec env x of
Left e -> lift $ putStrLn $ " " ++ e
Right (t, v) -> do
case getValue v of
Left e -> lift $ putStrLn $ " " ++ e
Right v' -> do
modify (second ((name, (t, v')):))
lift $ putStrLn $ " " ++ name ++ " = " ++ x ++ " = " ++ show v' ++ " : " ++ show t
modify (first (+ 1))
mainloop
| kagamilove0707/F2 | src/Main.hs | mit | 2,011 | 0 | 32 | 575 | 509 | 262 | 247 | -1 | -1 |
-- Taken from https://github.com/carymrobbins/haskell-conf commit 3d3b13c
module Data.Conf (Conf, readConf, getConf, parseConf) where
import Control.Monad
import Text.Read
import Data.Conf.Parser
getConf :: Read a => String -> Conf -> Maybe a
getConf key conf = lookup key conf >>= readMaybe
readConf :: String -> IO [(String, String)]
readConf = liftM parseConf . readFile
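-- Hedged usage sketch: the file name and the "port" key are illustrative
-- assumptions, not part of the library. 'getConf' parses the looked-up
-- value with 'Read', so the signature of 'examplePort' fixes the result type.
examplePort :: IO (Maybe Int)
examplePort = do
  conf <- readConf "app.conf"
  return (getConf "port" conf)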
| splondike/dmarc-check | src/Data/Conf.hs | gpl-2.0 | 377 | 0 | 8 | 55 | 112 | 62 | 50 | 8 | 1 |
{- |
Module : $Header$
Description : printing Isabelle entities
Copyright : (c) University of Cambridge, Cambridge, England
adaption (c) Till Mossakowski, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Printing functions for Isabelle logic.
-}
module Isabelle.IsaPrint
( showBaseSig
, printIsaTheory
, getAxioms
, printNamedSen
, printTerm
) where
import Isabelle.IsaSign
import Isabelle.IsaConsts
import Isabelle.Translate
import Common.AS_Annotation
import qualified Data.Map as Map
import qualified Data.Set as Set
import Common.Doc hiding (bar)
import Common.DocUtils
import Common.Utils (number)
import Data.Char
import Data.List
import Data.Maybe (isNothing, catMaybes)
printIsaTheory :: String -> Sign -> [Named Sentence] -> Doc
printIsaTheory tn sign sens = let
b = baseSig sign
bs = showBaseSig b
ld = "$HETS_ISABELLE_LIB/"
use = text usesS <+> doubleQuotes (text $ ld ++ "prelude.ML")
in text theoryS <+> text tn
$+$ text importsS <+> fsep (case b of
Custom_thy -> []
_ -> if case b of
Main_thy -> False
HOLCF_thy -> False
Custom_thy -> True
_ -> True then [doubleQuotes $ text $ ld ++ bs] else [text bs]
++ map (doubleQuotes . text) (imports sign))
$+$ use
$+$ text beginS
$++$ printTheoryBody sign sens
$++$ text endS
printTheoryBody :: Sign -> [Named Sentence] -> Doc
printTheoryBody sig sens =
let (sens', recFuns) = findTypesForRecFuns sens (constTab sig)
sig' = sig { constTab =
Map.filterWithKey (\ k _ -> notElem (new k) recFuns) (constTab sig) }
in callSetup "initialize" (brackets $ sepByCommas
$ map (text . show . Quote . senAttr)
$ filter (\ s -> not (isConstDef s || isRecDef s || isInstance s)
&& senAttr s /= "") sens')
$++$ printSign sig'
$++$ printNamedSentences sens'
$++$ printMonSign sig'
findTypesForRecFuns :: [Named Sentence] -> ConstTab
-> ([Named Sentence], [String])
findTypesForRecFuns ns ctab =
let (sens, recFuns') = unzip $ map (\ sen ->
let (sen', recFns') =
case sentence sen of
RecDef kw cName cType tm ->
(case Map.toList $
Map.filterWithKey (\ k _ -> new k == new cName) ctab of
(_, t) : _ ->
case cType of
Nothing ->
RecDef kw cName (Just t) tm
Just t' ->
if t /= t' then error "recFun: two types given"
else sentence sen
[] -> sentence sen
, Just cName)
_ -> (sentence sen, Nothing)
in (sen {sentence = sen'}, recFns')) ns
in (sens, map new $ catMaybes recFuns')
printNamedSentences :: [Named Sentence] -> Doc
printNamedSentences sens = case sens of
[] -> empty
s : r
| isIsaAxiom s ->
let (axs, rest) = span isAxiom sens in
(if null axs then empty else text axiomatizationS $+$ text whereS)
$+$ vcat (intersperse (text andS) $ map printNamedSen axs)
$++$ vcat (map ( \ a -> text declareS <+> text (senAttr a)
<+> brackets (text simpS))
$ filter ( \ a -> case sentence a of
b@Sentence {} -> isSimp b && senAttr a /= ""
_ -> False) axs)
$++$ printNamedSentences rest
| isConstDef s ->
let (defs_, rest) = span isConstDef sens in
text defsS $+$ vsep (map printNamedSen defs_)
$++$ printNamedSentences rest
| otherwise ->
printNamedSen s $++$ (case senAttr s of
n | n == "" || isRecDef s -> empty
| otherwise -> callSetup "record" (text $ show $ Quote n))
$++$ printNamedSentences r
callSetup :: String -> Doc -> Doc
callSetup fun args =
text "setup" <+> doubleQuotes (fsep [text ("Header." ++ fun), args])
data QuotedString = Quote String
instance Show QuotedString where
show (Quote s) = init . tail . show $ show s
getAxioms :: [Named Sentence] -> ([Named Sentence], [Named Sentence])
getAxioms = partition isIsaAxiom
isIsaAxiom :: Named Sentence -> Bool
isIsaAxiom s = case sentence s of
Sentence {} -> isAxiom s
_ -> False
isInstance :: Named Sentence -> Bool
isInstance s = case sentence s of
Instance {} -> True
_ -> False
isConstDef :: Named Sentence -> Bool
isConstDef s = case sentence s of
ConstDef {} -> True
_ -> False
isRecDef :: Named Sentence -> Bool
isRecDef s = case sentence s of
RecDef {} -> True
_ -> False
-- --------------------- Printing functions -----------------------------
showBaseSig :: BaseSig -> String
showBaseSig = takeWhile (/= '_') . show
printClass :: IsaClass -> Doc
printClass (IsaClass x) = text x
printSort :: Sort -> Doc
printSort = printSortAux False
printSortAux :: Bool -> Sort -> Doc
printSortAux b l = case l of
[c] -> printClass c
_ -> (if b then doubleQuotes else id)
. braces . hcat . punctuate comma $ map printClass l
data SynFlag = Quoted | Unquoted | Null
doubleColon :: Doc
doubleColon = text "::"
isaEquals :: Doc
isaEquals = text "=="
bar :: [Doc] -> [Doc]
bar = punctuate $ space <> text "|"
printType :: Typ -> Doc
printType = printTyp Unquoted
printTyp :: SynFlag -> Typ -> Doc
printTyp a = fst . printTypeAux a
printTypeAux :: SynFlag -> Typ -> (Doc, Int)
printTypeAux a t = case t of
TFree v s -> (let
d = text $ if isPrefixOf "\'" v || isPrefixOf "?\'" v
then v else '\'' : v
c = printSort s
in if null s then d else case a of
Quoted -> d <> doubleColon <> if null
$ tail s then c else doubleQuotes c
Unquoted -> d <> doubleColon <> c
Null -> d, 1000)
TVar iv s -> printTypeAux a $ TFree (unindexed iv) s
Type name _ args -> case args of
[t1, t2] | elem name [prodS, sProdS, funS, cFunS, lFunS, sSumS] ->
printTypeOp a name t1 t2
_ -> ((case args of
[] -> empty
[arg] -> let (d, i) = printTypeAux a arg in
if i < 1000 then parens d else d
_ -> parens $ hsep $ punctuate comma $
map (fst . printTypeAux a) args)
<+> text name, 1000)
printTypeOp :: SynFlag -> TName -> Typ -> Typ -> (Doc, Int)
printTypeOp x name r1 r2 =
let (d1, i1) = printTypeAux x r1
(d2, i2) = printTypeAux x r2
(l, r) = Map.findWithDefault (0 :: Int, 0 :: Int)
name $ Map.fromList
[ (funS, (1, 0))
, (cFunS, (1, 0))
, (lFunS, (1, 0))
, (sSumS, (11, 10))
, (prodS, (21, 20))
, (sProdS, (21, 20))
, (lProdS, (21, 20))
]
d3 = if i1 < l then parens d1 else d1
d4 = if i2 < r then parens d2 else d2
in (d3 <+> text name <+> d4, r)
andDocs :: [Doc] -> Doc
andDocs = vcat . prepPunctuate (text andS <> space)
-- | printing a named sentence
printNamedSen :: Named Sentence -> Doc
printNamedSen ns =
let s = sentence ns
lab = senAttr ns
b = isAxiom ns
d = printSentence s
in case s of
TypeDef {} -> d
RecDef {} -> d
Lemmas {} -> d
Instance {} -> d
Locale {} -> d
Class {} -> d
Datatypes _ -> d
Consts _ -> d
TypeSynonym {} -> d
Axioms _ -> d
Lemma {} -> d
Definition {} -> d
Fun {} -> d
Instantiation {} -> d
InstanceProof {} -> d
InstanceArity {} -> d
InstanceSubclass {} -> d
Subclass {} -> d
Typedef {} -> d
Defs {} -> d
Fixrec {} -> d
Domains {} -> d
Primrec {} -> d
_ -> let dd = doubleQuotes d in
if isRefute s then text lemmaS <+> text lab <+> colon
<+> dd $+$ text refuteS
else if null lab then dd else fsep [ (case s of
ConstDef {} -> text $ lab ++ "_def"
Sentence {} ->
(if b then empty else text theoremS)
<+> text lab <+> (if b then text "[rule_format]" else
if isSimp s then text "[simp]" else empty)
_ -> error "printNamedSen") <+> colon, dd] $+$ case s of
Sentence {} -> if b then empty else case thmProof s of
Nothing -> text oopsS
Just prf -> pretty prf
_ -> empty
-- | sentence printing
printSentence :: Sentence -> Doc
printSentence s = case s of
TypeDef nt td pr -> text typedefS
<+> printType nt
<+> equals
<+> doubleQuotes (printSetDecl td)
$+$ pretty pr
RecDef kw cName cType xs ->
let preparedEq = map (doubleQuotes . printTerm) xs
preparedEqWithBars =
map (<+> text barS) (init preparedEq) ++ [last preparedEq]
tp = case cType of
Just t -> doubleColon <+> doubleQuotes (printType t)
Nothing -> empty
kw' = case kw of
Just str -> text str
Nothing -> text primrecS
in kw' <+> text (new cName) <+> tp <+> printAlt cName <+> text whereS $+$
vcat preparedEqWithBars
Instance { tName = t, arityArgs = args, arityRes = res, definitions = defs_,
instProof = prf } ->
text instantiationS <+> text t <> doubleColon <> (case args of
[] -> empty
_ -> parens $ hsep $ punctuate comma $ map (printSortAux True) args)
<+> printSortAux True res $+$ text beginS $++$ printDefs defs_ $++$
text instanceS <+> pretty prf $+$ text endS
where printDefs :: [(String, Term)] -> Doc
printDefs defs' = vcat (map printDef defs')
printDef :: (String, Term) -> Doc
printDef (name, def) =
text definitionS <+>
printNamedSen (makeNamed name (ConstDef def))
Sentence { isRefuteAux = b, metaTerm = t } -> printPlainMetaTerm (not b) t
ConstDef t -> printTerm t
Lemmas name lemmas -> if null lemmas
    then empty {- only have these lemmas if we have some in
the list -}
else text lemmasS <+> text name <+>
equals <+> sep (map text lemmas)
l@(Locale {}) ->
let h = text "locale" <+> text (show $ localeName l)
parents = Data.List.intersperse (text "+") $
map (text . show) (localeParents l)
(fxs, ass) = printContext $ localeContext l
in printFixesAssumes h parents ass fxs
$+$ printBody (localeBody l)
c@(Class {}) ->
let h = text "class" <+> text (show $ className c)
parents = Data.List.intersperse (text "+") $
map (text . show) (classParents c)
(fxs, ass) = printContext (classContext c)
in printFixesAssumes h parents ass fxs
$+$ printBody (classBody c)
(Datatypes dts) -> if null dts then empty
else text "datatype" <+>
andDocs (map (\ d ->
let vars = map printType $ datatypeTVars d
name = text $ show $ datatypeName d
pretty_cs c =
let cname = case c of
DatatypeNoConstructor {} -> ""
_ -> show $ constructorName c
cname' = if any isSpace cname
then doubleQuotes (text cname)
else text cname
tps = map (doubleQuotes . printType) $
constructorArgs c
in hsep (cname' : tps)
cs = map pretty_cs $ datatypeConstructors d
in hsep vars <+> name <+> text "=" <+>
fsep (bar cs)) dts)
Domains ds -> if null ds then empty
else text "domain" <+>
andDocs (map (\ d ->
let vars = map printType $ domainTVars d
name = text $ show $ domainName d
pretty_cs c =
let cname = text $ show $ domainConstructorName c
args' = map (\ arg ->
(if domainConstructorArgLazy arg
then text "lazy" else empty) <+>
(case domainConstructorArgSel arg of
Just sel -> text (show sel) <+> text "::"
Nothing -> empty) <+> (doubleQuotes . printType)
(domainConstructorArgType arg)) $
domainConstructorArgs c
args = map parens args'
in hsep $ cname : args
cs = map pretty_cs $ domainConstructors d
in hsep vars <+> name <+> text "=" <+>
fsep (bar cs)) ds)
Consts cs -> if null cs then empty
else vsep $ text "consts" :
map (\ (n, _, t) -> text n <+> text "::" <+>
doubleQuotes (printType t)) cs
TypeSynonym n _ vs tp -> hsep $ [text "type_synonym",
text $ show n, text "="] ++ map text vs
++ [doubleQuotes . printType $ tp]
Axioms axs -> if null axs then empty
else vsep $ text "axioms" :
map (\ a -> text (show $ axiomName a) <+>
(if axiomArgs a /= ""
then brackets (text $ axiomArgs a)
else empty) <+> text ":" <+>
doubleQuotes (printTerm $ axiomTerm a)) axs
l@(Lemma {}) ->
let (fxs, ass) = printContext $ lemmaContext l
in text "lemma" <+> (case lemmaTarget l of
Just t -> braces (text "in" <+> text (show t))
Nothing -> empty) <+>
(case (null fxs, null ass, lemmaProps l) of
(True, True, [sh]) -> printProps sh
_ -> vsep (fxs ++ ass ++
[text "shows" <+> andDocs
(map printProps (lemmaProps l))]))
$+$ (case lemmaProof l of
Just p -> text p
Nothing -> empty)
d@(Definition {}) -> fsep [text "definition" <+>
(case definitionTarget d of
Just t -> braces (text "in" <+> text (show t))
Nothing -> empty) <+>
(text (show $ definitionName d) <+> text "::" <+>
doubleQuotes (printType $ definitionType d)), text "where" <+>
doubleQuotes (text (show $ definitionName d) <+> hsep (map printTerm (
definitionVars d)) <+> text "=" <+> printTerm (definitionTerm d))]
f@(Fun {}) -> text "fun" <+> (case funTarget f of
Just t -> braces (text "in" <+> text (show t))
Nothing -> empty) <+> (if funDomintros f then braces (text "domintros")
else empty) <+> vcat (intersperse (text andS) $
map (\ (name, mx, tp, _) -> text name <+> text "::" <+>
doubleQuotes (printType tp) <+> case mx of
Just (Mixfix _ _ s' _) -> doubleQuotes (text s')
_ -> empty) (funEquations f)) <+> text "where" $+$
(let eqs = concatMap (\ (name, _, _, e) -> map (\ e' -> (name, e')) e)
(funEquations f)
eqs' = map (\ (n, (vs, t)) -> doubleQuotes (text n <+>
hsep (map printTerm vs) <+>
text "=" <+> printTerm t)) eqs
in fsep $ bar eqs')
i@(Instantiation {}) -> fsep $ (text "instantiation" <+> text
(instantiationType i) <+> text "::" <+> printArity (instantiationArity i)) :
[printBody (instantiationBody i)]
InstanceProof prf -> text "instance" $+$ text prf
i@(InstanceArity {}) -> text "instance" <+>
hcat (intersperse (text "and") $ map text $ instanceTypes i) <+>
printArity (instanceArity i) $+$ text (instanceProof i)
i@(InstanceSubclass {}) -> text "instance" <+> text (instanceClass i) <+>
text (instanceRel i) <+> text (instanceClass1 i) $+$ text (instanceProof i)
c@(Subclass {}) -> text "subclass" <+> (case subclassTarget c of
Just t -> braces (text "in" <+> text (show t))
Nothing -> empty) <+> text (subclassClass c)
<+> text (subclassProof c)
t@(Typedef {}) -> text "typedef" <+> (case typedefVars t of
[] -> empty
[v] -> printVarWithSort v
vs -> parens $ hsep $ punctuate comma $
map printVarWithSort vs) <+>
text (show $ typedefName t) <+> text "=" <+>
doubleQuotes (printTerm $ typedefTerm t) <+>
(case typedefMorphisms t of
Just (m1, m2) -> text "morphisms" <+> text (show m1)
<+> text (show m2)
Nothing -> empty) $+$ text (typedefProof t)
d@(Defs {}) -> fsep $ (text "defs" <+> (if defsUnchecked d
then text "unchecked"
else empty) <+>
(if defsOverloaded d
then text "overloaded"
else empty))
: map (\ eq' ->
text (show (defEquationName eq')) <+> text ":" <+> doubleQuotes (
text (defEquationConst eq') <+> text "==" <+>
printTerm (defEquationTerm eq')) <+> if null (defEquationArgs eq')
then empty else brackets (text $ defEquationArgs eq')) (defsEquations d)
Fixrec fs ->
let h = map (\ (name, _, tp, _) -> text name <+> text "::" <+>
(doubleQuotes . printType) tp) fs
pretty_fixreceq name eq' =
let unchecked = if fixrecEquationUnchecked eq' then
text "(unchecked)" else empty
premises = fixrecEquationPremises eq'
p s' = punctuate $ space <> text s'
patterns = map (parens . printTerm) $ fixrecEquationPatterns eq'
tm = printTerm $ fixrecEquationTerm eq'
in unchecked <+> doubleQuotes (printTermWithPremises premises
(hsep (p "\\<cdot>" (text name : patterns)) <+> text "=" <+> tm))
body = concatMap (\ (name, _, _, eqs) -> map (pretty_fixreceq name) eqs)
fs
in text "fixrec" <+> andDocs h <+> text "where" $+$ fsep (bar body)
Primrec t eqs ->
let h = map (\ (name, _, tp, _) -> text name <+> text "::" <+>
(doubleQuotes . printType) tp) eqs
pretty_primrec name (vs, tm) = doubleQuotes (text name
<+> hsep (map printTerm vs) <+> text "=" <+> printTerm tm)
body = concatMap (\ (name, _, _, tms) ->
map (pretty_primrec name) tms) eqs
in text "primrec" <+> (case t of
Just t' -> braces (text "in" <+> text (show t'))
Nothing -> empty) <+> andDocs h <+> text "where"
$+$ fsep (bar body)
printTermWithPremises :: [Term] -> Doc -> Doc
printTermWithPremises ps t =
let p s = punctuate $ space <> text s
in fsep $ p "\\<Longrightarrow>" (map printTerm ps ++ [t])
printArity :: (Sort, [Sort]) -> Doc
printArity (sort', sorts) = parens (hsep $ punctuate comma $
map (printSortAux True) sorts) <+> printSort sort'
printVarWithSort :: (String, Sort) -> Doc
printVarWithSort (name, []) = text name
printVarWithSort (name, sort') = text name <+> printSortAux True sort'
printBody :: [Sentence] -> Doc
printBody sens = fsep $ if null sens then []
else [text "begin"] ++ map printSentence sens ++ [text "end"]
printContext :: Ctxt -> ([Doc], [Doc])
printContext ctxt =
let fixes' = map (\ (n, _, tp) -> if n == "" then empty else text n
<+> text "::" <+> (doubleQuotes . printTyp Null) tp)
(fixes ctxt)
assumes' = map (\ (n, tm) -> if n == "" then empty else text n <+> text ":"
<+> (doubleQuotes . printTerm) tm)
(assumes ctxt)
in (fixes', assumes')
printProps :: Props -> Doc
printProps (Props {propsName = n, propsArgs = a, props = p}) =
printMaybe (text . show) n <+> printMaybe text a
<+> (if isNothing n && isNothing a
then empty else text ":") <+>
vcat (map printProp p)
printProp :: Prop -> Doc
printProp (Prop {prop = t, propPats = ts}) =
let t' = doubleQuotes $ printTerm t
ts' = hsep $ map (\ p -> text "is" <+> (doubleQuotes . printTerm) p) ts
in t' <+> if null ts then empty
else parens ts'
printSetDecl :: SetDecl -> Doc
printSetDecl setdecl =
case setdecl of
SubSet v t f -> braces $ printTerm v <> doubleColon <> printType t <> dot
<+> printTerm f
FixedSet elems -> braces $ sepByCommas $ map printTerm elems
printPlainMetaTerm :: Bool -> MetaTerm -> Doc
printPlainMetaTerm b mt = case mt of
Term t -> printPlainTerm b t
Conditional conds t -> sep
[ text premiseOpenS
<+> fsep (punctuate semi $ map printTerm conds)
<+> text premiseCloseS
, text metaImplS <+> printTerm t ]
-- | print plain term
printTerm :: Term -> Doc
printTerm = printPlainTerm True
printPlainTerm :: Bool -> Term -> Doc
printPlainTerm b = fst . printTrm b
-- | print parens but leave a space if doc starts or ends with a bar
parensForTerm :: Doc -> Doc
parensForTerm d =
let s = show d
b = '|'
in parens $ if null s then d
else (if head s == b then (space <>) else id)
((if last s == b then (<> space) else id) d)
printParenTerm :: Bool -> Int -> Term -> Doc
printParenTerm b i t = case printTrm b t of
(d, j) -> if j < i then parensForTerm d else d
flatTuplex :: [Term] -> Continuity -> [Term]
flatTuplex cs c = case cs of
[] -> cs
_ -> case last cs of
Tuplex rs@(_ : _ : _) d | d == c -> init cs ++ flatTuplex rs d
_ -> cs
printMixfixAppl :: Bool -> Continuity -> Term -> [Term] -> (Doc, Int)
printMixfixAppl b c f args = case f of
Const (VName n (Just (AltSyntax s is i))) (Hide {}) ->
if length is == length args &&
(b || n == cNot || isPrefixOf "op " n) then
(fsep $ replaceUnderlines s
$ zipWith (printParenTerm b) is args, i)
else printApp b c f args
Const vn _ | new vn `elem` [allS, exS, ex1S] -> case args of
[Abs v t _] -> (fsep [text (new vn) <+> printPlainTerm False v
<> dot
, printPlainTerm b t], lowPrio)
_ -> printApp b c f args
App g a d | c == d -> printMixfixAppl b c g (a : args)
_ -> printApp b c f args
-- | print the term using the alternative syntax (if True)
printTrm :: Bool -> Term -> (Doc, Int)
printTrm b trm = case trm of
Const vn ty -> let
dvn = text $ new vn
nvn = case ty of
Hide {} -> dvn
Disp w _ _ -> parens $ dvn <+> doubleColon <+> printType w
in case altSyn vn of
Nothing -> (nvn, maxPrio)
Just (AltSyntax s is i) -> if b && null is then
(fsep $ replaceUnderlines s [], i) else (nvn, maxPrio)
Free vn -> (text $ new vn, maxPrio)
Abs v t c -> (text (case c of
NotCont -> "%"
IsCont _ -> "Lam") <+> printPlainTerm False v <> dot
<+> printPlainTerm b t, lowPrio)
If i t e c -> let d = fsep [printPlainTerm b i,
text (case c of
NotCont -> "then"
IsCont _ -> "THEN")
<+> printPlainTerm b t,
text (case c of
NotCont -> "else"
IsCont _ -> "ELSE")
<+> printPlainTerm b e]
in case c of
NotCont -> (text "if" <+> d, lowPrio)
IsCont _ -> (text "IF" <+> d <+> text "FI", maxPrio)
Case e ps -> (text "case" <+> printPlainTerm b e <+> text "of"
$+$ vcat (bar $ map (\ (p, t) ->
fsep [ printPlainTerm b p <+> text "=>"
, printParenTerm b (lowPrio + 1) t]) ps), lowPrio)
Let es i -> (fsep [text "let" <+>
vcat (punctuate semi $
map (\ (p, t) -> fsep [ printPlainTerm b p <+> equals
, printPlainTerm b t]) es)
, text "in" <+> printPlainTerm b i], lowPrio)
IsaEq t1 t2 ->
(fsep [ printParenTerm b (isaEqPrio + 1) t1 <+> isaEquals
, printParenTerm b isaEqPrio t2], isaEqPrio)
Tuplex cs c -> case c of
NotCont -> (parensForTerm
$ sepByCommas (map (printPlainTerm b)
$ flatTuplex cs c)
, maxPrio)
IsCont _ -> case cs of
[] -> error "IsaPrint, printTrm"
[a] -> printTrm b a
a : aa -> printTrm b $ App (App
lpairTerm a $ IsCont False)
(Tuplex aa c) (IsCont False)
App f a c -> printMixfixAppl b c f [a]
Set setdecl -> (printSetDecl setdecl, lowPrio)
printApp :: Bool -> Continuity -> Term -> [Term] -> (Doc, Int)
printApp b c t l = case l of
[] -> printTrm b t
_ -> printDocApp b c (printParenTerm b (maxPrio - 1) t) l
printDocApp :: Bool -> Continuity -> Doc -> [Term] -> (Doc, Int)
printDocApp b c d l =
( fsep $ (case c of
NotCont -> id
IsCont True -> punctuate $ text " $$"
IsCont False -> punctuate $ text " $")
$ d : map (printParenTerm b maxPrio) l
, maxPrio - 1)
replaceUnderlines :: String -> [Doc] -> [Doc]
replaceUnderlines str l = case str of
"" -> []
'\'' : r@(q : s) -> if q `elem` "_/'()"
then consDocBarSep (text [q]) $ replaceUnderlines s l
else consDocBarSep (text "'") $ replaceUnderlines r l
'_' : r -> case l of
h : t -> consDocBarSep h $ replaceUnderlines r t
_ -> error "replaceUnderlines"
'/' : ' ' : r -> empty : replaceUnderlines r l
q : r -> if q `elem` "()/" then replaceUnderlines r l
else consDocBarSep (text [q]) $ replaceUnderlines r l
consDocBarSep :: Doc -> [Doc] -> [Doc]
consDocBarSep d r = case r of
[] -> [d]
h : t -> let
b = '|'
hs = show h
ds = show d
hhs = head hs
lds = last ds
in if null hs || null ds then (d <> h) : t else
if hhs == b && lds == '(' || last ds == b && hhs == ')'
then (d <+> h) : t
else (d <> h) : t
-- end of term printing
printLocales :: Locales -> Doc
printLocales = vsep . map printLocale . orderLDecs . Map.toList
printDefinitions :: Defs -> Doc
printDefinitions = vsep . map printDefinition . Map.toList
printFunctions :: Funs -> Doc
printFunctions = vsep . map printFunction . Map.toList
printFixesAssumes :: Doc -> [Doc] -> [Doc] -> [Doc] -> Doc
printFixesAssumes h p' a f = vcat
[ h <+> (if null $ p' ++ a ++ f then empty else text "=") <+> hsep p'
<+> if null p' || null a && null f then empty else text "+"
, if null f then empty else text "fixes" <+> andDocs f
, if null a then empty else text "assumes" <+> andDocs a
]
printDefinition :: (String, Def) -> Doc
printDefinition (n, (tp, vs, tm)) = text "definition" <+> text n <+> text "::"
$+$ (doubleQuotes . printTyp Null) tp <+> text "where"
$+$ doubleQuotes (text n <+> hsep (map (text . fst) vs)
<+> text "\\<equiv>" <+> printTerm tm)
printFunction :: (String, FunDef) -> Doc
printFunction (n, (tp, def_eqs)) = text "fun" <+> text n <+> text "::"
$+$ (doubleQuotes . printTyp Null) tp <+> text "where"
$+$ (vcat . punctuate (text "|"))
(map (\ (pats, tm) -> doubleQuotes $ text n
<+> hsep (map printTerm pats) <+> text "="
<+> printTerm tm) def_eqs)
printLocale :: (String, LocaleDecl) -> Doc
printLocale (n, (parents, in_ax, ex_ax, params)) =
let p' = Data.List.intersperse (text "+") $ map text parents
a = map (\ (s, t) -> text s <+> text ":"
<+> (doubleQuotes . printTerm) t) in_ax
f = map (\ (s, t, alt) -> text s <+> text "::"
<+> (doubleQuotes . printTyp Null) t
<+> (case alt of
Just (AltSyntax s' [i1, i2] i) -> parens (
text (if i1 == i2 then "infix "
else if i1 < i2 then "infixr "
else "infixl ") <+> doubleQuotes (text s')
<+> text (show i))
_ -> empty
)) params
in vcat [
printFixesAssumes (text "locale" <+> text n) p' a f,
vcat (map (\ (s, t) -> text ("theorem (in " ++ n ++ ")")
<+> text s <+> text ":"
<+> (doubleQuotes . printTerm) t
<+> text "apply(auto)"
<+> text "done") ex_ax)]
printClassrel :: Classrel -> Doc
printClassrel = vsep . map printClassR . orderCDecs . Map.toList
printClassR :: (IsaClass, ClassDecl) -> Doc
printClassR (y, (parents, assumptions, fxs)) =
let a = map (\ (s, t) -> text s <+> text ":"
<+> (doubleQuotes . printTerm) t) assumptions
f = map (\ (s, t) -> text s <+> text "::"
<+> (doubleQuotes . printTyp Null) t) fxs
parents' = filter (\ (IsaClass s) -> notElem s
["HOL.type_class", "HOL.type", "type", "type_class"]) parents
p' = Data.List.intersperse (text "+") $ map printClass parents'
in printFixesAssumes (text "class" <+> printClass y) p' a f
orderCDecs :: [(IsaClass, ClassDecl)] -> [(IsaClass, ClassDecl)]
orderCDecs =
topSort crord
where
crord (_, (cs, _, _)) (c, _) = elem c cs
orderLDecs :: [(String, LocaleDecl)] -> [(String, LocaleDecl)]
orderLDecs =
topSort crord
where
crord (_, (cs, _, _, _)) (c, _) = elem c cs
printMonArities :: String -> Arities -> Doc
printMonArities tn = vcat . map ( \ (t, cl) ->
vcat $ map (printThMorp tn t) cl) . Map.toList
printThMorp :: String -> TName -> (IsaClass, [(Typ, Sort)]) -> Doc
printThMorp tn t xs = case xs of
(IsaClass "Monad", _) ->
if isSuffixOf "_mh" tn || isSuffixOf "_mhc" tn
then printMInstance tn t
else error "IsaPrint, printInstance: monads not supported"
_ -> empty
printMInstance :: String -> TName -> Doc
printMInstance tn t = let nM = text (t ++ "_tm")
nM2 = text (t ++ "_tm2")
in prnThymorph nM "MonadType" tn t [("MonadType.M", "'a")] []
$+$ text "t_instantiate MonadOps mapping" <+> nM
$+$ text "renames:" <+>
brackMapList (\ x -> t ++ "_" ++ x)
[("MonadOpEta.eta", "eta"), ("MonadOpBind.bind", "bind")]
$+$ text "without_syntax"
$++$ text "defs "
$+$ text (t ++ "_eta_def:") <+> doubleQuotes
(text (t ++ "_eta") <+> isaEquals <+> text ("return_" ++ t))
$+$ text (t ++ "_bind_def:") <+> doubleQuotes
(text (t ++ "_bind") <+> isaEquals <+> text ("mbind_" ++ t))
$++$ lunitLemma t
$+$ runitLemma t
$+$ assocLemma t
$+$ etaInjLemma t
$++$ prnThymorph nM2 "MonadAxms" tn t [("MonadType.M", "'a")]
[("MonadOpEta.eta", t ++ "_eta"),
("MonadOpBind.bind", t ++ "_bind")]
$+$ text "t_instantiate Monad mapping" <+> nM2
$+$ text "renames:" <+>
brackMapList (\ x -> t ++ "_" ++ x)
[("Monad.kapp", "kapp"),
("Monad.lift", "lift"),
("Monad.lift", "lift"),
("Monad.mapF", "mapF"),
("Monad.bind'", "mbbind"),
("Monad.joinM", "joinM"),
("Monad.kapp2", "kapp2"),
("Monad.kapp3", "kapp3"),
("Monad.lift2", "lift2"),
("Monad.lift3", "lift3")]
$+$ text "without_syntax"
$++$ text " "
where
lunitLemma w = text lemmaS <+> text (w ++ "_lunit:")
<+> doubleQuotes (text (w ++ "_bind")
<+> parens (text (w ++ "_eta x"))
<+> parens (text $ "t::'a => 'b " ++ w)
<+> equals <+> text "t x")
$+$ text "sorry "
runitLemma w = text lemmaS <+> text (w ++ "_runit:")
<+> doubleQuotes (text (w ++ "_bind")
<+> parens (text $ "t::'a " ++ w) <+> text (w ++ "_eta")
<+> equals <+> text "t")
$+$ text "sorry "
assocLemma w = text lemmaS <+> text (w ++ "_assoc:")
<+> doubleQuotes (text (w ++ "_bind")
<+> parens (text (w ++ "_bind")
<+> parens (text $ "s::'a " ++ w) <+> text "t") <+> text "u"
<+> equals <+> text (w ++ "_bind s")
<+> parens (text "%x." <+>
text (w ++ "_bind") <+> text "(t x) u"))
$+$ text "sorry "
etaInjLemma w = text lemmaS <+> text (w ++ "_eta_inj:")
<+> doubleQuotes (parens (text $ w ++ "_eta::'a => 'a " ++ w)
<+> text "x"
<+> equals <+> text (w ++ "_eta y")
<+> text "==>" <+> text "x = y")
$+$ text "sorry "
prnThymorph :: Doc -> String -> String -> TName -> [(String, String)]
-> [(String, String)] -> Doc
prnThymorph nm xn tn t ts ws = let qual s = tn ++ "." ++ s in
text "thymorph" <+> nm <+> colon <+>
text xn <+> cfun <+> text tn
$+$ text " maps" <+> brackets
(hcat [ parens $ doubleQuotes (text b <+> text a) <+> mapsto
<+> doubleQuotes (text b <+> text (qual t))
| (a, b) <- ts])
$+$ brackMapList qual ws
brackMapList :: (String -> String) -> [(String, String)] -> Doc
brackMapList f ws = brackets $ hsep $ punctuate comma
[ parens $ doubleQuotes (text a) <+> mapsto <+> doubleQuotes (text $ f b)
| (a, b) <- ws]
-- filter out types that are given in the domain table
printTypeDecls :: BaseSig -> DomainTab -> Arities -> Doc
printTypeDecls bs odt ars =
let dt = Map.fromList $ map (\ (t, _) -> (typeId t, [])) $ concat odt
in vcat $ map (printTycon bs) $ Map.toList $ Map.difference ars dt
printTycon :: BaseSig -> (TName, [(IsaClass, [(Typ, Sort)])]) -> Doc
printTycon bs (t, arity') = case arity' of
[] -> error "IsaPrint.printTycon"
(_, rs) : _ ->
if Set.member t
$ Map.findWithDefault (error "Isabelle.printTycon") bs
$ preTypes isaPrelude
then empty else
text typedeclS <+>
(if null rs then empty else
parens $ hsep $ punctuate comma
$ map (text . ("'a" ++) . show . snd) $ number rs) <+> text t
-- | show alternative syntax (computed by comorphisms)
printAlt :: VName -> Doc
printAlt (VName _ altV) = case altV of
Nothing -> empty
Just (AltSyntax s is i) -> parens $ doubleQuotes (text s)
<+> if null is then empty else text (show is) <+>
if i == maxPrio then empty else text (show i)
instance Pretty Sign where
pretty = printSign
-- | a dummy constant table with wrong types
constructors :: DomainTab -> ConstTab
constructors = Map.fromList . map (\ v -> (v, noTypeT))
. concatMap (map fst . snd) . concat
printMonSign :: Sign -> Doc
printMonSign sig = let ars = arities $ tsig sig
in
printMonArities (theoryName sig) ars
printSign :: Sign -> Doc
printSign sig = let dt = ordDoms $ domainTab sig
ars = arities $ tsig sig
in
printAbbrs (abbrs $ tsig sig) $++$
printTypeDecls (baseSig sig) dt ars $++$
printDefinitions (defs $ tsig sig) $++$
printFunctions (funs $ tsig sig) $++$
printLocales (locales $ tsig sig) $++$
printClassrel (classrel $ tsig sig) $++$
printDomainDefs dt $++$
printConstTab (Map.difference (constTab sig)
$ constructors dt) $++$
(if showLemmas sig
then showCaseLemmata dt else empty)
where
printAbbrs tab = if Map.null tab then empty else text typesS
$+$ vcat (map printAbbr $ Map.toList tab)
printAbbr (n, (vs, t)) = case vs of
[] -> empty
[x] -> text ('\'' : x)
_ -> parens $ hsep $ punctuate comma $
map (text . ('\'' :)) vs
<+> text n <+> equals <+> doubleQuotes (printType t)
printConstTab tab = if Map.null tab then empty else text constsS
$+$ vcat (map printConst $ Map.toList tab)
printConst (vn, t) = text (new vn) <+> doubleColon <+>
doubleQuotes (printType t) <+> printAlt vn
isDomain = case baseSig sig of
HOLCF_thy -> True
HsHOLCF_thy -> True
MHsHOLCF_thy -> True
_ -> False
printDomainDefs dtDefs = vcat $ map printDomainDef dtDefs
printDomainDef dts = if null dts then empty else
text (if isDomain then domainS else datatypeS)
<+> andDocs (map printDomain dts)
printDomain (t, ops) =
printTyp (if isDomain then Quoted else Null) t <+> equals <+>
fsep (bar $ map printDOp ops)
printDOp (vn, args) = let opname = new vn in
text (if any isSpace opname then show opname else opname)
<+> hsep (map (printDOpArg opname) $ number args)
<+> printAlt vn
printDOpArg o (a, i) = let
d = case a of
TFree _ _ -> printTyp Null a
_ -> doubleQuotes $ printTyp Null a
in if isDomain then
parens $ text "lazy" <+>
text (o ++ "_" ++ show i) <> doubleColon <> d
else d
showCaseLemmata dtDefs = text (concatMap showCaseLemmata1 dtDefs)
showCaseLemmata1 = concatMap showCaseLemma
showCaseLemma (_, []) = ""
showCaseLemma (tyCons, c : cns) =
let cs = "case caseVar of" ++ sp
sc b = showCons b c ++ concatMap ((" | " ++) . showCons b) cns
clSome = sc True
cl = sc False
showCons b (VName {new = cName}, args) =
let pat = cName ++ concatMap ((sp ++) . showArg) args
++ sp ++ "=>" ++ sp
term = showCaseTerm cName args
in
pat ++ if b then "Some" ++ sp ++ lb ++ term ++ rb ++ "\n"
else term ++ "\n"
showCaseTerm name args = case name of
"" -> sa
n : _ -> toLower n : sa
where sa = concatMap ((sp ++) . showArg) args
showArg (TFree [] _) = "varName"
showArg (TFree (n : ns) _) = toLower n : ns
showArg (TVar v s) = showArg (TFree (unindexed v) s)
showArg (Type [] _ _) = "varName"
showArg (Type m@(n : ns) _ s) =
if elem m ["typeAppl", "fun", "*"]
then concatMap showArg s
else toLower n : ns
showName (TFree v _) = v
showName (TVar v _) = unindexed v
showName (Type n _ _) = n
proof' = "apply (case_tac caseVar)\napply (auto)\ndone\n"
in
lemmaS ++ sp ++ "case_" ++ showName tyCons ++ "_SomeProm" ++ sp
++ "[simp]:\"" ++ sp ++ lb ++ cs ++ clSome ++ rb ++ sp
++ "=\n" ++ "Some" ++ sp ++ lb ++ cs ++ cl ++ rb ++ "\"\n"
++ proof'
instance Pretty Sentence where
pretty = printSentence
sp :: String
sp = " "
rb :: String
rb = ")"
lb :: String
lb = "("
-- Pretty printing of proofs
instance Pretty IsaProof where
pretty = printIsaProof
printIsaProof :: IsaProof -> Doc
printIsaProof (IsaProof p e) = fsep $ map pretty p ++ [pretty e]
instance Pretty ProofCommand where
pretty = printProofCommand
printProofCommand :: ProofCommand -> Doc
printProofCommand pc =
case pc of
Apply pms plus ->
let plusDoc = if plus then text "+" else empty
in text applyS <+> parens
(sepByCommas $ map pretty pms) <> plusDoc
Using ls -> text usingS <+> fsep (map text ls)
Back -> text backS
Defer x -> text deferS <+> pretty x
Prefer x -> text preferS <+> pretty x
Refute -> text refuteS
instance Pretty ProofEnd where
pretty = printProofEnd
printProofEnd :: ProofEnd -> Doc
printProofEnd pe =
case pe of
By pm -> text byS <+> parens (pretty pm)
DotDot -> text dotDot
Done -> text doneS
Oops -> text oopsS
Sorry -> text sorryS
instance Pretty Modifier where
pretty = printModifier
printModifier :: Modifier -> Doc
printModifier m =
case m of
No_asm -> text "no_asm"
No_asm_simp -> text "no_asm_simp"
No_asm_use -> text "no_asm_use"
instance Pretty ProofMethod where
pretty = printProofMethod
printProofMethod :: ProofMethod -> Doc
printProofMethod pm =
case pm of
Auto -> text autoS
Simp -> text simpS
AutoSimpAdd m names -> let modDoc = case m of
Just mod' -> parens $ pretty mod'
Nothing -> empty
in fsep $ [text autoS, text simpS, modDoc,
text "add:"] ++ map text names
SimpAdd m names -> let modDoc = case m of
Just mod' -> parens $ pretty mod'
Nothing -> empty
in fsep $ [text simpS, modDoc, text "add:"] ++
map text names
Induct var -> text inductS <+> doubleQuotes (printTerm var)
CaseTac t -> text caseTacS <+> doubleQuotes (printTerm t)
SubgoalTac t -> text subgoalTacS <+> doubleQuotes (printTerm t)
Insert ts -> fsep (text insertS : map text ts)
Other s -> text s
| nevrenato/HetsAlloy | Isabelle/IsaPrint.hs | gpl-2.0 | 41,879 | 1,387 | 27 | 14,867 | 14,680 | 7,636 | 7,044 | 927 | 54 |
{-# LANGUAGE DeriveGeneric #-} -- needed for json parsing
module Types where
import GHC.Generics (Generic)
import qualified Data.Map as M
import Data.Maybe (fromMaybe, fromJust)
-- a Symbol gets bound; a Sym is a sort of literal
type Symbol = String
data Sym = Sym String
deriving (Eq, Ord, Generic)
instance Show Sym where
show (Sym s) = '\'' : s
data Ref = R Int
deriving (Eq, Ord, Generic)
data Lit
= LInt Int
| LSym Sym
deriving (Show, Eq, Ord, Generic)
data Expr
= ELit Lit
| ESym Symbol
| ERef Ref
deriving (Show, Eq, Ord, Generic)
data Arrow = Arrow
{ source :: Expr
  , pred :: Symbol
, target :: Expr }
deriving (Eq, Ord, Show, Generic)
data Application = App Symbol [Expr]
deriving (Show, Eq, Ord)
-- TODO name type
-- ? >/<
-- combine ref into it
-- context structure
-- ? context = (Linear One, [Symbol, Val], [Ref, Val])
data Effect
= Assert Arrow
| Del Expr
| ENamed Application
deriving (Show, Eq, Ord)
data Log = Log Int [Effect]
type Edge = (Expr, Expr)
data Graph = Graph (M.Map Symbol [Edge])
deriving (Show, Eq, Ord)
data Max = Max Symbol Symbol
deriving (Show, Eq, Ord)
--data Operation
-- = OMatch Atom
-- | OCount Symbol | OMax Max | ODrop Symbol
-- | ONamed Application
-- deriving (Show, Eq, Ord)
type Pattern = [Effect]
type Rule = (Pattern, Pattern)
type Context = [(Symbol, Expr)]
type Var = Expr -> Either String (Context -> Context)
type RuleContext = [(Symbol, (Pattern, [Symbol]))]
data Program = Prog
{ p_defs :: RuleContext
, p_rule :: Rule
}
deriving (Show, Eq, Ord)
instance Show Ref where
show (R i) = "#" ++ show i
look k web = fromMaybe [] (M.lookup k web)
look' k ctxt = fromJust (lookup k ctxt)
-- TODO make monadic?
class Named a where
nmap :: (Symbol -> Symbol) -> a -> a
--instance Named Node where
-- nmap _ NHole = NHole
-- nmap f (NSym s) = NSym (f s)
-- nmap f (NRoot s) = NRoot (f s)
-- nmap f (NLit v) = NLit v
instance Named Expr where
nmap f (ESym s) = ESym (f s)
nmap f (ELit v) = ELit v
--instance Named Atom where
-- nmap f (Atom l p r) = Atom (nmap f l) p (nmap f r)
--instance Named Max where
-- nmap f (Max a b) = Max (f a) (f b)
instance Named Application where
nmap f (App s es) = App s (map (nmap f) es)
--instance Named Operation where
-- nmap f (OMatch atom) = OMatch (nmap f atom)
-- nmap f (OCount s) = OCount (f s)
-- nmap f (OMax m) = OMax (nmap f m)
-- nmap f (ODrop s) = ODrop (f s)
-- nmap f (ONamed (App n args)) = ONamed (App (f n) (map (nmap f) args))
instance Named Arrow where
nmap f (Arrow a p b) = Arrow (nmap f a) p (nmap f b)
instance Named Effect where
nmap f (Assert a) = Assert (nmap f a)
nmap f (Del s) = Del (nmap f s)
nmap f (ENamed (App n args)) = ENamed (App (f n) (map (nmap f) args))
-- TODO
instance Num Expr where
fromInteger = ELit . LInt . fromIntegral
(ELit (LInt l)) + (ELit (LInt r)) = ELit (LInt (l+r))
negate (ELit (LInt l)) = (ELit (LInt $ -l))
| kovach/web | res0/Types.hs | gpl-2.0 | 2,970 | 0 | 12 | 702 | 1,010 | 555 | 455 | 69 | 1 |
import Control.Monad (liftM)
import Data.List (sort)
import Data.Char (ord)
import Data.List.Split (splitOn)
getAsSortedList :: IO [String]
getAsSortedList =
liftM (sort . concatMap (splitOn ",") . lines . filter (/= '"') ) $ readFile "names.txt"
toNum :: String -> [Int]
toNum = map chst
where
chst x = ord x - 64
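-- Worked check (values from the Project Euler 22 statement): toNum "COLIN"
-- is [3,15,12,9,14], which sums to 53; as the 938th name in the sorted list
-- its score is 938 * 53 = 49714.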
main :: IO ()
main = do
l <- getAsSortedList
let tlist = zip l [1..]
rlist = map (\x -> snd x * (sum . toNum $ fst x)) tlist
print $ sum rlist
| ciderpunx/project_euler_in_haskell | euler022.hs | gpl-2.0 | 487 | 1 | 16 | 115 | 234 | 117 | 117 | 16 | 1 |
-- -*- mode: haskell -*-
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module NFA.AI where
import Autolib.NFA.Example
import Autolib.NFA.Type (NFA)
import Autolib.Set
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data AI = AI { name :: String -- abbreviation
, automat :: NFA Char Int
}
deriving ( Typeable )
example :: AI
example =
let sigma = mkSet "abc"
    in AI { name = "some language"
, automat = example_sigma sigma
}
$(derives [makeReader, makeToDoc] [''AI])
instance Show AI where show = render . toDoc
instance Read AI where readsPrec = parsec_readsPrec | Erdwolf/autotool-bonn | src/NFA/AI.hs | gpl-2.0 | 636 | 18 | 10 | 137 | 176 | 103 | 73 | 19 | 1 |
module Ltc.Network.Types (
Hostname, Port
) where
type Hostname = String
type Port = Int
| scvalex/ltc | src/Ltc/Network/Types.hs | gpl-3.0 | 102 | 0 | 4 | 28 | 27 | 18 | 9 | 4 | 0 |
module P08Pizza where
import Library
main :: IO ()
main = do
peeps <- promptNonNegInt "Number of people: "
pizzas <- promptNonNegInt "Number of pizzas: "
spp <- promptNonNegInt "Slices per pizza: "
let (slices, leftover) = (spp * pizzas) `divMod` peeps
putStrLn $ formatPizzas pizzas
++ formatSlices slices
++ formatLeftovers leftover
backwards :: IO ()
backwards = do
peeps <- promptNonNegInt "Number of people : "
slicesPerson <- promptNonNegInt "Slices per person: "
spp <- promptNonNegInt "Slices per pizza: "
  let pizzasNeeded = ((peeps * slicesPerson) + spp - 1) `div` spp  -- round up so everyone gets enough slices
putStrLn $ "For " ++ show peeps ++ " people to have "
++ show slicesPerson ++ " slices each and "
++ show spp ++ " slices per pizza, you need "
++ show pizzasNeeded ++ " pizzas in total"
formatSlices :: Int -> String
formatSlices slices =
case slices of
0 -> " So nobody gets a slice of pizza at all. Boo! And there was"
1 -> " So each person gets a single slice of pizza, with"
_ -> " So each person gets " ++ show slices ++ " slices of pizza, with"
formatPizzas :: Int -> String
formatPizzas pizzas =
case pizzas of
0 -> "There was no pizza. Boo!"
1 -> "There was 1 pizza."
_ -> "There were " ++ show pizzas ++ " pizzas."
formatLeftovers :: Int -> String
formatLeftovers leftover =
case leftover of
0 -> " no slices left over."
1 -> " one slice left over."
_ -> " " ++ show leftover ++ " slices left over."
| ciderpunx/57-exercises-for-programmers | src/P08Pizza.hs | gpl-3.0 | 1,576 | 0 | 15 | 460 | 373 | 181 | 192 | 39 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
module Bamboo.Theme.MiniHTML5.Control.Comment where
import Bamboo.Model.Comment (Comment)
import Bamboo.Theme.MiniHTML5.Env hiding (body, date, alt, create)
import qualified Bamboo.Theme.MiniHTML5.Atom.Comment as CommentVA
list :: [Comment] -> [Html]
list [] = []
list xs =
[ h2 ! [id "comments"] << "Responses"
, olist ! [theclass "commentlist" ]
<< xs.zip (cycle ["comments-alt", ""]).map (splash styled_entry)
]
where
styled_entry alt x = li ! [theclass alt] << x.CommentVA.entry
create :: State -> Comment -> Html
create = CommentVA.create | nfjinjing/bamboo-theme-mini-html5 | src/Bamboo/Theme/MiniHTML5/Control/Comment.hs | gpl-3.0 | 601 | 1 | 11 | 98 | 203 | 117 | 86 | -1 | -1 |
Store (Store (set a)) (get a) | hmemcpy/milewski-ctfp-pdf | src/content/3.9/code/haskell/snippet11.hs | gpl-3.0 | 29 | 0 | 9 | 5 | 27 | 12 | 15 | -1 | -1 |
-- Copyright (c) 2014 Contributors as noted in the AUTHORS file
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Arduino.Uno
import Data.Tuple (swap)
main = compileProgram $ do
setupAlternateBlink pin11 pin12 (createVariableTick a0)
setupAlternateBlink :: GPIO -> GPIO -> Stream a -> Action ()
setupAlternateBlink pin1 pin2 triggerStream = do
output2 (digitalOutput pin1) (digitalOutput pin2) =: alternate triggerStream
where
alternate :: Stream a -> Stream (Bit, Bit)
alternate = foldpS2Tuple (\_ -> swap) (bitLow, bitHigh)
createVariableTick :: AnalogInput -> Stream ()
createVariableTick limitInput = accumulator limitStream timerDelta
where
limitStream :: Stream Arduino.Uno.Word
limitStream = analogRead limitInput ~> mapS analogToLimit
analogToLimit :: Expression Arduino.Uno.Word -> Expression Arduino.Uno.Word
analogToLimit analog = 1000 + analog * 20
| frp-arduino/frp-arduino | examples/FrequencyBlink.hs | gpl-3.0 | 1,536 | 0 | 10 | 290 | 258 | 137 | 121 | 15 | 1 |
-- * Header
{- GHC version 7.8.3
Author: Florent Balestrieri
-}
-- ** Ghc options
{-# LANGUAGE
MultiParamTypeClasses
, FlexibleContexts
, FlexibleInstances
, TypeOperators
, UndecidableInstances
, OverlappingInstances
, NoMonomorphismRestriction
, GADTs
, TypeFamilies
, LambdaCase
, RankNTypes
, EmptyCase
, RecordWildCards
, ConstraintKinds
, PolyKinds
, DataKinds
#-}
-- ** Module
module Language.Grammars.AGalacarte.Core where
-- ** Import
import Language.Grammars.AGalacarte.Prelude
import Language.Grammars.AGalacarte.Proxy
import Language.Grammars.AGalacarte.DependentTypes
import Language.Grammars.AGalacarte.Indexed
import Language.Grammars.AGalacarte.IndexedContainer
import Language.Grammars.AGalacarte.Product
import Language.Grammars.AGalacarte.Attribute
import Control.Applicative (liftA2)
-- * Attribute Grammars
-- ** Fragments
{- Local attributes
for a node whose children are at positions "pos"
the attribute of the node is of type "node"
the attribute of its children is of type "child"
NOTE: with memoised containers, there is no need for a
datatype LocAttrs (we can use pairs), but here, since (pos
:-> child) is higher rank, we would need impredicative types
to use a pair instead of LocAttrs.
-}
data LocAttrs child_index node_type child_type n =
LocAttrs (node_type n) (Prod child_index child_type)
mapLocAttrs :: (n :-> n') -> (c :-> c') ->
LocAttrs p n c :-> LocAttrs p n' c'
mapLocAttrs nf cf (LocAttrs n pc) = LocAttrs (nf n) (cf `pmap` pc)
{- to be used with an indexed type like (Rec af)
-}
mapLocAttrs2 :: (a :--> a') ->
LocAttrs p (a i) (a s) :-> LocAttrs p (a' i) (a' s)
mapLocAttrs2 m = mapLocAttrs m m
type IFrag c a af n = forall p.
c p n -> LocAttrs p (Rec a c I) (Rec a c S) n
-> LocAttrs p (RecF af a c S) (RecF af a c I) n
-- "r" is the index of the root type
data Frag c l l' r = Frag
{ frule :: forall n . IFrag c l l' n
, froot :: Rec l c S r -> RecF l' l c I r
}
-- ** AG
type AGRule c i s r n = forall p.
c p n -> LocAttrs p i s n -> LocAttrs p s i n
{- A complete attribute grammar -}
data AG c i s r = AG
{ grule :: forall n . AGRule c i s r n
, groot :: s r -> i r
}
type AG' c a r = AG c (Rec a c I) (Rec a c S) r
-- Universal AG: can be run on any type of the family
newtype UAG c i s = UAG {fromIAG :: forall r . AG c i s r}
ag :: Frag c a a r -> AG' c a r
ag Frag{..} = AG
{ grule = \c a -> mapLocAttrs SynRec InhRec $ frule c a
, groot = InhRec . froot
}
algAG ::
AG c i s r -> Alg c (IFun i s)
algAG (AG{..}) (c :- pf) = IFun $ \i ->
let LocAttrs s pi
= grule c $ LocAttrs i $ pf `papp` pi
in s
runAG :: AG c i s r -> Expr c r -> s r
runAG g@(AG{..}) t = s
where
s = cata (algAG g) t `appIFun` groot s
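    -- Note that the definition of 's' above is circular: the inherited
    -- attributes at the root ('groot s') are computed from the synthesized
    -- result 's' itself.  Laziness ties this knot, as is usual for
    -- attribute-grammar evaluators written in Haskell.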
-- ** Gluing Fragments
{- We can combine two fragments if their parameters are the same;
each of them must define distinct attributes.
The projections for the attributes are automatically defined for tuples.
We must ensure that the tuples have only attributes on the first components
(right parenthesizing).
The order in which we glue fragments doesn't matter: the
attribute lists would be equal up to a permutation, and running
the AG would be equivalent up to observations.
-}
glue :: (AppendRecF f1 f2) =>
Frag c a f1 r ->
Frag c a f2 r ->
Frag c a (f1 :++ f2) r
glue (Frag g1 r1) (Frag g2 r2) = Frag
{ frule = \ c i_ss ->
let LocAttrs s1 i1 = g1 c i_ss
LocAttrs s2 i2 = g2 c i_ss
in LocAttrs (append s1 s2) (plift2 append i1 i2)
, froot = \s -> r1 s `append` r2 s
}
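-- For example (hypothetical labels): given @f1 :: Frag c a '[Height] r@
-- and @f2 :: Frag c a '[Depth] r@ defining distinct attributes,
-- @f1 `glue` f2@ has type @Frag c a '[Height, Depth] r@ and computes
-- both attributes in a single traversal.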
-- ** Syn and Inh rules
-- *** Proxies for rule parameters
rule_container = p1
rule_record = p2
rule_aspect = p3
rule_label = p4
rule_constructor = p5
rule_child = p6 -- only irule
rule_childdom = child_dom . rule_constructor
rule_index = constr_index . rule_constructor
-- *** Attribute Projections
-- **** label ! child
type ProjPos d v a = forall l n m .
( IsNat m
, ListAt v m ~ n
, GetAttr a l S d n
) =>
l -> P m -> Type l a d n
projPos :: Prod p (Rec a d S) -> ProjPos d p a
projPos s l p = get l (pget p s)
-- **** child ! label
{- We provide a flipped version of children attribute projection
because some users might prefer the syntax child ! attribute
rather than the symmetric one.
-}
type FlippedProjPos d v a = forall l n m .
( IsNat m
, ListAt v m ~ n
, GetAttr a l S d n
) =>
P m -> l -> Type l a d n
unflipProjPos :: (a :# d :# p :#N) -> FlippedProjPos d p a -> ProjPos d p a
unflipProjPos _ (!) l p = p ! l
flipProjPos :: (a :# d :# p :#N) -> ProjPos d p a -> FlippedProjPos d p a
flipProjPos _ (!) p l = l ! p
-- ***** Using rule proxies
flipP x = (rule_record x :# rule_container x :# rule_childdom x :#N)
flipProjPosP x = flipProjPos $ flipP x -- eta expansion is necessary
unflipProjPosP x = unflipProjPos $ flipP x
-- **** getInherited label
type ProjInh d a n = forall l .
(GetAttr a l I d n) => l -> Type l a d n
projInh :: Rec a d I n -> ProjInh d a n
projInh i l = get l i
-- *** SRule
-- **** Methods types: SynV, SynC
type SynV c d a l =
Rec a d I (Index c) ->
c ->
Prod (ChildDom c) (Rec a d S) ->
(Type l a d (Index c))
-- curried!
type SynC c d a l =
Rec a d I (Index c) ->
c ->
Curry (ChildDom c) (Rec a d S)
(Type l a d (Index c))
-- **** Contexts
{- We want "Constructor c" and "SynAttr t" in the class
context because we want the user to be warned when they
haven't defined an instance for their attributes, without
those constraint, the error would shows up later and would be
much harder to understand than a missing instance.
If we put the other constraints in the context of the method rather
than the class, that's because we don't want to have to write
them all the time when defining an instance.
-}
type SRuleCtxt l c =
( Constructor c
, SynAttr l)
type SRuleMethodCtxt l c d a =
( GetAttr a l S d (Index c)
, c :< d)
-- **** SRule
class SRuleCtxt l c => SRule d a (g :: *) l c where
srule :: SRuleMethodCtxt l c d a => d :# a :# g :# l :# c :#N -> SynC c d a l
srulev :: SRuleMethodCtxt l c d a => d :# a :# g :# l :# c :#N -> SynV c d a l
srule x i c = prod_curry $ srulev x i c
srulev x i c = prod_uncurry $ srule x i c
-- useful to call a rule from another namespace
with_namespace ::
d :# a :# g :# l :# c :#N ->
g' ->
d :# a :# g' :# l :# c :#N
with_namespace = cst2 proxies
-- when a namespace is parameterised with another namespace
with_super :: d :# a :# g s :# l :# c :#N ->
d :# a :# s :# l :# c :#N
with_super = const proxies
-- **** Choosing between defined and trivial cases
{- we must choose between importing a SRule and a trivial case
according to the type "x" representing "AttrFam l (Index c)"
-}
data SRuleChoice d a g l c where
SRuleDefined :: (AttrDefined l (Index c), SRule d a g l c
) => SRuleChoice d a g l c
SRuleTrivial :: (AttrTrivial l (Index c)
) => SRuleChoice d a g l c
class (x ~ AttrFam l (Index c), n ~ Index c)
=> SRuleChoose x n d a g l c where
srule_choose :: SRuleChoice d a g l c
instance (AttrTrivial l n, n ~ Index c)
=> SRuleChoose (Const3 ()) n d a g l c where
srule_choose = SRuleTrivial
instance (AttrDefined l n, SRule d a g l c, n ~ Index c)
=> SRuleChoose (Type' n) n d a g l c where
srule_choose = SRuleDefined
-- **** Syn Type
{- Defining a synthesized attribute alone. -}
type Syn c d a l = forall p n.
c p n -> Rec a d I n -> Prod p (Rec a d S) -> Attr l a d S n
-- ***** Conversion
{- "syn" builds a singleton attribute -}
syn :: (SynAttr l, Container c) => Syn c c a l -> Frag c a '[l] r
syn srule = Frag
{ frule = \c (LocAttrs i ss)
-> LocAttrs (srule c i ss :& X)
(ppure (container c) $ AttrDual :& X)
, froot = const (AttrDual :& X)
}
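-- For instance (hypothetical label @Size@): @syn sizeRule@ yields a
-- singleton fragment of type @Frag c a '[Size] r@; larger grammars are
-- then obtained by gluing such singleton fragments together.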
-- **** SRule' class
type SRuleT' g l c d a = g :# d :#N -> Syn c d a l
class (SynAttr l, a :=> l) => SRule' l c d a (g :: *) where
srule' :: SRuleT' g l c d a
instance ( SRuleChoose (AttrFam l (Index c)) (Index c) d a g l c
, SRuleCtxt l c
, c :< d
, a :=> l
, AttrCaseOf l (Index c)
) => SRule' l (C c) d a g where
srule' = res $ \x _ (C c) i s -> case detP (chooseP x) srule_choose of
SRuleDefined -> attr $ prod_uncurry (srule x i c) s
SRuleTrivial -> attrTrivial
where
res :: Cont (SRuleT' g l (C c) d a) (d :# a :# g :# l :# c :#N)
res c = c proxies -- Eta expansion is necessary for typechecking
chooseP :: (d :# a :# g :# l :# c :#N) -> P (SRuleChoice d a g l c)
chooseP _ = P
instance (SRule' l c d a g, SRule' l c' d a g)
=> SRule' l (c :+: c') d a g where
srule' = liftA2 sumElim srule' srule'
instance (SynAttr l, a :=> l) => (SRule' l EmptyC d a g) where
srule' _ = \case{}
-- *** IRule
-- **** Methods types: InhV, InhC
type InhV c d a l k =
( ListAt (ChildDom c) k ~ n
, GetAttr a l I d n
) =>
Rec a d I (Index c) ->
c ->
Prod (ChildDom c) (Rec a d S) ->
(Type l a d n)
type InhC c d a l k =
( ListAt (ChildDom c) k ~ n
, GetAttr a l I d n
) =>
Rec a d I (Index c) ->
c ->
Curry (ChildDom c) (Rec a d S)
(Type l a d n)
-- **** IRule Context
type IRuleCtxt l c =
( Constructor c
, InhAttr l)
-- **** IRule class
class (IRuleCtxt l c) => IRule d a (g :: *) l c (k :: Nat) where
irule :: (c :< d) => d :# a :# g :# l :# c :# k :#N -> InhC c d a l k
irulev :: (c :< d) => d :# a :# g :# l :# c :# k :#N -> InhV c d a l k
irule x i c = prod_curry $ irulev x i c
irulev x i c = prod_uncurry $ irule x i c
-- **** Complete IRule with trivial cases
type ChildAttrFam l c k = AttrFam l (ListAt (ChildDom c) k)
type ChildAttrTrivial l c k = AttrTrivial l (ListAt (ChildDom c) k)
type ChildAttrDefined l c k = AttrDefined l (ListAt (ChildDom c) k)
type ChildAttrCaseOf l c k = AttrCaseOf l (ListAt (ChildDom c) k)
data IRuleChoice d a g l c k where
IRuleTrivial ::
( ChildAttrTrivial l c k
, ChildAttrCaseOf l c k) => IRuleChoice d a g l c k
IRuleDefined ::
( ChildAttrDefined l c k
, IRule d a g l c k
, ChildAttrCaseOf l c k) => IRuleChoice d a g l c k
class ( x ~ ChildAttrFam l c k, ChildAttrCaseOf l c k
) => IRuleMatch x d a g l c (k :: Nat) where
irule_match :: IRuleChoice d a g l c k
instance ( ChildAttrTrivial l c k
, ChildAttrCaseOf l c k
) => IRuleMatch (Const3 ()) d a g l c k where
irule_match = IRuleTrivial
instance ( ChildAttrDefined l c k
, IRule d a g l c k
, ChildAttrCaseOf l c k
, n ~ ListAt (ChildDom c) k
) => IRuleMatch (Type' n) d a g l c k where
irule_match = IRuleDefined
-- ***** IRuleChoose
{- We must make sure "k" is used only once in the parameter
list, because we're going to quantify over it using the
Forall constraint family -}
class IRuleChoose d a g l c (k :: Nat) where
irule_choose :: IRuleChoice d a g l c k
instance (IRuleMatch (ChildAttrFam l c k) d a g l c k)
=> IRuleChoose d a g l c k where
irule_choose = irule_match
irule_choose_from_dict :: IsNat k =>
Dict2 (IRuleChoose d a g l c) k ->
IRuleChoice d a g l c k
irule_choose_from_dict Dict2 = irule_choose
-- **** Inh type
{- Inh defines the rules for the whole inherited attribute family -}
type Inh c d a l = forall p m n k.
c p m ->
Rec a d I m ->
Prod p (Rec a d S) ->
Prod p (Attr l a d I)
-- ***** Conversions
{- "inh" builds a singleton attribute, from a Inh rule and an
initialisation function. -}
inh :: (InhAttr l) =>
Inh c c a l -> (Rec a c S r -> Attr l a c I r) -> Frag c a '[l] r
inh irule init = Frag
{ frule = \c (LocAttrs i ss) ->
LocAttrs (AttrDual :& X) ((:& X) `pmap` irule c i ss)
, froot = (:& X) . init
}
{- inh' does the job of inh with a constant initialisation value -}
inh' :: (InhAttr l) =>
Inh c c a l -> Attr l a c I r -> Frag c a '[l] r
inh' irule init = inh irule (const init)
-- **** IRule' class
type IRuleT l c d a g = g :# d :#N -> Inh c d a l
class (InhAttr l, a :=> l) => IRule' l c d a (g :: *) where
irule' :: IRuleT l c d a g
instance ( IRuleCtxt l c
, Forall (Range (ChildDom c))
(IRuleChoose d a g l c)
, a :=> l
, c :< d
) => IRule' l (C c) d a g where
irule' gd (C c) i s = ptabT (is_list $ child_dom' c) $
\k -> res c (k :# gd) $
\(chooseP :# choiceP :# dom :# y) ->
case detP choiceP $ irule_choose_from_dict $ pgetDP k dom
$ forallD chooseP $ range_is_listP dom of
IRuleDefined -> attr $ prod_uncurry (irule y i c) s
IRuleTrivial -> attrTrivial
where
res :: c -> k :# g :# d :#N ->
Cont (Attr l a d I (ListAt (ChildDom c) k))
( IRuleChoose d a g l c :#
IRuleChoice d a g l c k :#
ChildDom c :#
d :# a :# g :# l :# c :# k :#N)
res = cst2 ($ proxies)
instance (IRule' l c d a g, IRule' l c' d a g)
=> IRule' l (c :+: c') d a g where
irule' = liftA2 sumElim irule' irule'
instance (InhAttr l, a :=> l) => (IRule' l EmptyC d a g) where
irule' _ = \case{}
-- ** Default rule instances, importing rules
{- The default SRule and IRule can import rules from other grammars.
The import list is given by a type family "Import g";
the default grammar, by a type family "Default g".
The import list allows picking and choosing rules from
different grammars by naming the attributes explicitly;
therefore, only a finite number of rules can be imported this
way. On the other hand, the default grammar is resorted
to when all the other possibilities are exhausted: no rule
defined for the label in the current grammar or any of the
imported list. Note that if the default grammar is extended
with new rules, they also benefit all the grammars that
use it by default. -}
-- *** Import and Default
{- All the rules of "Default g" are imported. In particular,
the grammar "Copy" implements the automatic propagation of
inherited attributes from parents to children if the
attribute has the same type for both.
-}
type family Default g
-- *** Import specification
-- we can import rules matching an attribute or a constructor
data ImportItem aspect list
= IA aspect list
| IC aspect list
| IAC aspect list list
type Match_Attributes = IA
type Match_Constructors = IC
type Match_Both = IAC
aspectIA :: P (IA g a ': xs) -> P g
aspectIC :: P (IC g a ': xs) -> P g
aspectIAC :: P (IAC g a b ': xs) -> P g
tailIA ::
P (IA g (l ': ls) ': xs) ->
P (IA g ls ': xs)
tailIC ::
P (IC g (c ': cs) ': xs) ->
P (IC g cs ': xs)
tailIAC_A ::
P (IAC g (l ': ls) cs ': xs) ->
P (IAC g ls cs ': xs)
tailIAC_C ::
P (IAC g ls (c ': cs) ': xs) ->
P (IAC g ls cs ': xs)
aspectIA = const P
aspectIC = const P
aspectIAC = const P
tailIA = const P
tailIC = const P
tailIAC_A = const P
tailIAC_C = const P
type family Import g :: [ImportItem * [*]]
importP :: P g -> P (Import g)
importP = const P
importAllP :: P g -> P (Default g)
importAllP = const P
-- *** Union of SRule and IRule
{- The import list works for both IRule and SRule, so we
package their methods in a union type.
The type "n" is irrelevant for SRule, we use Zero in the instance.
-}
data Rule s l n c d a where
RuleS :: SynC c d a l -> Rule S l n c d a
RuleI :: InhC c d a l n -> Rule I l n c d a
-- *** ImportList
class (Attribute l, Mode l ~ s, Constructor c
) => ImportList (xs :: [ImportItem * [*]])
i s l n c d a where
import_list ::
xs :# i :# s :# l :# n :# c :# d :# a :#N ->
Rule s l n c d a
-- **** SRules
-- ***** Attribute for every constructor
-- "l" matches
instance ( SRuleCtxt l c
, SRuleMethodCtxt l c d a
, SRule d a g l c
) => ImportList (IA g (l ': ls) ': xs) i S l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleS $ srule (d :# a :# aspectIA xs :# l :# c :#N)
-- ***** Constructor for every attribute
-- "c" matches
instance ( SRuleCtxt l c
, SRuleMethodCtxt l c d a
, SRule d a g l c
) => ImportList (IC g (c ': cs) ': xs) i S l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleS $ srule (d :# a :# aspectIC xs :# l :# c :#N)
-- ***** Cartesian product of attribute and constructor lists
-- "c" and "l" match.
instance ( SRuleCtxt l c
, SRuleMethodCtxt l c d a
, SRule d a g l c
) => ImportList (IAC g (l ': ls) (c ': cs) ': xs) i S l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleS $ srule (d :# a :# aspectIAC xs :# l :# c :#N)
-- ***** Default Import
instance ( SRuleCtxt l c
, SRuleMethodCtxt l c d a
, SRule d a g l c
) => ImportList '[] g S l n c d a where
import_list (xs :# g :# s :# l :# n :# c :# d :# a :#N)
= RuleS $ srule (d :# a :# g :# l :# c :#N)
-- **** IRule
-- ***** Attribute for every constructor
-- "l" matches
instance ( IRuleCtxt l c
, c :< d
, IRule d a g l c n
) => ImportList (IA g (l ': ls) ': xs) i I l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleI $ irule (d :# a :# aspectIA xs :# l :# c :# n :#N)
-- ***** Constructor for every attribute
-- "c" matches
instance ( IRuleCtxt l c
, c :< d
, IRule d a g l c n
) => ImportList (IC g (c ': cs) ': xs) i I l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleI $ irule (d :# a :# aspectIC xs :# l :# c :# n :#N)
-- ***** Cartesian product of attribute and constructor lists
-- "c" and "l" match.
instance ( IRuleCtxt l c
, c :< d
, IRule d a g l c n
) => ImportList (IAC g (l ': ls) (c ': cs) ': xs) i I l n c d a where
import_list (xs :# i :# s :# l :# n :# c :# d :# a :#N)
= RuleI $ irule (d :# a :# aspectIAC xs :# l :# c :# n :#N)
-- ***** Default import
instance ( IRuleCtxt l c
, c :< d
, IRule d a g l c n
) => ImportList '[] g I l n c d a where
import_list (xs :# g :# s :# l :# n :# c :# d :# a :#N)
= RuleI $ irule (d :# a :# g :# l :# c :# n :#N)
-- **** Iterating through the lists
-- ***** Next attribute of an IA list
instance ( ImportList (IA g ls ': xs) i s l n c d a
) => ImportList (IA g (l' ': ls) ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailIA xs :# rest)
-- ***** Next constructor of an IC list
instance ( ImportList (IC g cs ': xs) i s l n c d a
) => ImportList (IC g (c' ': cs) ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailIC xs :# rest)
-- ***** Next attribute of an IAC list
instance ( ImportList (IAC g ls (c ': cs) ': xs) i s l n c d a
) => ImportList (IAC g (l' ': ls) (c ': cs) ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailIAC_A xs :# rest)
-- ***** Next constructor of an IAC list
instance ( ImportList (IAC g (l' ': ls) cs ': xs) i s l n c d a
) => ImportList (IAC g (l' ': ls) (c' ': cs) ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailIAC_C xs :# rest)
-- ***** Next item in the import list
-- ****** IA: Empty list
instance ( ImportList xs i s l n c d a
) => ImportList (IA g '[] ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailP xs :# rest)
-- ****** IC: Empty list
instance ( ImportList xs i s l n c d a
) => ImportList (IC g '[] ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailP xs :# rest)
-- ****** IAC: Empty attribute list
instance ( ImportList xs i s l n c d a
) => ImportList (IAC g '[] (c' ': cs) ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailP xs :# rest)
-- ****** IAC: Empty constructor list
instance ( ImportList xs i s l n c d a
) => ImportList (IAC g (l' ': ls) '[] ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailP xs :# rest)
-- ****** IAC: Both empty lists
instance ( ImportList xs i s l n c d a
) => ImportList (IAC g '[] '[] ': xs) i s l n c d a where
import_list (xs :# rest)
= import_list (tailP xs :# rest)
-- *** Default SRule
instance (ImportList (Import g) (Default g) S l Zero c d a)
=> SRule d a g l c where
srule (d :# a :# g :# l :# c :#N) =
case import_list $ importP g :# importAllP g :# synthesizedP
:# l :# n0 :# c :# d :# a :#N
of RuleS rule -> rule
-- *** Default IRule
instance (ImportList (Import g) (Default g) I l n c d a)
=> IRule d a g l c n where
irule (d :# a :# g :# l :# c :# n :#N) =
case import_list $ importP g :# importAllP g :# inheritedP
:# l :# n :# c :# d :# a :#N
of RuleI rule -> rule
-- ** Building fragments
{- We design a generic operator that builds a fragment from
an attribute label. It works with three different types:
a SRule needs just the attribute label;
an IRule needs to initialise the attribute for the root,
either with a function from the synthesized attribute record
or with a constant.
-}
-- *** Rules
-- Careful, unnecessarily kind polymorphic
class EmptyRule c d a g where
instance EmptyRule c d a g where
-- Product of grammar rules
class (r1 c d a g, r2 c d a g) => PairRules r1 r2 c d a g where
instance (r1 c d a g, r2 c d a g) => PairRules r1 r2 c d a g where
type family Rules (i :: [*]) (s :: [*]) where
Rules (i ': ii) s = IRule' i `PairRules` Rules ii s
Rules '[] (s ': ss) = SRule' s `PairRules` Rules '[] ss
Rules '[] '[] = EmptyRule
-- **** lemmas
{- Those lemmas are used in the definition of pglue and (|->) -}
-- ***** Bringing together inherited and synthesized rules
rules_pair ::
Rules i '[] c d a g =>
Rules '[] s c d a g =>
s :# c :# d :# a :# g :#N ->
TList i ->
Dict (Rules i s c d a g)
rules_pair p TNil = Dict
rules_pair p (TCons _ i) =
case rules_pair p i of Dict -> Dict
-- ***** Splitting inherited and synthesized rules
rules_split ::
Rules i s c d a g =>
s :# c :# d :# a :# g :#N ->
TList i ->
( Dict (Rules '[] s c d a g)
, Dict (Rules i '[] c d a g))
rules_split p TNil = (Dict, Dict)
rules_split p (TCons _ i) =
case rules_split p i of (Dict, Dict) -> (Dict, Dict)
-- ***** (S) splitting the rules of an appended list
rules_append_s ::
Rules '[] (s1 :++ s2) c d a g =>
s2 :# c :# d :# a :# g :#N ->
TList s1 ->
( Dict (Rules '[] s1 c d a g)
, Dict (Rules '[] s2 c d a g))
rules_append_s p TNil = (Dict, Dict)
rules_append_s p (TCons _ s1) =
case rules_append_s p s1 of
(Dict, Dict) -> (Dict, Dict)
-- ***** (I) splitting the rules of an appended list
rules_append_i ::
Rules (i1 :++ i2) '[] c d a g =>
i2 :# c :# d :# a :# g :#N ->
TList i1 ->
( Dict (Rules i1 '[] c d a g)
, Dict (Rules i2 '[] c d a g))
rules_append_i p TNil = (Dict, Dict)
rules_append_i p (TCons _ i1) =
case rules_append_i p i1 of
(Dict, Dict) -> (Dict, Dict)
-- ***** Parallel append
{- We first split, append inherited and synthesized rules separately,
and then combine.
-}
rules_append ::
Rules (i1 :++ i2) (s1 :++ s2) c d a g =>
i1 :# i2 :# s1 :# s2 :# c :# d :# a :# g :#N ->
TList i1 ->
TList i2 ->
TList s1 ->
( Dict (Rules i1 s1 c d a g)
, Dict (Rules i2 s2 c d a g))
rules_append (i1' :# i2' :# s1' :# s2' :# r) i1 i2 s1 =
case rules_split (appendP s1' s2' :# r) (appendT i1 i2) of
(Dict,Dict) ->
case ( rules_append_i (i2' :# r) i1
, rules_append_s (s2' :# r) s1) of
((Dict,Dict),(Dict,Dict)) ->
( rules_pair (s1' :# r) i1
, rules_pair (s2' :# r) i2)
-- *** PFrag
{-
- i :: list of attribute labels that must have a IRule instance
- s :: list of attribute labels that must have a SRule instance
- c :: container of the expression on which the grammar is ultimately run
- a :: attribute record on which the rules of i and s lists must apply
- r :: the renamed attribute record which will be used in practice
- f :: the attributes computed by this fragment
- n :: the non-terminal of the expression on which to run this fragment.
-}
newtype PFrag i s c a r f n = PFrag {fromPFrag :: forall g .
Rules i s c c a g => P g -> Frag c r f n}
{- In the library, the only way to use a PFrag is to execute the grammar.
Therefore we keep universal quantification for all fragments;
only when executing the grammar do we need to give the proxy.
-}
run :: (Rules i s c c a' g) =>
g -> PFrag i s c a' a a n -> Expr c n -> Rec a c 'S n
run g f = runAG $ ag $ fromPFrag f (proxy g)
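-- Typical use (hypothetical grammar value @myGrammar@ and fragment @f@):
-- @run myGrammar f expr@ evaluates the fragment's rules over @expr@ and
-- returns the record of synthesized attributes at the root.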
pfrag_syn :: (SynAttr l, Container c) =>
l -> PFrag '[] '[l] c a a '[l] n
pfrag_syn l = PFrag $ \ g -> syn $ srule' $ g :# proxies
pfrag_inh :: (InhAttr l) =>
(Rec a c S n -> Attr l a c I n) ->
PFrag '[l] '[] c a a '[l] n
pfrag_inh init = PFrag $ \g -> inh (irule' $ g :# proxies) init
-- *** MkFrag
type family MkFragT t c a r n where
MkFragT (PFrag i s c a r l n) c a r n = (PFrag i s c a r l n)
MkFragT (Rec a c S n -> Attr l a c I n) c a a n = PFrag '[l] '[] c a a '[l] n
MkFragT (Attr l a c I n) c a a n = PFrag '[l] '[] c a a '[l] n
MkFragT l c a a n = PFrag '[] '[l] c a a '[l] n
class MkFrag t c a r n where
frag :: (MkFragT t c a r n ~ PFrag i s c a r l n)
=> t -> PFrag i s c a r l n
-- Thanks to this instance, we use only one glue operator: "&"
instance (MkFragT (PFrag i s c a r l n) c a r n
~ PFrag i s c a r l n
, c ~ c', a ~ a', r ~ r', n ~ n'
) => MkFrag (PFrag i s c a r l n) c' a' r' n' where
frag = id
instance ( SynAttr l
, Container c
, MkFragT l c a a n ~ PFrag '[] '[l] c a a '[l] n
, a ~ a'
) => MkFrag l c a a' n where
frag = pfrag_syn
instance ( InhAttr l
, MkFragT (Rec a c S n -> Attr l a c I n) c a a n
~ PFrag '[l] '[] c a a '[l] n
, x ~ Rec a c S n, c' ~ c, a' ~ a, a ~ a'', n' ~ n, s ~ S, i ~ I
) => MkFrag (x -> Attr l a' c' i n') c a a'' n where
frag = pfrag_inh
instance ( InhAttr l
, MkFragT (Attr l a c I n) c a a n
~ PFrag '[l] '[] c a a '[l] n
, c' ~ c, a' ~ a, a'' ~ a, n' ~ n, s ~ I)
=> MkFrag (Attr l a' c' s n') c a a'' n where
frag = pfrag_inh . const
pglue ::
( AppendRecF f1 f2
, IsList i1, IsList i2, IsList s1) =>
PFrag i1 s1 c a r f1 n ->
PFrag i2 s2 c a r f2 n ->
PFrag (i1 :++ i2) (s1 :++ s2) c a r (f1 :++ f2) n
pglue p1@(PFrag f1) p2@(PFrag f2) = PFrag $ \ g ->
let p@(i1 :# i2 :# s1 :# _) = prox p1 p2 g
in case rules_append p (is_list i1) (is_list i2) (is_list s1) of
(Dict,Dict) -> f1 g `glue` f2 g
where
prox ::
PFrag i1 s1 c a r f1 n ->
PFrag i2 s2 c a r f2 n ->
P g -> (i1 :# i2 :# s1 :# s2 :# c :# c :# a :# g :#N)
prox = cst3 proxies
infix 2 `as`, `asAttr`, `with`, `withAttr`
infixr 1 &
x & y = frag x `pglue` frag y
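-- Thanks to 'MkFrag', heterogeneous operands combine directly with '&':
-- a synthesized label, an inherited initialiser and a ready-made 'PFrag'
-- may all appear, e.g. (hypothetical labels)
-- @heightA & (0 `as` depthA) & someFrag@.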
asAttr :: Attr l a c s n -> l -> Attr l a c s n
x `asAttr` t = x
as ::
( AttrCaseOf l n
, AttrDefined l n
) =>
Type l a c n -> l -> Attr l a c (Mode l) n
x `as` t = attr x `asAttr` t
with = flip as
withAttr = flip asAttr
-- ** Renaming attributes
{- To reuse a fragment more than once in an AG, we must rename
the attributes it uses and produces. Otherwise the leftmost
attribute in the record will override its homonyms.
-}
rename :: (RecBifunctor f', Container c) =>
(Rec a' :~~~> Rec a) ->
(Rec a :~~~> Rec a') ->
(RecF f :~~~~> RecF f') ->
Frag c a f r ->
Frag c a' f' r
rename from_a to_a to_f (Frag rule root) = Frag
{ frule = \c -> mapLocAttrs2 (rmap from_a to_a . to_f)
. rule c . mapLocAttrs2 from_a
, froot = rmap from_a to_a . to_f . root . from_a
}
{- When working with the SRule and IRule classes, we don't
have access to the concrete attribute record type. The following
function allows us to rename exactly one attribute.
-}
type RenameCtxt l l' r r' f f' =
( ModifyBothWays l l' r r'
, Modify l l' f f'
, Attribute l
, Attribute l'
, Mode l ~ Mode l'
, RecBifunctor r
, RecBifunctor r'
, RecBifunctor f'
, Exchange l l')
rename1 :: (RenameCtxt l l' r r' f f', Container c) =>
l -> l' ->
Frag c r f :->
Frag c r' f'
rename1 l l' =
rename (modifyRec (coerceAttr to) (coerceAttr from))
(modifyRec (coerceAttr from) (coerceAttr to))
(modify (coerceAttr to))
where
to = l # l' # N
from = l' # l # N
infix 9 |->
(|->) :: (RenameCtxt l l' r r' f f', Container c) =>
l -> l' ->
PFrag i s c a r f :->
PFrag i s c a r' f'
(|->) l l' (PFrag f) = PFrag $ rename1 l l' . f
renaming = flip ($)
{- Note: (.) is exported with fixity 9 by the Prelude; we must lower it
to avoid parentheses around |-> pairs when composing them.
This is done as a local binding in the following example: -}
renaming_example f a b c d m n =
f `renaming` (a |-> b . c |-> d . m |-> n)
where infixr 8 .
(.) = (Prelude..)
| balez/ag-a-la-carte | Language/Grammars/AGalacarte/Core.hs | gpl-3.0 | 28,989 | 2 | 20 | 8,347 | 11,761 | 6,121 | 5,640 | -1 | -1 |
module Java2js.Convert where
convert =undefined
| ledyba/java.js | lib/Java2js/Convert.hs | gpl-3.0 | 50 | 0 | 4 | 7 | 11 | 7 | 4 | 2 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.UserDataMAppings.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the User data mappings in the specified consent store.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.consentStores.userDataMappings.list@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.UserDataMAppings.List
(
-- * REST Resource
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsListResource
-- * Creating a Request
, projectsLocationsDataSetsConsentStoresUserDataMAppingsList
, ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList
-- * Request Lenses
, pldscsudmalParent
, pldscsudmalXgafv
, pldscsudmalUploadProtocol
, pldscsudmalAccessToken
, pldscsudmalUploadType
, pldscsudmalFilter
, pldscsudmalPageToken
, pldscsudmalPageSize
, pldscsudmalCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.consentStores.userDataMappings.list@ method which the
-- 'ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList' request conforms to.
type ProjectsLocationsDataSetsConsentStoresUserDataMAppingsListResource
=
"v1" :>
Capture "parent" Text :>
"userDataMappings" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListUserDataMAppingsResponse
-- | Lists the User data mappings in the specified consent store.
--
-- /See:/ 'projectsLocationsDataSetsConsentStoresUserDataMAppingsList' smart constructor.
data ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList =
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList'
{ _pldscsudmalParent :: !Text
, _pldscsudmalXgafv :: !(Maybe Xgafv)
, _pldscsudmalUploadProtocol :: !(Maybe Text)
, _pldscsudmalAccessToken :: !(Maybe Text)
, _pldscsudmalUploadType :: !(Maybe Text)
, _pldscsudmalFilter :: !(Maybe Text)
, _pldscsudmalPageToken :: !(Maybe Text)
, _pldscsudmalPageSize :: !(Maybe (Textual Int32))
, _pldscsudmalCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldscsudmalParent'
--
-- * 'pldscsudmalXgafv'
--
-- * 'pldscsudmalUploadProtocol'
--
-- * 'pldscsudmalAccessToken'
--
-- * 'pldscsudmalUploadType'
--
-- * 'pldscsudmalFilter'
--
-- * 'pldscsudmalPageToken'
--
-- * 'pldscsudmalPageSize'
--
-- * 'pldscsudmalCallback'
projectsLocationsDataSetsConsentStoresUserDataMAppingsList
:: Text -- ^ 'pldscsudmalParent'
-> ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList
projectsLocationsDataSetsConsentStoresUserDataMAppingsList pPldscsudmalParent_ =
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList'
{ _pldscsudmalParent = pPldscsudmalParent_
, _pldscsudmalXgafv = Nothing
, _pldscsudmalUploadProtocol = Nothing
, _pldscsudmalAccessToken = Nothing
, _pldscsudmalUploadType = Nothing
, _pldscsudmalFilter = Nothing
, _pldscsudmalPageToken = Nothing
, _pldscsudmalPageSize = Nothing
, _pldscsudmalCallback = Nothing
}
-- | Required. Name of the consent store to retrieve User data mappings from.
pldscsudmalParent :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList Text
pldscsudmalParent
= lens _pldscsudmalParent
(\ s a -> s{_pldscsudmalParent = a})
-- | V1 error format.
pldscsudmalXgafv :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Xgafv)
pldscsudmalXgafv
= lens _pldscsudmalXgafv
(\ s a -> s{_pldscsudmalXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldscsudmalUploadProtocol :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalUploadProtocol
= lens _pldscsudmalUploadProtocol
(\ s a -> s{_pldscsudmalUploadProtocol = a})
-- | OAuth access token.
pldscsudmalAccessToken :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalAccessToken
= lens _pldscsudmalAccessToken
(\ s a -> s{_pldscsudmalAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldscsudmalUploadType :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalUploadType
= lens _pldscsudmalUploadType
(\ s a -> s{_pldscsudmalUploadType = a})
-- | Optional. Restricts the User data mappings returned to those matching a
-- filter. The following syntax is available: * A string field value can be
-- written as text inside quotation marks, for example \`\"query text\"\`.
-- The only valid relational operation for text fields is equality (\`=\`),
-- where text is searched within the field, rather than having the field be
-- equal to the text. For example, \`\"Comment = great\"\` returns messages
-- with \`great\` in the comment field. * A number field value can be
-- written as an integer, a decimal, or an exponential. The valid
-- relational operators for number fields are the equality operator
-- (\`=\`), along with the less than\/greater than operators (\`\<\`,
-- \`\<=\`, \`>\`, \`>=\`). Note that there is no inequality (\`!=\`)
-- operator. You can prepend the \`NOT\` operator to an expression to
-- negate it. * A date field value must be written in \`yyyy-mm-dd\` form.
-- Fields with date and time use the RFC3339 time format. Leading zeros are
-- required for one-digit months and days. The valid relational operators
-- for date fields are the equality operator (\`=\`) , along with the less
-- than\/greater than operators (\`\<\`, \`\<=\`, \`>\`, \`>=\`). Note that
-- there is no inequality (\`!=\`) operator. You can prepend the \`NOT\`
-- operator to an expression to negate it. * Multiple field query
-- expressions can be combined in one query by adding \`AND\` or \`OR\`
-- operators between the expressions. If a boolean operator appears within
-- a quoted string, it is not treated as special, it\'s just another part
-- of the character string to be matched. You can prepend the \`NOT\`
-- operator to an expression to negate it. The fields available for
-- filtering are: - data_id - user_id. For example,
-- \`filter=user_id=\\\"user123\\\"\`. - archived - archive_time
pldscsudmalFilter :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalFilter
= lens _pldscsudmalFilter
(\ s a -> s{_pldscsudmalFilter = a})
-- | Optional. Token to retrieve the next page of results, or empty to get
-- the first page.
pldscsudmalPageToken :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalPageToken
= lens _pldscsudmalPageToken
(\ s a -> s{_pldscsudmalPageToken = a})
-- | Optional. Limit on the number of User data mappings to return in a
-- single response. If not specified, 100 is used. May not be larger than
-- 1000.
pldscsudmalPageSize :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Int32)
pldscsudmalPageSize
= lens _pldscsudmalPageSize
(\ s a -> s{_pldscsudmalPageSize = a})
. mapping _Coerce
-- | JSONP
pldscsudmalCallback :: Lens' ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList (Maybe Text)
pldscsudmalCallback
= lens _pldscsudmalCallback
(\ s a -> s{_pldscsudmalCallback = a})
instance GoogleRequest
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList
where
type Rs
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList
= ListUserDataMAppingsResponse
type Scopes
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsList'{..}
= go _pldscsudmalParent _pldscsudmalXgafv
_pldscsudmalUploadProtocol
_pldscsudmalAccessToken
_pldscsudmalUploadType
_pldscsudmalFilter
_pldscsudmalPageToken
_pldscsudmalPageSize
_pldscsudmalCallback
(Just AltJSON)
healthcareService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsDataSetsConsentStoresUserDataMAppingsListResource)
mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/ConsentStores/UserDataMAppings/List.hs | mpl-2.0 | 9,845 | 0 | 19 | 1,918 | 989 | 583 | 406 | 149 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Plus.People.ListByActivity
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all of the people in the specified collection for a particular
-- activity.
--
-- /See:/ <https://developers.google.com/+/api/ Google+ API Reference> for @plus.people.listByActivity@.
module Network.Google.Resource.Plus.People.ListByActivity
(
-- * REST Resource
PeopleListByActivityResource
-- * Creating a Request
, peopleListByActivity
, PeopleListByActivity
-- * Request Lenses
, plbaActivityId
, plbaCollection
, plbaPageToken
, plbaMaxResults
) where
import Network.Google.Plus.Types
import Network.Google.Prelude
-- | A resource alias for @plus.people.listByActivity@ method which the
-- 'PeopleListByActivity' request conforms to.
type PeopleListByActivityResource =
"plus" :>
"v1" :>
"activities" :>
Capture "activityId" Text :>
"people" :>
Capture "collection" PeopleListByActivityCollection
:>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] PeopleFeed
-- | List all of the people in the specified collection for a particular
-- activity.
--
-- /See:/ 'peopleListByActivity' smart constructor.
data PeopleListByActivity = PeopleListByActivity'
{ _plbaActivityId :: !Text
, _plbaCollection :: !PeopleListByActivityCollection
, _plbaPageToken :: !(Maybe Text)
, _plbaMaxResults :: !(Textual Word32)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PeopleListByActivity' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plbaActivityId'
--
-- * 'plbaCollection'
--
-- * 'plbaPageToken'
--
-- * 'plbaMaxResults'
peopleListByActivity
:: Text -- ^ 'plbaActivityId'
-> PeopleListByActivityCollection -- ^ 'plbaCollection'
-> PeopleListByActivity
peopleListByActivity pPlbaActivityId_ pPlbaCollection_ =
PeopleListByActivity'
{ _plbaActivityId = pPlbaActivityId_
, _plbaCollection = pPlbaCollection_
, _plbaPageToken = Nothing
, _plbaMaxResults = 20
}
-- | The ID of the activity to get the list of people for.
plbaActivityId :: Lens' PeopleListByActivity Text
plbaActivityId
= lens _plbaActivityId
(\ s a -> s{_plbaActivityId = a})
-- | The collection of people to list.
plbaCollection :: Lens' PeopleListByActivity PeopleListByActivityCollection
plbaCollection
= lens _plbaCollection
(\ s a -> s{_plbaCollection = a})
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \"nextPageToken\" from the previous response.
plbaPageToken :: Lens' PeopleListByActivity (Maybe Text)
plbaPageToken
= lens _plbaPageToken
(\ s a -> s{_plbaPageToken = a})
-- | The maximum number of people to include in the response, which is used
-- for paging. For any response, the actual number returned might be less
-- than the specified maxResults.
plbaMaxResults :: Lens' PeopleListByActivity Word32
plbaMaxResults
= lens _plbaMaxResults
(\ s a -> s{_plbaMaxResults = a})
. _Coerce
instance GoogleRequest PeopleListByActivity where
type Rs PeopleListByActivity = PeopleFeed
type Scopes PeopleListByActivity =
'["https://www.googleapis.com/auth/plus.login",
"https://www.googleapis.com/auth/plus.me"]
requestClient PeopleListByActivity'{..}
= go _plbaActivityId _plbaCollection _plbaPageToken
(Just _plbaMaxResults)
(Just AltJSON)
plusService
where go
= buildClient
(Proxy :: Proxy PeopleListByActivityResource)
mempty
| rueshyna/gogol | gogol-plus/gen/Network/Google/Resource/Plus/People/ListByActivity.hs | mpl-2.0 | 4,637 | 0 | 16 | 1,057 | 560 | 332 | 228 | 89 | 1 |
-- | This module is very similar to the "Language.Interpreter.Dao" module, except most of the
-- functions have been renamed to an abbreviation so that they are easier to type into a GHCI
-- Read-Eval-Print Loop session. Another major difference is that this module uses
-- 'System.IO.Unsafe.unsafePerformIO' to create a static 'Language.Interpreter.Dao.SessionState',
-- and most of the 'Session' typed functions in the "Language.Interpreter.Dao" module are simply
-- @IO@ typed functions in this module which act on the static
-- 'Language.Interpreter.Dao.SessionState'.
--
-- There are also features for doing simple debugging of query execution, so you can see what
-- production rules are reacting to which queries, and trace program execution so as to see what
-- data is being constructed and returned (or what errors are thrown) in reaction to the query.
--
-- TODO: write all of the code for this module.
module Language.Interpreter.Dao.GHCI
(
)
where
import Language.Interpreter.Dao.Kernel
| RaminHAL9001/Dao | src/Language/Interpreter/Dao/GHCI.hs | agpl-3.0 | 1,021 | 0 | 4 | 171 | 31 | 26 | 5 | 3 | 0 |
{-# LANGUAGE
NoImplicitPrelude,
GADTs,
DataKinds,
TypeFamilies,
TypeOperators,
RankNTypes,
DeriveFunctor,
UndecidableInstances
#-}
--
module Singletons where
import Prelude hiding (drop,
take,
head,
tail,
index,
zipWith,
replicate,
map,
(++))
--
data Vec a n where
VNil :: Vec a Zero
VCons :: a -> Vec a n -> Vec a (Succ n)
--
-- promoted to type level by data kinds
data Nat = Zero | Succ Nat deriving (Show)
data SNat a where
SZero :: SNat Zero
SSucc :: SNat a -> SNat (Succ a)
--
type family (a :: Nat) :< (b :: Nat) :: Bool
type instance m :< Zero = False
type instance Zero :< Succ n = True
type instance (Succ m) :< (Succ n) = m :< n
type family (Add (a :: Nat) (b :: Nat)) :: Nat
type instance Add Zero m = m
type instance Add (Succ n) m = Succ (Add n m)
-- question: why can't I use a wildcard here???
type family (Sub (a :: Nat) (b :: Nat)) :: Nat
type instance Sub m Zero = m
type instance Sub Zero m = Zero
type instance Sub (Succ n) (Succ m) = Sub n m
type family (Min (a :: Nat) (b :: Nat)) :: Nat
type instance Min m Zero = Zero
type instance Min Zero m = Zero
type instance Min (Succ n) (Succ m) = Succ (Min n m)
map :: (a -> b) -> Vec a n -> Vec b n
map f VNil = VNil
map f (VCons x xs) = VCons (f x) $ map f xs
index :: ((a :< b) ~ True) => SNat a -> Vec s b -> s
index SZero (VCons v _) = v
index (SSucc n) (VCons _ xs) = index n xs
replicate :: s -> SNat a -> Vec s a
replicate s SZero = VNil
replicate s (SSucc n) = VCons s $ replicate s n
-- Both vectors must be of equal length
zipWith :: (x -> y -> z) -> Vec x a -> Vec y a -> Vec z a
zipWith _ VNil VNil = VNil
zipWith f (VCons v l) (VCons v2 l2) = VCons (f v v2) $ zipWith f l l2
(++) :: Vec v m -> Vec v n -> Vec v (Add m n)
VNil ++ ls = ls
(VCons a b) ++ ls = VCons a (b ++ ls)
-- The semantics should match that of take for normal lists.
take :: SNat n -> Vec a m -> Vec a (Min m n)
take _ VNil = VNil
take SZero _ = VNil
take (SSucc n) (VCons a m) = VCons a $ take n m
-- The semantics should match that of drop for normal lists.
drop :: SNat n -> Vec a m -> Vec a (Sub m n)
drop _ VNil = VNil
drop SZero survivor = survivor
drop (SSucc n) (VCons a m) = drop n m
-- Requiring a non-empty index makes 'head' total, matching 'tail' below.
head :: Vec a (Succ n) -> a
head (VCons a _) = a
tail :: Vec a (Succ n) -> Vec a n
tail (VCons _ m) = m
-- Fuck dependent type
numberOne = Succ Zero
numberTwo = Succ numberOne
numberThree = Succ numberTwo
numberFour = Succ numberThree
numberFive = Succ numberFour
numberSix = Succ numberFive
numberSeven = Succ numberSix
exampleTestList = VCons numberFive $ VCons numberSix $ VCons numberTwo $ VCons numberFour $ VCons numberSeven VNil
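-- A few usage examples of the operations above (they only use
-- definitions from this file):
exampleIndex :: Nat
exampleIndex = index (SSucc SZero) exampleTestList -- numberSix
exampleTake :: Vec Nat (Succ (Succ Zero))
exampleTake = take (SSucc (SSucc SZero)) exampleTestList
exampleAppend :: Vec Nat (Succ (Succ (Succ (Succ Zero))))
exampleAppend = exampleTake ++ exampleTake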
| ice1000/OI-codes | codewars/1-100/singletons.hs | agpl-3.0 | 2,903 | 6 | 10 | 912 | 1,194 | 631 | 563 | 76 | 1 |
{-
passman
Copyright (C) 2018-2021 Jonathan Lamothe
<jonathan@jlamothe.net>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this program. If not, see
<https://www.gnu.org/licenses/>.
-}
module Spec.ValidatePWDatabase (tests) where
import qualified Data.Map as M
import Lens.Micro (set)
import System.Random (mkStdGen, StdGen)
import Test.HUnit (Test (..), (~?=))
import Password
tests :: Test
tests = TestLabel "validatePWDatabase" $ TestList $ map test'
[ ( "empty", newPWDatabase, True )
, ( "valid", validDB, True )
, ( "foo invalid", fooInvalid, False )
, ( "bar invalid", barInvalid, False )
]
test' :: (String, PWDatabase, Bool) -> Test
test' (label, x, expect) = TestLabel label $
validatePWDatabase x ~?= expect
validDB :: M.Map String PWData
validDB = M.fromList [("foo", validData), ("bar", validData)]
fooInvalid :: M.Map String PWData
fooInvalid = M.insert "foo" invalidData validDB
barInvalid :: M.Map String PWData
barInvalid = M.insert "bar" invalidData validDB
validData :: PWData
(validData, _) = newPWData g
invalidData :: PWData
invalidData = set (pwPolicy.pwLength) (-1) validData
g :: StdGen
g = mkStdGen 1
--jl
| jlamothe/passman | test/Spec/ValidatePWDatabase.hs | lgpl-3.0 | 1,713 | 0 | 8 | 308 | 341 | 198 | 143 | 27 | 1 |
module Network.Haskoin.Node.Message
( Message(..)
, MessageHeader(..)
) where
import Control.DeepSeq (NFData, rnf)
import Control.Monad (unless)
import Control.Applicative ((<$>),(<*>))
import Data.Word (Word32)
import Data.Binary (Binary, get, put)
import Data.Binary.Get
( lookAhead
, getByteString
, getWord32le
, getWord32be
)
import Data.Binary.Put
( putByteString
, putWord32le
, putWord32be
)
import qualified Data.ByteString as BS
( length
, append
, empty
)
import Network.Haskoin.Node.Types
import Network.Haskoin.Transaction.Types
import Network.Haskoin.Block.Types
import Network.Haskoin.Block.Merkle
import Network.Haskoin.Crypto.Hash
import Network.Haskoin.Node.Bloom
import Network.Haskoin.Constants
import Network.Haskoin.Util
-- | Data type representing the header of a 'Message'. All messages sent between
-- nodes contain a message header.
data MessageHeader =
MessageHeader {
-- | Network magic bytes. It is used to differentiate
-- messages meant for different bitcoin networks, such as
-- prodnet and testnet.
headMagic :: !Word32
-- | Message command identifying the type of message.
-- included in the payload.
, headCmd :: !MessageCommand
-- | Byte length of the payload.
, headPayloadSize :: !Word32
-- | Checksum of the payload.
, headChecksum :: !CheckSum32
} deriving (Eq, Show, Read)
instance NFData MessageHeader where
rnf (MessageHeader m c p s) = rnf m `seq` rnf c `seq` rnf p `seq` rnf s
instance Binary MessageHeader where
get = MessageHeader <$> getWord32be
<*> get
<*> getWord32le
<*> get
put (MessageHeader m c l chk) = do
putWord32be m
put c
putWord32le l
put chk
-- | The 'Message' type is used to identify all the valid messages that can be
-- sent between bitcoin peers. Only values of type 'Message' will be accepted
-- by other bitcoin peers as bitcoin protocol messages need to be correctly
-- serialized with message headers. Serializing a 'Message' value will
-- include the 'MessageHeader' with the correct checksum value automatically.
-- No need to add the 'MessageHeader' separately.
data Message
= MVersion !Version
| MVerAck
| MAddr !Addr
| MInv !Inv
| MGetData !GetData
| MNotFound !NotFound
| MGetBlocks !GetBlocks
| MGetHeaders !GetHeaders
| MTx !Tx
| MBlock !Block
| MMerkleBlock !MerkleBlock
| MHeaders !Headers
| MGetAddr
| MFilterLoad !FilterLoad
| MFilterAdd !FilterAdd
| MFilterClear
| MPing !Ping
| MPong !Pong
| MAlert !Alert
| MReject !Reject
deriving (Eq, Show)
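-- For example, @encode' MGetAddr@ (using 'encode'' from
-- "Network.Haskoin.Util") yields the complete serialized @getaddr@
-- message, network header and checksum included.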
instance Binary Message where
get = do
(MessageHeader mgc cmd len chk) <- get
bs <- lookAhead $ getByteString $ fromIntegral len
unless (mgc == networkMagic)
(fail $ "get: Invalid network magic bytes: " ++ (show mgc))
unless (chksum32 bs == chk)
(fail $ "get: Invalid message checksum: " ++ (show chk))
if len > 0
then isolate (fromIntegral len) $ case cmd of
MCVersion -> MVersion <$> get
MCAddr -> MAddr <$> get
MCInv -> MInv <$> get
MCGetData -> MGetData <$> get
MCNotFound -> MNotFound <$> get
MCGetBlocks -> MGetBlocks <$> get
MCGetHeaders -> MGetHeaders <$> get
MCTx -> MTx <$> get
MCBlock -> MBlock <$> get
MCMerkleBlock -> MMerkleBlock <$> get
MCHeaders -> MHeaders <$> get
MCFilterLoad -> MFilterLoad <$> get
MCFilterAdd -> MFilterAdd <$> get
MCPing -> MPing <$> get
MCPong -> MPong <$> get
MCAlert -> MAlert <$> get
MCReject -> MReject <$> get
_ -> fail $ "get: Invalid command " ++ (show cmd)
else case cmd of
MCGetAddr -> return MGetAddr
MCVerAck -> return MVerAck
MCFilterClear -> return MFilterClear
_ -> fail $ "get: Invalid command " ++ (show cmd)
put msg = do
let (cmd, payload) = case msg of
MVersion m -> (MCVersion, encode' m)
MVerAck -> (MCVerAck, BS.empty)
MAddr m -> (MCAddr, encode' m)
MInv m -> (MCInv, encode' m)
MGetData m -> (MCGetData, encode' m)
MNotFound m -> (MCNotFound, encode' m)
MGetBlocks m -> (MCGetBlocks, encode' m)
MGetHeaders m -> (MCGetHeaders, encode' m)
MTx m -> (MCTx, encode' m)
MBlock m -> (MCBlock, encode' m)
MMerkleBlock m -> (MCMerkleBlock, encode' m)
MHeaders m -> (MCHeaders, encode' m)
MGetAddr -> (MCGetAddr, BS.empty)
MFilterLoad m -> (MCFilterLoad, encode' m)
MFilterAdd m -> (MCFilterAdd, encode' m)
MFilterClear -> (MCFilterClear, BS.empty)
MPing m -> (MCPing, encode' m)
MPong m -> (MCPong, encode' m)
MAlert m -> (MCAlert, encode' m)
MReject m -> (MCReject, encode' m)
chk = chksum32 payload
len = fromIntegral $ BS.length payload
header = MessageHeader networkMagic cmd len chk
putByteString $ (encode' header) `BS.append` payload
| nuttycom/haskoin | Network/Haskoin/Node/Message.hs | unlicense | 6,001 | 0 | 15 | 2,268 | 1,347 | 718 | 629 | 171 | 0 |
import Control.Monad
import System.Exit (exitSuccess)
import Data.Char (toLower, isLetter)
palindrome :: IO ()
palindrome = forever $ do
line1 <- getLine
case (line1 == reverse line1) of
True -> putStrLn "It's a palindrome!"
False -> do
putStrLn "Nope!"
exitSuccess
palindrome' :: IO ()
palindrome' = forever $ do
-- Get input. Clear out spaces, and punctuation. Change all caps to lowercase.
line1 <- getLine >>= (return . filter isLetter . map toLower)
case (line1 == reverse line1) of
True -> putStrLn "It's a palindrome!"
False -> do
putStrLn "Nope!"
exitSuccess
| dmp1ce/Haskell-Programming-Exercises | Chapter 13/palindrome.hs | unlicense | 620 | 0 | 13 | 143 | 185 | 90 | 95 | 19 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Import/Export</title>
<maps>
<homeID>exim</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | secdec/zap-extensions | addOns/exim/src/main/javahelp/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 959 | 77 | 67 | 155 | 408 | 207 | 201 | -1 | -1 |
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
-- | Minimal set of imports needed to support basic CodeWorld syntax.
-- Many of these are not directly used by students, but are needed for
-- desugaring with the RebindableSyntax extension.
module Core (
Program,
Number,
Text,
Truth,
Bool(..),
fromInteger,
fromRational,
fromString,
ifThenElse,
fail
) where
| alphalambda/codeworld | codeworld-base/src/Core.hs | apache-2.0 | 965 | 0 | 5 | 207 | 44 | 31 | 13 | 11 | 0 |
{- Copyright 2014 David Farrell <shokku.ra@gmail.com>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module CModeNoExternal where
import qualified Data.Map as M
import IRC.Numeric
import IRC.Action
import qualified IRC.Server.Client as Client
import qualified IRC.Server.Channel as Chan
import qualified IRC.Server.Environment as Env
import Plugin
plugin = defaultPlugin {handlers=[TransformHandler trans]}
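-- Channel mode +n ("no external messages"): a PRIVMSG to a channel from a
-- client that has not joined it is rejected with ERR_CANNOTSENDTOCHAN.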
trans :: TransformHSpec
trans env = env {Env.actions=map f (Env.actions env)}
where
channels = Client.channels (Env.client env)
f a@(ChanAction "Privmsg" chanName _) = if notElem 'n' (Chan.modes chan) || elem chanName channels
then a
else GenericAction $ \e -> sendNumeric e numERR_CANNOTSENDTOCHAN [chanName, "Cannot send to channel"]
>> return e
where chan = Env.channels (Env.local env) M.! chanName
f a = a
| shockkolate/lambdircd | plugins.old/CModeNoExternal.hs | apache-2.0 | 1,375 | 0 | 13 | 255 | 242 | 137 | 105 | 18 | 3 |
-- 45228
import Data.List(nub)
import Euler(digitUsage, fromDigits, intSqrt)
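-- Project Euler 32: an identity a * b = c is 1..9 pandigital when the
-- digits of a, b and c together use each digit 1..9 exactly once.
-- 'genPandigital' enumerates the products of all such identities and
-- 'sumPandigital' sums them, dropping duplicates with 'nub'.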
kk = 9
isPandigital n a b = digitUsage n == xs
where xs = zipWith3 (\x y z -> x+y+z)
(digitUsage a) (digitUsage b) (digitUsage $ a*b)
genPandigital m n = [x*y | x <- [1..intSqrt m],
y <- [x+1..m `div` x], isPandigital n x y]
sumPandigital k = sum $ nub $ genPandigital (fromDigits ys) (fromDigits xs)
where xs = reverse [1..k]
n = (k-1) `div` 2
ys = take n xs
main = putStrLn $ show $ sumPandigital kk
| higgsd/euler | hs/32.hs | bsd-2-clause | 556 | 0 | 11 | 167 | 268 | 142 | 126 | 13 | 1 |
-- |
-- Module : Main
-- Copyright : [2013] Manuel M T Chakravarty
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <chak@cse.unsw.edu.au>
-- Portability : haskell2011
module Graphics.Gloss.Game (
-- * Reexport some basic Gloss datatypes
module Graphics.Gloss.Data.Color,
module Graphics.Gloss.Data.Display,
module Graphics.Gloss.Data.Picture,
module Graphics.Gloss.Interface.Pure.Game,
-- * Geometry
Size, Rect,
-- * Load sprites into pictures
bmp, png, jpg,
-- * Query pictures
boundingBox,
-- * More convenient game play
play, playInScene,
-- * Game scenes
Animation, animation, noAnimation, animationPicture,
Scene, picture, picturing, animating, translating, rotating, scaling, scenes,
drawScene,
) where
-- standard libraries
import Data.IORef
import Data.Maybe
import System.IO.Unsafe (unsafePerformIO)
-- packages
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Data.Display
import Graphics.Gloss.Data.Picture hiding (Picture(..))
import Graphics.Gloss.Data.Picture (Picture) -- keep 'Picture' abstract
import Graphics.Gloss.Interface.Pure.Game (Event(..), Key(..), SpecialKey(..), MouseButton(..), KeyState(..))
import Graphics.Gloss.Juicy
import qualified Graphics.Gloss as G
import qualified Graphics.Gloss.Interface.IO.Game as G
-- Geometry
-- --------
type Size = (Float, Float) -- ^width & height
type Rect = (Point, Size) -- ^origin & extent, where the origin is at the centre
-- On-the-fly image loading
-- ------------------------
-- |Turn a bitmap file into a picture.
--
-- NB: Define loaded pictures on the toplevel to avoid reloading.
--
bmp :: FilePath -> Picture
bmp fname = unsafePerformIO $ loadBMP fname
-- |Turn a PNG file into a picture.
--
-- NB: Define loaded pictures on the toplevel to avoid reloading.
--
png :: FilePath -> Picture
png fname = maybe (text "PNG ERROR") id (unsafePerformIO $ loadJuicyPNG fname)
-- |Turn a JPEG file into a picture.
--
-- NB: Define loaded pictures on the toplevel to avoid reloading.
--
jpg :: FilePath -> Picture
jpg fname = maybe (text "JPEG ERROR") id (unsafePerformIO $ loadJuicyJPG fname)
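-- For instance (hypothetical file name, shown only as a sketch):
--
-- > playerSprite :: Picture
-- > playerSprite = png "assets/player.png"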
-- Query pictures
-- --------------
-- |Determine the bounding box of a picture.
--
-- FIXME: Current implementation is incomplete!
--
boundingBox :: Picture -> Rect
boundingBox G.Blank = ((0, 0), (0, 0))
boundingBox (G.Polygon _) = error "Graphics.Gloss.Game.boundingbox: Polygon not implemented yet"
boundingBox (G.Line _) = error "Graphics.Gloss.Game.boundingbox: Line not implemented yet"
boundingBox (G.Circle r) = ((0, 0), (2 * r, 2 * r))
boundingBox (G.ThickCircle t r) = ((0, 0), (2 * r + t, 2 * r + t))
boundingBox (G.Arc _ _ _) = error "Graphics.Gloss.Game.boundingbox: Arc not implemented yet"
boundingBox (G.ThickArc _ _ _ _) = error "Graphics.Gloss.Game.boundingbox: ThickArc not implemented yet"
boundingBox (G.Text _) = error "Graphics.Gloss.Game.boundingbox: Text not implemented yet"
boundingBox (G.Bitmap w h _ _) = ((0, 0), (fromIntegral w, fromIntegral h))
boundingBox (G.Color _ p) = boundingBox p
boundingBox (G.Translate dx dy p) = let ((x, y), size) = boundingBox p in ((x + dx, y + dy), size)
boundingBox (G.Rotate _ang _p) = error "Graphics.Gloss.Game.boundingbox: Rotate not implemented yet"
boundingBox (G.Scale xf yf p) = let (origin, (w, h)) = boundingBox p in (origin, (w * xf, h * yf))
boundingBox (G.Pictures _ps) = error "Graphics.Gloss.Game.boundingbox: Pictures not implemented yet"
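-- A small sanity-check sketch (not part of the original API): by the cases
-- above, a plain circle of radius 10 is centred at the origin and spans 20
-- units in each direction.
_exampleCircleBox :: Rect
_exampleCircleBox = boundingBox (circle 10)    -- ((0, 0), (20, 20))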
-- Extended play function
-- ----------------------
-- |Play a game.
--
play :: Display -- ^Display mode
-> Color -- ^Background color
-> Int -- ^Number of simulation steps to take for each second of real time
-> world -- ^The initial world state
-> (world -> Picture) -- ^A function to convert the world to a picture
-> (Event -> world -> world) -- ^A function to handle individual input events
-> [Float -> world -> world] -- ^Set of functions invoked once per iteration —
-- first argument is the period of time (in seconds) needing to be advanced
-> IO ()
play display bg fps world draw handler steppers
= G.play display bg fps world draw handler (perform steppers)
where
perform [] _time world = world
perform (stepper:steppers) time world = perform steppers time (stepper time world)
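-- A minimal usage sketch (window size, colours and numbers are made up): the
-- world is a single Float that one stepper grows over time and that is drawn
-- as a circle of that radius.
_examplePlay :: IO ()
_examplePlay = play (InWindow "demo" (400, 400) (50, 50)) white 30 0
                    circle
                    (\_event world -> world)
                    [\dt world -> world + dt * 10]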
-- Global variable to keep track of the time since we started playing (there can only ever be one game at a time anyway).
--
currentTime :: IORef Float
{-# NOINLINE currentTime #-}
currentTime = unsafePerformIO $ newIORef 0
-- |Play a game in a scene.
--
playInScene :: Display -- ^Display mode
-> Color -- ^Background color
-> Int -- ^Number of simulation steps to take for each second of real time
-> world -- ^The initial world state
-> Scene world -- ^A scene parameterised by the world
-> (Float -> Event -> world -> world) -- ^A function to handle individual input events
-- * first argument is the absolute time (in seconds)
-> [Float -> Float -> world -> world] -- ^Set of functions invoked once per iteration —
-- * first argument is the absolute time (in seconds)
-- * second argument is the period of time needing to be advanced
-> IO ()
playInScene display bg fps world scene handler steppers
= G.playIO display bg fps world drawSceneNow performHandler (advanceTimeAndPerform steppers)
where
drawSceneNow world
= do
{ now <- readIORef currentTime
; return $ drawScene scene now world
}
performHandler event world
= do
{ now <- readIORef currentTime
; return $ handler now event world
}
advanceTimeAndPerform steppers deltaT world
= do
{ now <- readIORef currentTime
; let future = now + deltaT
; writeIORef currentTime future
; perform steppers future deltaT world
}
perform [] _now _deltaT world = return world
perform (stepper:steppers) now deltaT world = perform steppers now deltaT (stepper now deltaT world)
-- Scenes are parameterised pictures
-- ---------------------------------
-- |An abstract representation of an animation.
--
data Animation = Animation [Picture] Float Float
-- |Construct a new animation with a list of pictures for the animation, the time between animation frames, and a given
-- (absolute) start time.
--
animation :: [Picture] -> Float -> Float -> Animation
animation = Animation
-- |An empty animation.
--
noAnimation :: Animation
noAnimation = animation [] 1 0
animationPicture :: Animation -> Float -> Maybe Picture
animationPicture (Animation pics delay start) time
| start > time = Nothing
| i >= length pics = Nothing
| otherwise = Just $ pics !! i
where
i = round ((time - start) / delay)
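-- A small sketch with made-up frames: an animation that starts two seconds
-- into the game and advances to the next frame every 0.25 seconds. Before the
-- start time (and after the last frame) 'animationPicture' yields Nothing, so
-- 'animating' falls back to its default picture.
_exampleAnimation :: Animation
_exampleAnimation = animation [circle 5, circle 10, circle 15] 0.25 2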
-- |A scene describes the rendering of a world state — i.e., which picture should be drawn depending on the current time
-- and on the state of the world.
--
data Scene world
= Picturing (Float -> world -> Picture)
| Translating ( world -> Point) (Scene world)
| Rotating ( world -> Float) (Scene world)
| Scaling ( world -> (Float, Float)) (Scene world)
| Scenes [Scene world]
-- |Turn a static picture into a scene.
--
picture :: Picture -> Scene world
picture p = picturing (const p)
-- |Turn a world-dependent picture into a scene.
--
picturing :: (world -> Picture) -> Scene world
picturing worldToPic = Picturing (const worldToPic)
-- |Animate a world-dependent animation. The default picture is displayed while no animation is running.
--
animating :: (world -> Animation) -> Picture -> Scene world
animating anim defaultPic
= Picturing (\currentTime world -> fromMaybe defaultPic $ animationPicture (anim world) currentTime)
-- |Move a scene in dependence on a world-dependent location.
--
translating :: (world -> Point) -> Scene world -> Scene world
translating = Translating
-- |Rotate a scene in dependence on a world-dependent angle.
--
rotating :: (world -> Float) -> Scene world -> Scene world
rotating = Rotating
-- |Scale a scene in dependence on world-dependent scaling factors.
--
scaling :: (world -> (Float, Float)) -> Scene world -> Scene world
scaling = Scaling
-- |Compose a scene from a list of scenes.
--
scenes :: [Scene world] -> Scene world
scenes = Scenes
-- |Render a scene on the basis of time since playing started and the specific world state.
--
drawScene :: Scene world -> Float -> world -> Picture
drawScene scene time world = drawS scene
where
drawS (Picturing draw) = draw time world
drawS (Translating movement scene) = let (x, y) = movement world in translate x y (drawS scene)
drawS (Rotating rotation scene) = rotate (rotation world) (drawS scene)
drawS (Scaling scaling scene) = let (xf, yf) = scaling world in scale xf yf (drawS scene)
drawS (Scenes scenes) = pictures $ map drawS scenes
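-- A tiny scene sketch (assumes the world is a single Float used as an x
-- offset): a circle of radius 10 that the world value slides horizontally,
-- e.g. drawScene _exampleScene 0 42 draws it at x = 42.
_exampleScene :: Scene Float
_exampleScene = translating (\x -> (x, 0)) (picture (circle 10))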
-- -- Game objects
-- -- ------------
--
-- data Object objClass = | mchakravarty/gloss-game | Graphics/Gloss/Game.hs | bsd-3-clause | 9,807 | 0 | 15 | 2,644 | 2,155 | 1,196 | 959 | 126 | 5 |
{-# LANGUAGE TemplateHaskell #-}
module Language.Haskell.Liquid.TH.Misc (
-- * Convert Data to Exp
dataToExpQ'
-- * Split AnnoType
, splitSigmaRTy
, splitForAllRTy
, splitPhiRTy
, splitCxtArrowRTy
, splitCxtArrowRTy_maybe
, splitTuplesRTy
, splitTupleRTy
, splitTupleRTy_maybe
) where
import Control.Applicative
import Control.Arrow
import Data.Data
import Data.List
import Data.Typeable
import qualified Data.Text as T
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.TH.WiredIns
--------------------------------------------------------------------------------
-- Convert Data to Exp ---------------------------------------------------------
--------------------------------------------------------------------------------
dataToExpQ' :: Data a => a -> Q Exp
dataToExpQ' = dataToExpQ pack
where
pack :: Data b => b -> Maybe (Q Exp)
pack x = (packStr <$> cast x) <|> (packText <$> cast x)
packStr :: String -> Q Exp
packStr = litE . StringL
packText :: T.Text -> Q Exp
packText = appE (varE 'T.pack) . packStr . T.unpack
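-- A hypothetical usage sketch: lift a value containing both a 'T.Text' and an
-- ordinary list back into a Template Haskell expression, exercising the
-- special cases handled by 'pack' above.
_exampleLift :: Q Exp
_exampleLift = dataToExpQ' (T.pack "hello", [1 :: Int, 2, 3])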
--------------------------------------------------------------------------------
-- Split AnnoType --------------------------------------------------------------
--------------------------------------------------------------------------------
splitSigmaRTy :: AnnoType -> ([String], [AnnoType], AnnoType)
splitSigmaRTy ty =
let (tvs, rho) = splitForAllRTy ty
(theta, tau) = splitPhiRTy rho
in (tvs, theta, tau)
splitForAllRTy :: AnnoType -> ([String], AnnoType)
splitForAllRTy ty = split ty []
where
split (RAllT tv ty) tvs = split ty (tv:tvs)
split ty tvs = (reverse tvs, ty)
splitPhiRTy :: AnnoType -> ([AnnoType], AnnoType)
splitPhiRTy ty = first concat $ split ty []
where
split ty cs = case splitCxtArrowRTy_maybe ty of
Just (c, ty') -> split ty' (c:cs)
Nothing -> (reverse cs, ty)
splitCxtArrowRTy :: AnnoType -> ([AnnoType], AnnoType)
splitCxtArrowRTy ty = case splitCxtArrowRTy_maybe ty of
Just out -> out
Nothing -> ([], ty)
splitCxtArrowRTy_maybe :: AnnoType -> Maybe ([AnnoType], AnnoType)
splitCxtArrowRTy_maybe (RApp c [cxt, ty] _ _)
| val c == cxtArrowTcName = Just (splitTupleRTy cxt, ty)
splitCxtArrowRTy_maybe _ = Nothing
splitTuplesRTy :: AnnoType -> [AnnoType]
splitTuplesRTy t = case splitTupleRTy_maybe t of
Just ts -> concatMap splitTuplesRTy ts
Nothing -> [t]
splitTupleRTy :: AnnoType -> [AnnoType]
splitTupleRTy t = case splitTupleRTy_maybe t of
Just ts -> ts
Nothing -> [t]
splitTupleRTy_maybe :: AnnoType -> Maybe [AnnoType]
splitTupleRTy_maybe (RApp c as _ _) | val c == tupleTcName = Just as
splitTupleRTy_maybe _ = Nothing
| spinda/liquidhaskell | src/Language/Haskell/Liquid/TH/Misc.hs | bsd-3-clause | 2,844 | 0 | 12 | 493 | 816 | 442 | 374 | 63 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Provers.Prover
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Provable abstraction and the connection to SMT solvers
-----------------------------------------------------------------------------
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE BangPatterns #-}
module Data.SBV.Provers.Prover (
SMTSolver(..), SMTConfig(..), Predicate, Provable(..)
, ThmResult(..), SatResult(..), AllSatResult(..), SMTResult(..)
, isSatisfiable, isSatisfiableWith, isTheorem, isTheoremWith
, prove, proveWith
, sat, satWith
, allSat, allSatWith
, isVacuous, isVacuousWith
, SatModel(..), Modelable(..), displayModels, extractModels
, getModelDictionaries, getModelValues, getModelUninterpretedValues
, boolector, cvc4, yices, z3, mathSAT, defaultSMTCfg
, compileToSMTLib, generateSMTBenchmarks
, isSBranchFeasibleInState
) where
import Control.Monad (when, unless)
import Control.Monad.Trans (liftIO)
import Data.List (intercalate)
import Data.Maybe (mapMaybe, fromMaybe)
import System.FilePath (addExtension, splitExtension)
import System.Time (getClockTime)
import System.IO.Unsafe (unsafeInterleaveIO)
import qualified Data.Set as Set (Set, toList)
import Data.SBV.BitVectors.Data
import Data.SBV.SMT.SMT
import Data.SBV.SMT.SMTLib
import qualified Data.SBV.Provers.Boolector as Boolector
import qualified Data.SBV.Provers.CVC4 as CVC4
import qualified Data.SBV.Provers.Yices as Yices
import qualified Data.SBV.Provers.Z3 as Z3
import qualified Data.SBV.Provers.MathSAT as MathSAT
import Data.SBV.Utils.TDiff
mkConfig :: SMTSolver -> Bool -> [String] -> SMTConfig
mkConfig s isSMTLib2 tweaks = SMTConfig { verbose = False
, timing = False
, sBranchTimeOut = Nothing
, timeOut = Nothing
, printBase = 10
, printRealPrec = 16
, smtFile = Nothing
, solver = s
, solverTweaks = tweaks
, useSMTLib2 = isSMTLib2
, satCmd = "(check-sat)"
, roundingMode = RoundNearestTiesToEven
, useLogic = Nothing
}
-- | Default configuration for the Boolector SMT solver
boolector :: SMTConfig
boolector = mkConfig Boolector.boolector True []
-- | Default configuration for the CVC4 SMT Solver.
cvc4 :: SMTConfig
cvc4 = mkConfig CVC4.cvc4 True []
-- | Default configuration for the Yices SMT Solver.
yices :: SMTConfig
yices = mkConfig Yices.yices False []
-- | Default configuration for the Z3 SMT solver
z3 :: SMTConfig
z3 = mkConfig Z3.z3 True ["(set-option :smt.mbqi true) ; use model based quantifier instantiation"]
-- | Default configuration for the MathSAT SMT solver
mathSAT :: SMTConfig
mathSAT = mkConfig MathSAT.mathSAT True []
-- | The default solver used by SBV. This is currently set to z3.
defaultSMTCfg :: SMTConfig
defaultSMTCfg = z3
-- | A predicate is a symbolic program that returns a (symbolic) boolean value. For all intents and
-- purposes, it can be treated as an n-ary function from symbolic-values to a boolean. The 'Symbolic'
-- monad captures the underlying representation, and can/should be ignored by the users of the library,
-- unless you are building further utilities on top of SBV itself. Instead, simply use the 'Predicate'
-- type when necessary.
type Predicate = Symbolic SBool
-- | A type @a@ is provable if we can turn it into a predicate.
-- Note that a predicate can be made from a curried function of arbitrary arity, where
-- each element is either a symbolic type or up to a 7-tuple of symbolic types. So
-- predicates can be constructed from almost arbitrary Haskell functions that have arbitrary
-- shapes. (See the instance declarations below.)
class Provable a where
-- | Turns a value into a universally quantified predicate, internally naming the inputs.
-- In this case the sbv library will use names of the form @s1, s2@, etc. to name these variables
-- Example:
--
-- > forAll_ $ \(x::SWord8) y -> x `shiftL` 2 .== y
--
-- is a predicate with two arguments, captured using an ordinary Haskell function. Internally,
-- @x@ will be named @s0@ and @y@ will be named @s1@.
forAll_ :: a -> Predicate
-- | Turns a value into a predicate, allowing users to provide names for the inputs.
  -- If the user does not provide enough names for the variables, the remaining ones
  -- will be internally generated. Note that the names are only used for printing models and have no
-- other significance; in particular, we do not check that they are unique. Example:
--
-- > forAll ["x", "y"] $ \(x::SWord8) y -> x `shiftL` 2 .== y
--
-- This is the same as above, except the variables will be named @x@ and @y@ respectively,
-- simplifying the counter-examples when they are printed.
forAll :: [String] -> a -> Predicate
-- | Turns a value into an existentially quantified predicate. (Indeed, 'exists' would have been
-- a better choice here for the name, but alas it's already taken.)
forSome_ :: a -> Predicate
-- | Version of 'forSome' that allows user defined names
forSome :: [String] -> a -> Predicate
instance Provable Predicate where
forAll_ = id
forAll [] = id
forAll xs = error $ "SBV.forAll: Extra unmapped name(s) in predicate construction: " ++ intercalate ", " xs
forSome_ = id
forSome [] = id
forSome xs = error $ "SBV.forSome: Extra unmapped name(s) in predicate construction: " ++ intercalate ", " xs
instance Provable SBool where
forAll_ = return
forAll _ = return
forSome_ = return
forSome _ = return
{-
-- The following works, but it lets us write properties that
-- are not useful.. Such as: prove $ \x y -> (x::SInt8) == y
-- Running that will throw an exception since Haskell's equality
-- is not be supported by symbolic things. (Needs .==).
instance Provable Bool where
forAll_ x = forAll_ (if x then true else false :: SBool)
forAll s x = forAll s (if x then true else false :: SBool)
forSome_ x = forSome_ (if x then true else false :: SBool)
forSome s x = forSome s (if x then true else false :: SBool)
-}
-- Functions
instance (SymWord a, Provable p) => Provable (SBV a -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ k a
forAll (s:ss) k = forall s >>= \a -> forAll ss $ k a
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ k a
forSome (s:ss) k = exists s >>= \a -> forSome ss $ k a
forSome [] k = forSome_ k
-- Arrays (memory), only supported universally for the time being
instance (HasKind a, HasKind b, SymArray array, Provable p) => Provable (array a b -> p) where
forAll_ k = newArray_ Nothing >>= \a -> forAll_ $ k a
forAll (s:ss) k = newArray s Nothing >>= \a -> forAll ss $ k a
forAll [] k = forAll_ k
forSome_ _ = error "SBV.forSome: Existential arrays are not currently supported."
forSome _ _ = error "SBV.forSome: Existential arrays are not currently supported."
-- 2 Tuple
instance (SymWord a, SymWord b, Provable p) => Provable ((SBV a, SBV b) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b -> k (a, b)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b -> k (a, b)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b -> k (a, b)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b -> k (a, b)
forSome [] k = forSome_ k
-- 3 Tuple
instance (SymWord a, SymWord b, SymWord c, Provable p) => Provable ((SBV a, SBV b, SBV c) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b c -> k (a, b, c)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b c -> k (a, b, c)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b c -> k (a, b, c)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b c -> k (a, b, c)
forSome [] k = forSome_ k
-- 4 Tuple
instance (SymWord a, SymWord b, SymWord c, SymWord d, Provable p) => Provable ((SBV a, SBV b, SBV c, SBV d) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b c d -> k (a, b, c, d)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b c d -> k (a, b, c, d)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b c d -> k (a, b, c, d)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b c d -> k (a, b, c, d)
forSome [] k = forSome_ k
-- 5 Tuple
instance (SymWord a, SymWord b, SymWord c, SymWord d, SymWord e, Provable p) => Provable ((SBV a, SBV b, SBV c, SBV d, SBV e) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b c d e -> k (a, b, c, d, e)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b c d e -> k (a, b, c, d, e)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b c d e -> k (a, b, c, d, e)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b c d e -> k (a, b, c, d, e)
forSome [] k = forSome_ k
-- 6 Tuple
instance (SymWord a, SymWord b, SymWord c, SymWord d, SymWord e, SymWord f, Provable p) => Provable ((SBV a, SBV b, SBV c, SBV d, SBV e, SBV f) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b c d e f -> k (a, b, c, d, e, f)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b c d e f -> k (a, b, c, d, e, f)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b c d e f -> k (a, b, c, d, e, f)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b c d e f -> k (a, b, c, d, e, f)
forSome [] k = forSome_ k
-- 7 Tuple
instance (SymWord a, SymWord b, SymWord c, SymWord d, SymWord e, SymWord f, SymWord g, Provable p) => Provable ((SBV a, SBV b, SBV c, SBV d, SBV e, SBV f, SBV g) -> p) where
forAll_ k = forall_ >>= \a -> forAll_ $ \b c d e f g -> k (a, b, c, d, e, f, g)
forAll (s:ss) k = forall s >>= \a -> forAll ss $ \b c d e f g -> k (a, b, c, d, e, f, g)
forAll [] k = forAll_ k
forSome_ k = exists_ >>= \a -> forSome_ $ \b c d e f g -> k (a, b, c, d, e, f, g)
forSome (s:ss) k = exists s >>= \a -> forSome ss $ \b c d e f g -> k (a, b, c, d, e, f, g)
forSome [] k = forSome_ k
-- | Prove a predicate, equivalent to @'proveWith' 'defaultSMTCfg'@
prove :: Provable a => a -> IO ThmResult
prove = proveWith defaultSMTCfg
-- | Find a satisfying assignment for a predicate, equivalent to @'satWith' 'defaultSMTCfg'@
sat :: Provable a => a -> IO SatResult
sat = satWith defaultSMTCfg
-- | Return all satisfying assignments for a predicate, equivalent to @'allSatWith' 'defaultSMTCfg'@.
-- Satisfying assignments are constructed lazily, so they will be available as returned by the solver
-- and on demand.
--
-- NB. Uninterpreted constant/function values and counter-examples for array values are ignored for
-- the purposes of @'allSat'@. That is, only the satisfying assignments modulo uninterpreted functions and
-- array inputs will be returned. This is due to the limitation of not having a robust means of getting a
-- function counter-example back from the SMT solver.
allSat :: Provable a => a -> IO AllSatResult
allSat = allSatWith defaultSMTCfg
-- | Check if the given constraints are satisfiable, equivalent to @'isVacuousWith' 'defaultSMTCfg'@.
-- See the function 'constrain' for an example use of 'isVacuous'.
isVacuous :: Provable a => a -> IO Bool
isVacuous = isVacuousWith defaultSMTCfg
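-- A hypothetical end-user sketch (written against the public 'Data.SBV' API,
-- not compiled as part of this module):
--
-- > {-# LANGUAGE ScopedTypeVariables #-}
-- > import Data.SBV
-- >
-- > -- addition on 8-bit words is commutative ...
-- > commutes :: IO ThmResult
-- > commutes = prove $ \(x :: SWord8) y -> x + y .== y + x
-- >
-- > -- ... and there is an 8-bit word strictly between 3 and 5
-- > between :: IO SatResult
-- > between = sat $ \(x :: SWord8) -> x .> 3 &&& x .< 5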
-- Decision procedures (with optional timeout)
-- | Check whether a given property is a theorem, with an optional time out and the given solver.
-- Returns @Nothing@ if it times out, or the result wrapped in a @Just@ otherwise.
isTheoremWith :: Provable a => SMTConfig -> Maybe Int -> a -> IO (Maybe Bool)
isTheoremWith cfg mbTo p = do r <- proveWith cfg{timeOut = mbTo} p
case r of
ThmResult (Unsatisfiable _) -> return $ Just True
ThmResult (Satisfiable _ _) -> return $ Just False
ThmResult (TimeOut _) -> return Nothing
_ -> error $ "SBV.isTheorem: Received:\n" ++ show r
-- | Check whether a given property is satisfiable, with an optional time out and the given solver.
-- Returns @Nothing@ if it times out, or the result wrapped in a @Just@ otherwise.
isSatisfiableWith :: Provable a => SMTConfig -> Maybe Int -> a -> IO (Maybe Bool)
isSatisfiableWith cfg mbTo p = do r <- satWith cfg{timeOut = mbTo} p
case r of
SatResult (Satisfiable _ _) -> return $ Just True
SatResult (Unsatisfiable _) -> return $ Just False
SatResult (TimeOut _) -> return Nothing
_ -> error $ "SBV.isSatisfiable: Received: " ++ show r
-- | Checks theoremhood within the given optional time limit of @i@ seconds.
-- Returns @Nothing@ if it times out, or the result wrapped in a @Just@ otherwise.
isTheorem :: Provable a => Maybe Int -> a -> IO (Maybe Bool)
isTheorem = isTheoremWith defaultSMTCfg
-- | Checks satisfiability within the given optional time limit of @i@ seconds.
-- Returns @Nothing@ if it times out, or the result wrapped in a @Just@ otherwise.
isSatisfiable :: Provable a => Maybe Int -> a -> IO (Maybe Bool)
isSatisfiable = isSatisfiableWith defaultSMTCfg
-- | Compiles to SMT-Lib and returns the resulting program as a string. Useful for saving
-- the result to a file for off-line analysis, for instance if you have an SMT solver that's not natively
-- supported out-of-the box by the SBV library. It takes two booleans:
--
-- * smtLib2: If 'True', will generate SMT-Lib2 output, otherwise SMT-Lib1 output
--
-- * isSat : If 'True', will translate it as a SAT query, i.e., in the positive. If 'False', will
-- translate as a PROVE query, i.e., it will negate the result. (In this case, the check-sat
-- call to the SMT solver will produce UNSAT if the input is a theorem, as usual.)
compileToSMTLib :: Provable a => Bool -- ^ If True, output SMT-Lib2, otherwise SMT-Lib1
-> Bool -- ^ If True, translate directly, otherwise negate the goal. (Use True for SAT queries, False for PROVE queries.)
-> a
-> IO String
compileToSMTLib smtLib2 isSat a = do
t <- getClockTime
let comments = ["Created on " ++ show t]
cvt = if smtLib2 then toSMTLib2 else toSMTLib1
(_, _, _, _, smtLibPgm) <- simulate cvt defaultSMTCfg isSat comments a
let out = show smtLibPgm
return $ out ++ if smtLib2 -- append check-sat in case of smtLib2
then "\n(check-sat)\n"
else "\n"
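-- For example, to dump the SMT-Lib2 rendering of a satisfiability query to a
-- file (user-level sketch; the file name is made up):
--
-- > do pgm <- compileToSMTLib True True $ \(x :: SWord8) -> x * 3 .== 12
-- >    writeFile "query.smt2" pgm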
-- | Create both SMT-Lib1 and SMT-Lib2 benchmarks. The first argument is the basename of the file,
-- SMT-Lib1 version will be written with suffix ".smt1" and SMT-Lib2 version will be written with
-- suffix ".smt2". The 'Bool' argument controls whether this is a SAT instance, i.e., translate the query
-- directly, or a PROVE instance, i.e., translate the negated query. (See the second boolean argument to
-- 'compileToSMTLib' for details.)
generateSMTBenchmarks :: Provable a => Bool -> FilePath -> a -> IO ()
generateSMTBenchmarks isSat f a = gen False smt1 >> gen True smt2
where smt1 = addExtension f "smt1"
smt2 = addExtension f "smt2"
gen b fn = do s <- compileToSMTLib b isSat a
writeFile fn s
putStrLn $ "Generated SMT benchmark " ++ show fn ++ "."
-- | Proves the predicate using the given SMT-solver
proveWith :: Provable a => SMTConfig -> a -> IO ThmResult
proveWith config a = simulate cvt config False [] a >>= callSolver False "Checking Theoremhood.." ThmResult config
where cvt = if useSMTLib2 config then toSMTLib2 else toSMTLib1
-- | Find a satisfying assignment using the given SMT-solver
satWith :: Provable a => SMTConfig -> a -> IO SatResult
satWith config a = simulate cvt config True [] a >>= callSolver True "Checking Satisfiability.." SatResult config
where cvt = if useSMTLib2 config then toSMTLib2 else toSMTLib1
-- | Determine if the constraints are vacuous using the given SMT-solver
isVacuousWith :: Provable a => SMTConfig -> a -> IO Bool
isVacuousWith config a = do
Result ki tr uic is cs ts as uis ax asgn cstr _ <- runSymbolic (True, Just config) $ forAll_ a >>= output
case cstr of
[] -> return False -- no constraints, no need to check
_ -> do let is' = [(EX, i) | (_, i) <- is] -- map all quantifiers to "exists" for the constraint check
res' = Result ki tr uic is' cs ts as uis ax asgn cstr [trueSW]
cvt = if useSMTLib2 config then toSMTLib2 else toSMTLib1
SatResult result <- runProofOn cvt config True [] res' >>= callSolver True "Checking Satisfiability.." SatResult config
case result of
Unsatisfiable{} -> return True -- constraints are unsatisfiable!
Satisfiable{} -> return False -- constraints are satisfiable!
Unknown{} -> error "SBV: isVacuous: Solver returned unknown!"
ProofError _ ls -> error $ "SBV: isVacuous: error encountered:\n" ++ unlines ls
TimeOut _ -> error "SBV: isVacuous: time-out."
-- | Find all satisfying assignments using the given SMT-solver
allSatWith :: Provable a => SMTConfig -> a -> IO AllSatResult
allSatWith config p = do
let converter = if useSMTLib2 config then toSMTLib2 else toSMTLib1
msg "Checking Satisfiability, all solutions.."
sbvPgm@(qinps, _, _, ki, _) <- simulate converter config True [] p
let usorts = [s | KUninterpreted s <- Set.toList ki]
unless (null usorts) $ msg $ "SBV.allSat: Uninterpreted sorts present: " ++ unwords usorts
++ "\n SBV will use equivalence classes to generate all-satisfying instances."
results <- unsafeInterleaveIO $ go sbvPgm (1::Int) []
-- See if there are any existentials below any universals
-- If such is the case, then the solutions are unique upto prefix existentials
let w = ALL `elem` map fst qinps
return $ AllSatResult (w, results)
where msg = when (verbose config) . putStrLn . ("** " ++)
go sbvPgm = loop
where loop !n nonEqConsts = do
curResult <- invoke nonEqConsts n sbvPgm
case curResult of
Nothing -> return []
Just (SatResult r) -> let cont model = do rest <- unsafeInterleaveIO $ loop (n+1) (modelAssocs model : nonEqConsts)
return (r : rest)
in case r of
Satisfiable _ (SMTModel [] _ _) -> return [r]
Unknown _ (SMTModel [] _ _) -> return [r]
ProofError _ _ -> return [r]
TimeOut _ -> return []
Unsatisfiable _ -> return []
Satisfiable _ model -> cont model
Unknown _ model -> cont model
invoke nonEqConsts n (qinps, modelMap, skolemMap, _, smtLibPgm) = do
msg $ "Looking for solution " ++ show n
case addNonEqConstraints (roundingMode config) qinps nonEqConsts smtLibPgm of
Nothing -> -- no new constraints added, stop
return Nothing
Just finalPgm -> do msg $ "Generated SMTLib program:\n" ++ finalPgm
smtAnswer <- engine (solver config) (updateName (n-1) config) True qinps modelMap skolemMap finalPgm
msg "Done.."
return $ Just $ SatResult smtAnswer
updateName i cfg = cfg{smtFile = upd `fmap` smtFile cfg}
where upd nm = let (b, e) = splitExtension nm in b ++ "_allSat_" ++ show i ++ e
type SMTProblem = ( [(Quantifier, NamedSymVar)] -- inputs
, [(String, UnintKind)] -- model-map
, [Either SW (SW, [SW])] -- skolem-map
, Set.Set Kind -- kinds used
, SMTLibPgm -- SMTLib representation
)
callSolver :: Bool -> String -> (SMTResult -> b) -> SMTConfig -> SMTProblem -> IO b
callSolver isSat checkMsg wrap config (qinps, modelMap, skolemMap, _, smtLibPgm) = do
let msg = when (verbose config) . putStrLn . ("** " ++)
msg checkMsg
let finalPgm = intercalate "\n" (pre ++ post) where SMTLibPgm _ (_, pre, post) = smtLibPgm
msg $ "Generated SMTLib program:\n" ++ finalPgm
smtAnswer <- engine (solver config) config isSat qinps modelMap skolemMap finalPgm
msg "Done.."
return $ wrap smtAnswer
simulate :: Provable a => SMTLibConverter -> SMTConfig -> Bool -> [String] -> a -> IO SMTProblem
simulate converter config isSat comments predicate = do
let msg = when (verbose config) . putStrLn . ("** " ++)
isTiming = timing config
msg "Starting symbolic simulation.."
res <- timeIf isTiming "problem construction" $ runSymbolic (isSat, Just config) $ (if isSat then forSome_ else forAll_) predicate >>= output
msg $ "Generated symbolic trace:\n" ++ show res
msg "Translating to SMT-Lib.."
runProofOn converter config isSat comments res
runProofOn :: SMTLibConverter -> SMTConfig -> Bool -> [String] -> Result -> IO SMTProblem
runProofOn converter config isSat comments res =
let isTiming = timing config
solverCaps = capabilities (solver config)
in case res of
Result ki _qcInfo _codeSegs is consts tbls arrs uis axs pgm cstrs [o@(SW KBool _)] ->
timeIf isTiming "translation"
$ let uiMap = mapMaybe arrayUIKind arrs ++ map unintFnUIKind uis
skolemMap = skolemize (if isSat then is else map flipQ is)
where flipQ (ALL, x) = (EX, x)
flipQ (EX, x) = (ALL, x)
skolemize :: [(Quantifier, NamedSymVar)] -> [Either SW (SW, [SW])]
skolemize qinps = go qinps ([], [])
where go [] (_, sofar) = reverse sofar
go ((ALL, (v, _)):rest) (us, sofar) = go rest (v:us, Left v : sofar)
go ((EX, (v, _)):rest) (us, sofar) = go rest (us, Right (v, reverse us) : sofar)
in return (is, uiMap, skolemMap, ki, converter (roundingMode config) (useLogic config) solverCaps ki isSat comments is skolemMap consts tbls arrs uis axs pgm cstrs o)
Result _kindInfo _qcInfo _codeSegs _is _consts _tbls _arrs _uis _axs _pgm _cstrs os -> case length os of
0 -> error $ "Impossible happened, unexpected non-outputting result\n" ++ show res
1 -> error $ "Impossible happened, non-boolean output in " ++ show os
++ "\nDetected while generating the trace:\n" ++ show res
_ -> error $ "User error: Multiple output values detected: " ++ show os
++ "\nDetected while generating the trace:\n" ++ show res
++ "\n*** Check calls to \"output\", they are typically not needed!"
-- | Check if a branch condition is feasible in the current state
isSBranchFeasibleInState :: State -> String -> SBool -> IO Bool
isSBranchFeasibleInState st branch cond = do
let cfg = let pickedConfig = fromMaybe defaultSMTCfg (getSBranchRunConfig st)
in pickedConfig { timeOut = sBranchTimeOut pickedConfig }
msg = when (verbose cfg) . putStrLn . ("** " ++)
sw <- sbvToSW st cond
() <- forceSWArg sw
Result ki tr uic is cs ts as uis ax asgn cstr _ <- liftIO $ extractSymbolicSimulationState st
let -- Construct the corresponding sat-checker for the branch. Note that we need to
-- forget about the quantifiers and just use an "exist", as we're looking for a
-- point-satisfiability check here; whatever the original program was.
pgm = Result ki tr uic [(EX, n) | (_, n) <- is] cs ts as uis ax asgn cstr [sw]
cvt = if useSMTLib2 cfg then toSMTLib2 else toSMTLib1
check <- runProofOn cvt cfg True [] pgm >>= callSolver True ("sBranch: Checking " ++ show branch ++ " feasibility") SatResult cfg
res <- case check of
           SatResult (Unsatisfiable _) -> return False
           _                           -> return True -- No risk: even if it timed out or anything else happened, we say it's feasible
msg $ "sBranch: Conclusion: " ++ if res then "Feasible" else "Unfeasible"
return res
| TomMD/cryptol | sbv/Data/SBV/Provers/Prover.hs | bsd-3-clause | 26,293 | 0 | 25 | 8,180 | 6,531 | 3,438 | 3,093 | 302 | 10 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
module Data.Derivation.Structural where
import Data.Derivation
data Weaken (rule :: [([k],l)] -> [k] -> l -> *) :: [([k],l)] -> [k] -> l -> * where
Weaken :: (forall x. Index del x -> Index gam x)
-> Weaken rule '[ del # a ] gam a
Unweak :: !(rule args gam a)
-> Weaken rule args gam a
weaken :: (forall x. Index del x -> Index gam x)
-> Derivation (Weaken rule) del a
-> Derivation (Weaken rule) gam a
weaken sub d = only d ==> Weaken sub
unweak :: Derivation rule gam a
-> Derivation (Weaken rule) gam a
unweak = mapDeriv Unweak
data Exchange (rule :: [([k],l)] -> [k] -> l -> *) :: [([k],l)] -> [k] -> l -> * where
Exchange :: Exchange rule '[ (b & a & gam) # c ] (a & b & gam) c
Unexc :: !(rule args gam a)
-> Exchange rule args gam a
data Contraction (rule :: [([k],l)] -> [k] -> l -> *) :: [([k],l)] -> [k] -> l -> * where
Contraction :: Contraction rule '[ (a & gam) # b ] (a & a & gam) b
Uncon :: !(rule args gam a)
-> Contraction rule args gam a
| kylcarte/derivation | src/Data/Derivation/Structural.hs | bsd-3-clause | 1,414 | 0 | 12 | 347 | 568 | 316 | 252 | 40 | 1 |
{-# LANGUAGE CPP, FlexibleInstances, NamedFieldPuns,
NoImplicitPrelude, TemplateHaskell,
UndecidableInstances #-}
#include "overlapping-compat.h"
{-|
Module: Data.Aeson.TH
Copyright: (c) 2011-2015 Bryan O'Sullivan
(c) 2011 MailRank, Inc.
License: Apache
Stability: experimental
Portability: portable
Functions to mechanically derive 'ToJSON' and 'FromJSON' instances. Note that
you need to enable the @TemplateHaskell@ language extension in order to use this
module.
An example shows how instances are generated for arbitrary data types. First we
define a data type:
@
data D a = Nullary
| Unary Int
| Product String Char a
| Record { testOne :: Double
, testTwo :: Bool
, testThree :: D a
} deriving Eq
@
Next we derive the necessary instances. Note that we make use of the
feature to change record field names. In this case we drop the first 4
characters of every field name. We also modify constructor names by
lower-casing them:
@
$('deriveJSON' 'defaultOptions'{'fieldLabelModifier' = 'drop' 4, 'constructorTagModifier' = map toLower} ''D)
@
Now we can use the newly created instances.
@
d :: D 'Int'
d = Record { testOne = 3.14159
, testTwo = 'True'
, testThree = Product \"test\" \'A\' 123
}
@
>>> fromJSON (toJSON d) == Success d
> True
This also works for data family instances, but instead of passing in the data
family name (with double quotes), we pass in a data family instance
constructor (with a single quote):
@
data family DF a
data instance DF Int = DF1 Int
| DF2 Int Int
deriving Eq
$('deriveJSON' 'defaultOptions' 'DF1)
-- Alternatively, one could pass 'DF2 instead
@
Please note that you can derive instances for tuples using the following syntax:
@
-- FromJSON and ToJSON instances for 4-tuples.
$('deriveJSON' 'defaultOptions' ''(,,,))
@
-}
module Data.Aeson.TH
( -- * Encoding configuration
Options(..), SumEncoding(..), defaultOptions, defaultTaggedObject
-- * FromJSON and ToJSON derivation
, deriveJSON
, deriveToJSON
, deriveFromJSON
, mkToJSON
, mkToEncoding
, mkParseJSON
) where
import Control.Applicative ( pure, (<$>), (<*>) )
import Data.Aeson ( toJSON, Object, (.=), (.:), (.:?)
, ToJSON, toEncoding, toJSON
, FromJSON, parseJSON
)
import Data.Aeson.Types ( Value(..), Parser
, Options(..)
, SumEncoding(..)
, defaultOptions
, defaultTaggedObject
)
import Data.Aeson.Types.Internal (Encoding(..))
import Control.Monad ( join, liftM2, return, mapM, fail )
import Data.Bool ( Bool(False, True), otherwise, (&&), not )
import Data.Either ( Either(Left, Right) )
import Data.Eq ( (==) )
import Data.Function ( ($), (.), flip )
import Data.Functor ( fmap )
import Data.Int ( Int )
import Data.List ( (++), all, any, filter, find, foldl, foldl'
, genericLength , intercalate , intersperse, length, map
, partition, zip
)
import Data.Maybe ( Maybe(Nothing, Just), catMaybes )
import Data.Monoid ( (<>), mconcat )
import Language.Haskell.TH
import Language.Haskell.TH.Syntax ( VarStrictType )
import Prelude ( String, (-), Integer, error, foldr1, fromIntegral
, snd, uncurry
)
#if MIN_VERSION_template_haskell(2,8,0) && __GLASGOW_HASKELL__ < 710
import Prelude ( drop )
#endif
import Text.Printf ( printf )
import Text.Show ( show )
import qualified Data.Aeson as A
import qualified Data.Aeson.Encode.Builder as E
import qualified Data.Aeson.Encode.Functions as E
import qualified Data.HashMap.Strict as H ( lookup, toList )
#if MIN_VERSION_template_haskell(2,8,0) && __GLASGOW_HASKELL__ < 710
import qualified Data.Set as Set ( Set, empty, singleton, size, union, unions )
#endif
import qualified Data.Text as T ( Text, pack, unpack )
import qualified Data.Vector as V ( unsafeIndex, null, length, create, fromList )
import qualified Data.Vector.Mutable as VM ( unsafeNew, unsafeWrite )
--------------------------------------------------------------------------------
-- Convenience
--------------------------------------------------------------------------------
-- | Generates both 'ToJSON' and 'FromJSON' instance declarations for the given
-- data type or data family instance constructor.
--
-- This is a convenience function which is equivalent to calling both
-- 'deriveToJSON' and 'deriveFromJSON'.
deriveJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON' and 'FromJSON'
-- instances.
-> Q [Dec]
deriveJSON opts name =
liftM2 (++)
(deriveToJSON opts name)
(deriveFromJSON opts name)
--------------------------------------------------------------------------------
-- ToJSON
--------------------------------------------------------------------------------
{-
TODO: Don't constrain phantom type variables.
data Foo a = Foo Int
instance (ToJSON a) ⇒ ToJSON (Foo a) where ...
The above (ToJSON a) constraint is not necessary and perhaps undesirable.
-}
-- | Generates a 'ToJSON' instance declaration for the given data type or
-- data family instance constructor.
deriveToJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'ToJSON' instance
-- declaration.
-> Q [Dec]
deriveToJSON opts name =
withType name $ \name' tvbs cons mbTys -> fmap (:[]) $ fromCons name' tvbs cons mbTys
where
fromCons :: Name -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q Dec
fromCons name' tvbs cons mbTys =
instanceD instanceCxt
instanceType
[ funD 'toJSON
[ clause []
(normalB $ consToValue opts cons)
[]
]
, funD 'toEncoding
[ clause []
(normalB $ consToEncoding opts cons)
[]
]
]
where
(instanceCxt, instanceType) =
buildTypeInstance name' ''ToJSON tvbs mbTys
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a 'Value'.
mkToJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToJSON opts name = withType name (\_ _ cons _ -> consToValue opts cons)
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a JSON string.
mkToEncoding :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToEncoding opts name = withType name (\_ _ cons _ -> consToEncoding opts cons)
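-- For example, given some record type @Coord@ defined elsewhere (a made-up
-- type, shown only as a sketch), the instance can be written by hand while the
-- conversion code is still generated:
--
-- > instance ToJSON Coord where
-- >     toJSON     = $(mkToJSON     defaultOptions ''Coord)
-- >     toEncoding = $(mkToEncoding defaultOptions ''Coord)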
-- | Helper function used by both 'deriveToJSON' and 'mkToJSON'. Generates
-- code to generate a 'Value' of a number of constructors. All constructors
-- must be from the same type.
consToValue :: Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToValue _ [] = error $ "Data.Aeson.TH.consToValue: "
++ "Not a single constructor given!"
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
consToValue opts [con] = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) [argsToValue opts False con]
consToValue opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) matches
where
matches
| allNullaryToStringTag opts && all isNullary cons =
[ match (conP conName []) (normalB $ conStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise = [argsToValue opts True con | con <- cons]
conStr :: Options -> Name -> Q Exp
conStr opts = appE [|String|] . conTxt opts
conTxt :: Options -> Name -> Q Exp
conTxt opts = appE [|T.pack|] . conStringE opts
conStringE :: Options -> Name -> Q Exp
conStringE opts = stringE . constructorTagModifier opts . nameBase
-- | Helper function used by both 'deriveToJSON' and 'mkToEncoding'. Generates
-- code to write out a value for a number of constructors. All constructors
-- must be from the same type.
consToEncoding :: Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToEncoding _ [] = error $ "Data.Aeson.TH.consToEncoding: "
++ "Not a single constructor given!"
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
consToEncoding opts [con] = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) [argsToEncoding opts False con]
-- Encode just the name of the constructor of a sum type iff all the
-- constructors are nullary.
consToEncoding opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) matches
where
matches
| allNullaryToStringTag opts && all isNullary cons =
[ match (conP conName [])
(normalB $ [|Encoding|] `appE` encStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise = [argsToEncoding opts True con | con <- cons]
encStr :: Options -> Name -> Q Exp
encStr opts = appE [|E.text|] . conTxt opts
-- | If constructor is nullary.
isNullary :: Con -> Bool
isNullary (NormalC _ []) = True
isNullary _ = False
sumToValue :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToValue opts multiCons conName exp
| multiCons =
case sumEncoding opts of
TwoElemArray ->
[|Array|] `appE` ([|V.fromList|] `appE` listE [conStr opts conName, exp])
TaggedObject{tagFieldName, contentsFieldName} ->
[|A.object|] `appE` listE
[ infixApp [|T.pack tagFieldName|] [|(.=)|] (conStr opts conName)
, infixApp [|T.pack contentsFieldName|] [|(.=)|] exp
]
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp
]
| otherwise = exp
-- | Generates code to generate the JSON encoding of a single constructor.
argsToValue :: Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToValue opts multiCons (NormalC conName []) =
match (conP conName [])
(normalB (sumToValue opts multiCons conName [e|toJSON ([] :: [()])|]))
[]
-- Polyadic constructors with special case for unary constructors.
argsToValue opts multiCons (NormalC conName ts) = do
let len = length ts
args <- mapM newName ["arg" ++ show n | n <- [1..len]]
js <- case [[|toJSON|] `appE` varE arg | arg <- args] of
-- Single argument is directly converted.
[e] -> return e
-- Multiple arguments are converted to a JSON array.
es -> do
mv <- newName "mv"
let newMV = bindS (varP mv)
([|VM.unsafeNew|] `appE`
litE (integerL $ fromIntegral len))
stmts = [ noBindS $
[|VM.unsafeWrite|] `appE`
(varE mv) `appE`
litE (integerL ix) `appE`
e
| (ix, e) <- zip [(0::Integer)..] es
]
ret = noBindS $ [|return|] `appE` varE mv
return $ [|Array|] `appE`
(varE 'V.create `appE`
doE (newMV:stmts++[ret]))
match (conP conName $ map varP args)
(normalB $ sumToValue opts multiCons conName js)
[]
-- Records.
argsToValue opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToValue opts multiCons (NormalC conName [(st,ty)])
_ -> do
args <- mapM newName ["arg" ++ show n | (_, n) <- zip ts [1 :: Integer ..]]
let exp = [|A.object|] `appE` pairs
pairs | omitNothingFields opts = infixApp maybeFields
[|(++)|]
restFields
| otherwise = listE $ map toPair argCons
argCons = zip args ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE $ map toPair rest
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, (field, _, _)) =
infixApp (infixE (Just $ toFieldName field)
[|(.=)|]
Nothing)
[|(<$>)|]
(varE arg)
toPair (arg, (field, _, _)) =
infixApp (toFieldName field)
[|(.=)|]
(varE arg)
toFieldName field = [|T.pack|] `appE` fieldLabelExp opts field
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> [|toJSON|] `appE` tupE [conStr opts conName, exp]
TaggedObject{tagFieldName} ->
[|A.object|] `appE`
-- TODO: Maybe throw an error in case
-- tagFieldName overwrites a field in pairs.
infixApp (infixApp [|T.pack tagFieldName|]
[|(.=)|]
(conStr opts conName))
[|(:)|]
pairs
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp ]
else exp
) []
-- Infix constructors.
argsToValue opts multiCons (InfixC _ conName _) = do
al <- newName "argL"
ar <- newName "argR"
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToValue opts multiCons conName
$ [|toJSON|] `appE` listE [ [|toJSON|] `appE` varE a
| a <- [al,ar]
]
)
[]
-- Existentially quantified constructors.
argsToValue opts multiCons (ForallC _ _ con) =
argsToValue opts multiCons con
isMaybe :: (a, (b, c, Type)) -> Bool
isMaybe (_, (_, _, AppT (ConT t) _)) = t == ''Maybe
isMaybe _ = False
(<^>) :: ExpQ -> ExpQ -> ExpQ
(<^>) a b = infixApp a [|(<>)|] b
infixr 6 <^>
(<:>) :: ExpQ -> ExpQ -> ExpQ
(<:>) a b = a <^> [|E.char7 ':'|] <^> b
infixr 5 <:>
(<%>) :: ExpQ -> ExpQ -> ExpQ
(<%>) a b = a <^> [|E.char7 ','|] <^> b
infixr 4 <%>
array :: ExpQ -> ExpQ
array exp = [|Encoding|] `appE` ([|E.char7 '['|] <^> exp <^> [|E.char7 ']'|])
object :: ExpQ -> ExpQ
object exp = [|Encoding|] `appE` ([|E.char7 '{'|] <^> exp <^> [|E.char7 '}'|])
sumToEncoding :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToEncoding opts multiCons conName exp
| multiCons =
let fexp = [|fromEncoding|] `appE` exp in
case sumEncoding opts of
TwoElemArray ->
array (encStr opts conName <%> fexp)
TaggedObject{tagFieldName, contentsFieldName} ->
object $
([|E.text (T.pack tagFieldName)|] <:> encStr opts conName) <%>
([|E.text (T.pack contentsFieldName)|] <:> fexp)
ObjectWithSingleField ->
object (encStr opts conName <:> fexp)
| otherwise = exp
-- | Generates code to generate the JSON encoding of a single constructor.
argsToEncoding :: Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToEncoding opts multiCons (NormalC conName []) =
match (conP conName [])
(normalB (sumToEncoding opts multiCons conName [e|toEncoding ([] :: [()])|]))
[]
-- Polyadic constructors with special case for unary constructors.
argsToEncoding opts multiCons (NormalC conName ts) = do
let len = length ts
args <- mapM newName ["arg" ++ show n | n <- [1..len]]
js <- case args of
-- Single argument is directly converted.
[e] -> return ([|toEncoding|] `appE` varE e)
-- Multiple arguments are converted to a JSON array.
es ->
return (array (foldr1 (<%>) [[|E.builder|] `appE` varE x | x <- es]))
match (conP conName $ map varP args)
(normalB $ sumToEncoding opts multiCons conName js)
[]
-- Records.
argsToEncoding opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToEncoding opts multiCons (NormalC conName [(st,ty)])
_ -> do
args <- mapM newName ["arg" ++ show n | (_, n) <- zip ts [1 :: Integer ..]]
let exp = object objBody
objBody = [|mconcat|] `appE`
([|intersperse (E.char7 ',')|] `appE` pairs)
pairs | omitNothingFields opts = infixApp maybeFields
[|(<>)|]
restFields
| otherwise = listE (map toPair argCons)
argCons = zip args ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE (map toPair rest)
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, (field, _, _)) =
infixApp
(infixApp
(infixE
(Just $ toFieldName field <^> [|E.char7 ':'|])
[|(<>)|]
Nothing)
[|(.)|]
[|E.builder|])
[|(<$>)|]
(varE arg)
toPair (arg, (field, _, _)) =
toFieldName field <:> [|E.builder|] `appE` varE arg
toFieldName field = [|E.text|] `appE`
([|T.pack|] `appE` fieldLabelExp opts field)
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> array $
encStr opts conName <%> [|fromEncoding|] `appE` exp
TaggedObject{tagFieldName} -> object $
([|E.text (T.pack tagFieldName)|] <:>
encStr opts conName) <%>
objBody
ObjectWithSingleField -> object $
encStr opts conName <:> [|fromEncoding|] `appE` exp
else exp
) []
-- Infix constructors.
argsToEncoding opts multiCons (InfixC _ conName _) = do
al <- newName "argL"
ar <- newName "argR"
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToEncoding opts multiCons conName
$ [|toEncoding|] `appE` listE [ [|toJSON|] `appE` varE a
| a <- [al,ar]
]
)
[]
-- Existentially quantified constructors.
argsToEncoding opts multiCons (ForallC _ _ con) =
argsToEncoding opts multiCons con
--------------------------------------------------------------------------------
-- FromJSON
--------------------------------------------------------------------------------
-- | Generates a 'FromJSON' instance declaration for the given data type or
-- data family instance constructor.
deriveFromJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'FromJSON' instance
-- declaration.
-> Q [Dec]
deriveFromJSON opts name =
withType name $ \name' tvbs cons mbTys -> fmap (:[]) $ fromCons name' tvbs cons mbTys
where
fromCons :: Name -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q Dec
fromCons name' tvbs cons mbTys =
instanceD instanceCxt
instanceType
[ funD 'parseJSON
[ clause []
(normalB $ consFromJSON name' opts cons)
[]
]
]
where
(instanceCxt, instanceType) =
buildTypeInstance name' ''FromJSON tvbs mbTys
-- | Generates a lambda expression which parses the JSON encoding of the given
-- data type or data family instance constructor.
mkParseJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkParseJSON opts name =
withType name (\name' _ cons _ -> consFromJSON name' opts cons)
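-- For example (using the same made-up @Coord@ type sketched for 'mkToJSON'):
--
-- > instance FromJSON Coord where
-- >     parseJSON = $(mkParseJSON defaultOptions ''Coord)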
-- | Helper function used by both 'deriveFromJSON' and 'mkParseJSON'. Generates
-- code to parse the JSON encoding of a number of constructors. All constructors
-- must be from the same type.
consFromJSON :: Name
-- ^ Name of the type to which the constructors belong.
-> Options
-- ^ Encoding options
-> [Con]
-- ^ Constructors for which to generate JSON parsing code.
-> Q Exp
consFromJSON _ _ [] = error $ "Data.Aeson.TH.consFromJSON: "
++ "Not a single constructor given!"
consFromJSON tName opts [con] = do
value <- newName "value"
lam1E (varP value) (parseArgs tName opts con (Right value))
consFromJSON tName opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) $
if allNullaryToStringTag opts && all isNullary cons
then allNullaryMatches
else mixedMatches
where
allNullaryMatches =
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(guardedB $
[ liftM2 (,) (normalG $
infixApp (varE txt)
[|(==)|]
([|T.pack|] `appE`
conStringE opts conName)
)
([|pure|] `appE` conE conName)
| con <- cons
, let conName = getConName con
]
++
[ liftM2 (,)
(normalG [|otherwise|])
( [|noMatchFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|T.unpack|] `appE` varE txt)
)
]
)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|noStringFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
mixedMatches =
case sumEncoding opts of
TaggedObject {tagFieldName, contentsFieldName} ->
parseObject $ parseTaggedObject tagFieldName contentsFieldName
ObjectWithSingleField ->
parseObject $ parseObjectWithSingleField
TwoElemArray ->
[ do arr <- newName "array"
match (conP 'Array [varP arr])
(guardedB $
[ liftM2 (,) (normalG $ infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL 2))
(parse2ElemArray arr)
, liftM2 (,) (normalG [|otherwise|])
(([|not2ElemArray|]
`appE` (litE $ stringL $ show tName)
`appE` ([|V.length|] `appE` varE arr)))
]
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|noArrayFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseObject f =
[ do obj <- newName "obj"
match (conP 'Object [varP obj]) (normalB $ f obj) []
, do other <- newName "other"
match (varP other)
( normalB
$ [|noObjectFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseTaggedObject typFieldName valFieldName obj = do
conKey <- newName "conKey"
doE [ bindS (varP conKey)
(infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE` stringE typFieldName))
, noBindS $ parseContents conKey (Left (valFieldName, obj)) 'conNotFoundFailTaggedObject
]
parse2ElemArray arr = do
conKey <- newName "conKey"
conVal <- newName "conVal"
let letIx n ix =
valD (varP n)
(normalB ([|V.unsafeIndex|] `appE`
varE arr `appE`
litE (integerL ix)))
[]
letE [ letIx conKey 0
, letIx conVal 1
]
(caseE (varE conKey)
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(normalB $ parseContents txt
(Right conVal)
'conNotFoundFail2ElemArray
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|firstElemNoStringFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
)
parseObjectWithSingleField obj = do
conKey <- newName "conKey"
conVal <- newName "conVal"
caseE ([e|H.toList|] `appE` varE obj)
[ match (listP [tupP [varP conKey, varP conVal]])
(normalB $ parseContents conKey (Right conVal) 'conNotFoundFailObjectSingleField)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|wrongPairCountFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|show . length|] `appE` varE other)
)
[]
]
parseContents conKey contents errorFun =
caseE (varE conKey)
[ match wildP
( guardedB $
[ do g <- normalG $ infixApp (varE conKey)
[|(==)|]
([|T.pack|] `appE`
conNameExp opts con)
e <- parseArgs tName opts con contents
return (g, e)
| con <- cons
]
++
[ liftM2 (,)
(normalG [e|otherwise|])
( varE errorFun
`appE` (litE $ stringL $ show tName)
`appE` listE (map ( litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
) cons
)
`appE` ([|T.unpack|] `appE` varE conKey)
)
]
)
[]
]
parseNullaryMatches :: Name -> Name -> [Q Match]
parseNullaryMatches tName conName =
[ do arr <- newName "arr"
match (conP 'Array [varP arr])
(guardedB $
[ liftM2 (,) (normalG $ [|V.null|] `appE` varE arr)
([|pure|] `appE` conE conName)
, liftM2 (,) (normalG [|otherwise|])
(parseTypeMismatch tName conName
(litE $ stringL "an empty Array")
(infixApp (litE $ stringL $ "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
]
)
[]
, matchFailed tName conName "Array"
]
parseUnaryMatches :: Name -> [Q Match]
parseUnaryMatches conName =
[ do arg <- newName "arg"
match (varP arg)
( normalB $ infixApp (conE conName)
[|(<$>)|]
([|parseJSON|] `appE` varE arg)
)
[]
]
parseRecord :: Options -> Name -> Name -> [VarStrictType] -> Name -> ExpQ
parseRecord opts tName conName ts obj =
foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
where
x:xs = [ [|lookupField|]
`appE` (litE $ stringL $ show tName)
`appE` (litE $ stringL $ constructorTagModifier opts $ nameBase conName)
`appE` (varE obj)
`appE` ( [|T.pack|] `appE` fieldLabelExp opts field
)
| (field, _, _) <- ts
]
getValField :: Name -> String -> [MatchQ] -> Q Exp
getValField obj valFieldName matches = do
val <- newName "val"
doE [ bindS (varP val) $ infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE`
(litE $ stringL valFieldName))
, noBindS $ caseE (varE val) matches
]
-- | Generates code to parse the JSON encoding of a single constructor.
parseArgs :: Name -- ^ Name of the type to which the constructor belongs.
-> Options -- ^ Encoding options.
-> Con -- ^ Constructor for which to generate JSON parsing code.
-> Either (String, Name) Name -- ^ Left (valFieldName, objName) or
-- Right valName
-> Q Exp
-- Nullary constructors.
parseArgs tName _ (NormalC conName []) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseNullaryMatches tName conName
parseArgs tName _ (NormalC conName []) (Right valName) =
caseE (varE valName) $ parseNullaryMatches tName conName
-- Unary constructors.
parseArgs _ _ (NormalC conName [_]) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseUnaryMatches conName
parseArgs _ _ (NormalC conName [_]) (Right valName) =
caseE (varE valName) $ parseUnaryMatches conName
-- Polyadic constructors.
parseArgs tName _ (NormalC conName ts) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseProduct tName conName $ genericLength ts
parseArgs tName _ (NormalC conName ts) (Right valName) =
caseE (varE valName) $ parseProduct tName conName $ genericLength ts
-- Records.
parseArgs tName opts (RecC conName ts) (Left (_, obj)) =
parseRecord opts tName conName ts obj
parseArgs tName opts (RecC conName ts) (Right valName) = case (unwrapUnaryRecords opts,ts) of
(True,[(_,st,ty)])-> parseArgs tName opts (NormalC conName [(st,ty)]) (Right valName)
_ -> do
obj <- newName "recObj"
caseE (varE valName)
[ match (conP 'Object [varP obj]) (normalB $ parseRecord opts tName conName ts obj) []
, matchFailed tName conName "Object"
]
-- Infix constructors. Apart from syntax these are the same as
-- polyadic constructors.
parseArgs tName _ (InfixC _ conName _) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseProduct tName conName 2
parseArgs tName _ (InfixC _ conName _) (Right valName) =
caseE (varE valName) $ parseProduct tName conName 2
-- Existentially quantified constructors. We ignore the quantifiers
-- and proceed with the contained constructor.
parseArgs tName opts (ForallC _ _ con) contents =
parseArgs tName opts con contents
-- | Generates code to parse the JSON encoding of an n-ary
-- constructor.
parseProduct :: Name -- ^ Name of the type to which the constructor belongs.
-> Name -- ^ 'Con'structor name.
-> Integer -- ^ 'Con'structor arity.
-> [Q Match]
parseProduct tName conName numArgs =
[ do arr <- newName "arr"
-- List of: "parseJSON (arr `V.unsafeIndex` <IX>)"
let x:xs = [ [|parseJSON|]
`appE`
infixApp (varE arr)
[|V.unsafeIndex|]
(litE $ integerL ix)
| ix <- [0 .. numArgs - 1]
]
match (conP 'Array [varP arr])
(normalB $ condE ( infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL numArgs)
)
( foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
)
( parseTypeMismatch tName conName
(litE $ stringL $ "Array of length " ++ show numArgs)
( infixApp (litE $ stringL $ "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
)
[]
, matchFailed tName conName "Array"
]
--------------------------------------------------------------------------------
-- Parsing errors
--------------------------------------------------------------------------------
matchFailed :: Name -> Name -> String -> MatchQ
matchFailed tName conName expected = do
other <- newName "other"
match (varP other)
( normalB $ parseTypeMismatch tName conName
(litE $ stringL expected)
([|valueConName|] `appE` varE other)
)
[]
parseTypeMismatch :: Name -> Name -> ExpQ -> ExpQ -> ExpQ
parseTypeMismatch tName conName expected actual =
foldl appE
[|parseTypeMismatch'|]
[ litE $ stringL $ nameBase conName
, litE $ stringL $ show tName
, expected
, actual
]
class (FromJSON a) => LookupField a where
lookupField :: String -> String -> Object -> T.Text -> Parser a
instance OVERLAPPABLE_ (FromJSON a) => LookupField a where
lookupField tName rec obj key =
case H.lookup key obj of
Nothing -> unknownFieldFail tName rec (T.unpack key)
Just v -> parseJSON v
instance (FromJSON a) => LookupField (Maybe a) where
lookupField _ _ obj key = join <$> obj .:? key
unknownFieldFail :: String -> String -> String -> Parser fail
unknownFieldFail tName rec key =
fail $ printf "When parsing the record %s of type %s the key %s was not present."
rec tName key
noArrayFail :: String -> String -> Parser fail
noArrayFail t o = fail $ printf "When parsing %s expected Array but got %s." t o
noObjectFail :: String -> String -> Parser fail
noObjectFail t o = fail $ printf "When parsing %s expected Object but got %s." t o
firstElemNoStringFail :: String -> String -> Parser fail
firstElemNoStringFail t o = fail $ printf "When parsing %s expected an Array of 2 elements where the first element is a String but got %s at the first element." t o
wrongPairCountFail :: String -> String -> Parser fail
wrongPairCountFail t n =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair but got %s pairs."
t n
noStringFail :: String -> String -> Parser fail
noStringFail t o = fail $ printf "When parsing %s expected String but got %s." t o
noMatchFail :: String -> String -> Parser fail
noMatchFail t o =
fail $ printf "When parsing %s expected a String with the tag of a constructor but got %s." t o
not2ElemArray :: String -> Int -> Parser fail
not2ElemArray t i = fail $ printf "When parsing %s expected an Array of 2 elements but got %i elements" t i
conNotFoundFail2ElemArray :: String -> [String] -> String -> Parser fail
conNotFoundFail2ElemArray t cs o =
fail $ printf "When parsing %s expected a 2-element Array with a tag and contents element where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailObjectSingleField :: String -> [String] -> String -> Parser fail
conNotFoundFailObjectSingleField t cs o =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailTaggedObject :: String -> [String] -> String -> Parser fail
conNotFoundFailTaggedObject t cs o =
fail $ printf "When parsing %s expected an Object with a tag field where the value is one of [%s], but got %s."
t (intercalate ", " cs) o
parseTypeMismatch' :: String -> String -> String -> String -> Parser fail
parseTypeMismatch' conName tName expected actual =
fail $ printf "When parsing the constructor %s of type %s expected %s but got %s."
conName tName expected actual
--------------------------------------------------------------------------------
-- Utility functions
--------------------------------------------------------------------------------
-- | Boilerplate for top level splices.
--
-- The given 'Name' must meet one of two criteria:
--
-- 1. It must be the name of a type constructor of a plain data type or newtype.
-- 2. It must be the name of a data family instance or newtype instance constructor.
-- Any other value will result in an exception.
withType :: Name
-> (Name -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q a)
-- ^ Function that generates the actual code. Will be applied
-- to the datatype/data family 'Name', type variable binders and
-- constructors extracted from the given 'Name'. If the 'Name' is
-- from a data family instance constructor, it will also have its
-- instantiated types; otherwise, it will be 'Nothing'.
-> Q a
-- ^ Resulting value in the 'Q'uasi monad.
withType name f = do
info <- reify name
case info of
TyConI dec ->
case dec of
DataD _ _ tvbs cons _ -> f name tvbs cons Nothing
NewtypeD _ _ tvbs con _ -> f name tvbs [con] Nothing
other -> error $ ns ++ "Unsupported type: " ++ show other
#if MIN_VERSION_template_haskell(2,11,0)
DataConI _ _ parentName -> do
#else
DataConI _ _ parentName _ -> do
#endif
parentInfo <- reify parentName
case parentInfo of
#if MIN_VERSION_template_haskell(2,11,0)
FamilyI (DataFamilyD _ tvbs _) decs ->
#else
FamilyI (FamilyD DataFam _ tvbs _) decs ->
#endif
let instDec = flip find decs $ \dec -> case dec of
DataInstD _ _ _ cons _ -> any ((name ==) . getConName) cons
NewtypeInstD _ _ _ con _ -> name == getConName con
_ -> error $ ns ++ "Must be a data or newtype instance."
in case instDec of
Just (DataInstD _ _ instTys cons _)
-> f parentName tvbs cons $ Just instTys
Just (NewtypeInstD _ _ instTys con _)
-> f parentName tvbs [con] $ Just instTys
_ -> error $ ns ++
"Could not find data or newtype instance constructor."
_ -> error $ ns ++ "Data constructor " ++ show name ++
" is not from a data family instance constructor."
#if MIN_VERSION_template_haskell(2,11,0)
FamilyI DataFamilyD{} _ ->
#else
FamilyI (FamilyD DataFam _ _ _) _ ->
#endif
error $ ns ++
"Cannot use a data family name. Use a data family instance constructor instead."
_ -> error $ ns ++ "I need the name of a plain data type constructor, "
++ "or a data family instance constructor."
where
ns :: String
ns = "Data.Aeson.TH.withType: "
-- | Infer the context and instance head needed for a FromJSON or ToJSON instance.
buildTypeInstance :: Name
-- ^ The type constructor or data family name
-> Name
-- ^ The typeclass name ('ToJSON' or 'FromJSON')
-> [TyVarBndr]
-- ^ The type variables from the data type/data family declaration
-> Maybe [Type]
-- ^ 'Just' the types used to instantiate a data family instance,
-- or 'Nothing' if it's a plain data type
-> (Q Cxt, Q Type)
-- ^ The resulting 'Cxt' and 'Type' to use in a class instance
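-- As a hedged illustration (not part of the original source): for a plain
-- declaration such as @data Pair a b = Pair a b@ and the constraint name
-- ''ToJSON, the intent is a context of roughly @(ToJSON a, ToJSON b)@ and
-- an instance head of @ToJSON (Pair a b)@.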
-- Plain data type/newtype case
buildTypeInstance tyConName constraint tvbs Nothing =
(applyCon constraint typeNames, conT constraint `appT` instanceType)
where
typeNames :: [Name]
typeNames = map tvbName tvbs
instanceType :: Q Type
instanceType = applyTyCon tyConName $ map varT typeNames
-- Data family instance case
buildTypeInstance dataFamName constraint tvbs (Just instTysAndKinds) =
(applyCon constraint lhsTvbNames, conT constraint `appT` instanceType)
where
-- We need to make sure that type variables in the instance head which have
-- constraints aren't poly-kinded, e.g.,
--
-- @
-- instance ToJSON a => ToJSON (Foo (a :: k)) where
-- @
--
-- To do this, we remove every kind ascription (i.e., strip off every 'SigT').
instanceType :: Q Type
instanceType = applyTyCon dataFamName $ map (return . unSigT) rhsTypes
    -- We need to be mindful of an old GHC bug which causes kind variables to appear in
-- @instTysAndKinds@ (as the name suggests) if (1) @PolyKinds@ is enabled, and
-- (2) either GHC 7.6 or 7.8 is being used (for more info, see
-- https://ghc.haskell.org/trac/ghc/ticket/9692).
--
-- Since Template Haskell doesn't seem to have a mechanism for detecting which
-- language extensions are enabled, we do the next-best thing by counting
-- the number of distinct kind variables in the data family declaration, and
-- then dropping that number of entries from @instTysAndKinds@
instTypes :: [Type]
instTypes =
#if __GLASGOW_HASKELL__ >= 710 || !(MIN_VERSION_template_haskell(2,8,0))
instTysAndKinds
#else
drop (Set.size . Set.unions $ map (distinctKindVars . tvbKind) tvbs)
instTysAndKinds
#endif
lhsTvbNames :: [Name]
lhsTvbNames = map (tvbName . uncurry replaceTyVarName)
. filter (isTyVar . snd)
$ zip tvbs rhsTypes
-- In GHC 7.8, only the @Type@s up to the rightmost non-eta-reduced type variable
-- in @instTypes@ are provided (as a result of this bug:
-- https://ghc.haskell.org/trac/ghc/ticket/9692). To work around this, we borrow
-- some type variables from the data family instance declaration.
rhsTypes :: [Type]
rhsTypes =
#if __GLASGOW_HASKELL__ >= 708 && __GLASGOW_HASKELL__ < 710
instTypes ++ map tvbToType
(drop (length instTypes)
tvbs)
#else
instTypes
#endif
#if MIN_VERSION_template_haskell(2,8,0) && __GLASGOW_HASKELL__ < 710
distinctKindVars :: Kind -> Set.Set Name
distinctKindVars (AppT k1 k2) = distinctKindVars k1 `Set.union` distinctKindVars k2
distinctKindVars (SigT k _) = distinctKindVars k
distinctKindVars (VarT k) = Set.singleton k
distinctKindVars _ = Set.empty
-- | Extracts the kind from a type variable binder.
tvbKind :: TyVarBndr -> Kind
tvbKind (PlainTV _ ) = starK
tvbKind (KindedTV _ k) = k
#endif
#if __GLASGOW_HASKELL__ >= 708 && __GLASGOW_HASKELL__ < 710
tvbToType :: TyVarBndr -> Type
tvbToType (PlainTV n) = VarT n
tvbToType (KindedTV n k) = SigT (VarT n) k
#endif
-- | Extracts the name from a constructor.
getConName :: Con -> Name
getConName (NormalC name _) = name
getConName (RecC name _) = name
getConName (InfixC _ name _) = name
getConName (ForallC _ _ con) = getConName con
-- | Extracts the name from a type variable binder.
tvbName :: TyVarBndr -> Name
tvbName (PlainTV name ) = name
tvbName (KindedTV name _) = name
-- | Replace the Name of a TyVarBndr with one from a Type (if the Type has a Name).
replaceTyVarName :: TyVarBndr -> Type -> TyVarBndr
replaceTyVarName tvb (SigT t _) = replaceTyVarName tvb t
replaceTyVarName (PlainTV _) (VarT n) = PlainTV n
replaceTyVarName (KindedTV _ k) (VarT n) = KindedTV n k
replaceTyVarName tvb _ = tvb
-- | Fully applies a type constructor to its type variables.
applyTyCon :: Name -> [Q Type] -> Q Type
applyTyCon = foldl' appT . conT
-- | Is the given type a variable?
isTyVar :: Type -> Bool
isTyVar (VarT _) = True
isTyVar (SigT t _) = isTyVar t
isTyVar _ = False
-- | Peel off a kind signature from a Type (if it has one).
unSigT :: Type -> Type
unSigT (SigT t _) = t
unSigT t = t
-- | Makes a string literal expression from a constructor's name.
conNameExp :: Options -> Con -> Q Exp
conNameExp opts = litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
-- | Creates a string literal expression from a record field label.
fieldLabelExp :: Options -- ^ Encoding options
-> Name
-> Q Exp
fieldLabelExp opts = litE . stringL . fieldLabelModifier opts . nameBase
-- | The name of the outermost 'Value' constructor.
valueConName :: Value -> String
valueConName (Object _) = "Object"
valueConName (Array _) = "Array"
valueConName (String _) = "String"
valueConName (Number _) = "Number"
valueConName (Bool _) = "Boolean"
valueConName Null = "Null"
applyCon :: Name -> [Name] -> Q [Pred]
applyCon con typeNames = return (map apply typeNames)
where apply t =
#if MIN_VERSION_template_haskell(2,10,0)
AppT (ConT con) (VarT t)
#else
ClassP con [VarT t]
#endif
| neobrain/aeson | Data/Aeson/TH.hs | bsd-3-clause | 48,240 | 0 | 27 | 17,437 | 10,780 | 5,828 | 4,952 | -1 | -1 |
-- | This module implements encoding and decoding of CSV data. The
-- implementation is RFC 4180 compliant, with the following
-- extensions:
--
-- * Empty lines are ignored.
--
-- * Non-escaped fields may contain any characters except
-- double-quotes, commas, carriage returns, and newlines.
--
-- * Escaped fields may contain any characters (but double-quotes
-- need to be escaped).
module Data.Csv
(
-- * Usage example
-- $example
-- * Treating CSV data as opaque byte strings
-- $generic-processing
-- * Custom type conversions
-- $customtypeconversions
-- ** Dealing with bad data
-- $baddata
-- * Encoding and decoding
-- $encoding
HasHeader(..)
, decode
, decodeByName
, encode
, encodeByName
-- ** Encoding and decoding options
-- $options
, DecodeOptions(..)
, defaultDecodeOptions
, decodeWith
, decodeByNameWith
, EncodeOptions(..)
, defaultEncodeOptions
, encodeWith
, encodeByNameWith
-- * Core CSV types
, Csv
, Record
, Field
, Header
, Name
, NamedRecord
-- * Type conversion
-- $typeconversion
-- ** Index-based record conversion
-- $indexbased
, FromRecord(..)
, Parser
, runParser
, index
, (.!)
, unsafeIndex
, ToRecord(..)
, record
, Only(..)
-- ** Name-based record conversion
-- $namebased
, FromNamedRecord(..)
, lookup
, (.:)
, ToNamedRecord(..)
, namedRecord
, namedField
, (.=)
-- ** Field conversion
-- $fieldconversion
, FromField(..)
, ToField(..)
) where
import Prelude hiding (lookup)
import Data.Csv.Conversion
import Data.Csv.Encoding
import Data.Csv.Types
-- $example
--
-- A short encoding usage example:
--
-- > >>> encode [("John" :: Text, 27), ("Jane", 28)]
-- > Chunk "John,27\r\nJane,28\r\n" Empty
--
-- Since string literals are overloaded we have to supply a type
-- signature as the compiler couldn't deduce which string type (i.e.
-- 'String' or 'Text') we want to use. In most cases type inference
-- will infer the type from the context and you can omit type
-- signatures.
--
-- A short decoding usage example:
--
-- > >>> decode NoHeader "John,27\r\nJane,28\r\n" :: Either String (Vector (Text, Int))
-- > Right (fromList [("John",27),("Jane",28)])
--
-- We pass 'NoHeader' as the first argument to indicate that the CSV
-- input data isn't preceded by a header.
--
-- In practice, the return type of 'decode' rarely needs to be given,
-- as it can often be inferred from the context.
-- $generic-processing
--
-- Sometimes you might want to work with a CSV file whose contents are
-- unknown to you. For example, you might want to remove the second
-- column of a file without knowing anything about its content. To
-- parse a CSV file to a generic representation, just convert each
-- record to a @'Vector' 'ByteString'@ value, like so:
--
-- > decode NoHeader "John,27\r\nJane,28\r\n" :: Either String (Vector (Vector ByteString))
-- > Right (fromList [fromList ["John","27"],fromList ["Jane","28"]])
--
-- As the example output above shows, all the fields are returned as
-- uninterpreted 'ByteString' values.
-- $customtypeconversions
--
-- Most of the time the existing 'FromField' and 'ToField' instances
-- do what you want. However, if you need to parse a different format
-- (e.g. hex) but use a type (e.g. 'Int') for which there's already a
-- 'FromField' instance, you need to use a @newtype@. Example:
--
-- > newtype Hex = Hex Int
-- >
-- > parseHex :: ByteString -> Parser Int
-- > parseHex = ...
-- >
-- > instance FromField Hex where
-- >     parseField s = Hex <$> parseHex s
--
-- Other than giving an explicit type signature, you can pattern match
-- on the @newtype@ constructor to indicate which type conversion you
-- want to have the library use:
--
-- > case decode NoHeader "0xff,0xaa\r\n0x11,0x22\r\n" of
-- >   Left err -> putStrLn err
-- >   Right v  -> forM_ v $ \ (Hex val1, Hex val2) ->
-- >     print (val1, val2)
--
-- If a field might be in one of several different formats, you can use
-- a newtype to normalize the result:
--
-- > newtype HexOrDecimal = HexOrDecimal Int
-- >
-- > instance FromField HexOrDecimal where
-- >     parseField s = case runParser (parseField s :: Parser Hex) of
-- >         Left err      -> HexOrDecimal <$> parseField s  -- Uses Int instance
-- >         Right (Hex n) -> pure $ HexOrDecimal n
--
-- You can use the unit type, @()@, to ignore a column. The
-- 'parseField' method for @()@ doesn't look at the 'Field' and thus
-- always decodes successfully. Note that it lacks a corresponding
-- 'ToField' instance. Example:
--
-- > case decode NoHeader "foo,1\r\nbar,22" of
-- >   Left err -> putStrLn err
-- >   Right v  -> forM_ v $ \ ((), i) -> print (i :: Int)
-- $baddata
--
-- If your input might contain invalid fields, you can write a custom
-- 'FromField' instance to deal with them. Example:
--
-- > newtype DefaultToZero = DefaultToZero Int
-- >
-- > instance FromField DefaultToZero where
-- >     parseField s = case runParser (parseField s) of
-- >         Left err -> pure $ DefaultToZero 0
-- >         Right n  -> pure $ DefaultToZero n
-- $encoding
--
-- Encoding and decoding is a two step process. To encode a value, it
-- is first converted to a generic representation, using either
-- 'ToRecord' or 'ToNamedRecord'. The generic representation is then
-- encoded as CSV data. To decode a value the process is reversed and
-- either 'FromRecord' or 'FromNamedRecord' is used instead. Both
-- these steps are combined in the 'encode' and 'decode' functions.
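--
-- A hedged sketch of the two steps, assuming a @Person@ type with a
-- 'ToRecord' instance (such as the one shown in the index-based section
-- below):
--
-- > toRecord (Person "John" 27)  -- step 1: the generic 'Record'
-- > encode [Person "John" 27]    -- both steps combined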
-- $typeconversion
--
-- There are two ways to convert CSV records to and from
-- user-defined data types: index-based conversion and name-based
-- conversion.
-- $indexbased
--
-- Index-based conversion lets you convert CSV records to and from
-- user-defined data types by referring to a field's position (its
-- index) in the record. The first column in a CSV file is given index
-- 0, the second index 1, and so on.
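--
-- A hedged example, assuming a simple @Person@ type (the type and its
-- fields are illustrative only):
--
-- > data Person = Person { name :: Text, age :: Int }
-- >
-- > instance FromRecord Person where
-- >     parseRecord v
-- >         | length v == 2 = Person <$> v .! 0 <*> v .! 1
-- >         | otherwise     = mzero
-- >
-- > instance ToRecord Person where
-- >     toRecord (Person name' age') = record [toField name', toField age']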
-- $namebased
--
-- Name-based conversion lets you convert CSV records to and from
-- user-defined data types by referring to a field's name. The names
-- of the fields are defined by the first line in the file, also known
-- as the header. Name-based conversion is more robust to changes in
-- the file structure, e.g. to reordering or addition of columns, but can
-- be a bit slower.
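--
-- A hedged example, again assuming the illustrative @Person@ type from
-- the index-based section above:
--
-- > instance FromNamedRecord Person where
-- >     parseNamedRecord m = Person <$> m .: "name" <*> m .: "age"
-- >
-- > instance ToNamedRecord Person where
-- >     toNamedRecord (Person name' age') =
-- >         namedRecord ["name" .= name', "age" .= age']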
-- $options
--
-- These functions can be used to control how data is encoded and
-- decoded. For example, they can be used to encode data in a
-- tab-separated format instead of in a comma-separated format.
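--
-- For instance, a hedged sketch of decoding tab-separated data, assuming
-- the 'decDelimiter' field of 'DecodeOptions':
--
-- > tabOptions = defaultDecodeOptions { decDelimiter = fromIntegral (ord '\t') }
-- >
-- > decodeTab :: FromRecord a => ByteString -> Either String (Vector a)
-- > decodeTab = decodeWith tabOptions NoHeader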
-- $fieldconversion
--
-- The 'FromField' and 'ToField' classes define how to convert between
-- 'Field's and values you care about (e.g. 'Int's). Most of the time
-- you don't need to write your own instances as the standard ones
-- cover most use cases.
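--
-- A small hedged illustration (string literals are 'ByteString' fields,
-- so @OverloadedStrings@ is assumed):
--
-- > runParser (parseField "23" :: Parser Int) == Right 23
-- > toField (23 :: Int) == "23"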
| plow-technologies/cassava | Data/Csv.hs | bsd-3-clause | 6,937 | 0 | 5 | 1,492 | 351 | 292 | 59 | 43 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Minecraft.Player where
import Control.Lens.TH (makeLenses)
import Data.Data (Data)
import Data.Typeable (Typeable)
import Data.NBT
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Text (Text)
import GHC.Generics (Generic)
import Minecraft.Core (Attribute(..), Dimension(..), Int6, Item(..), GameMode(..), ToNBT(..), ToNBTContents(..), XYZ(..), Pos(..), PosKind(..))
import TextShow (showt)
data Abilities = Abilities
{ _walkSpeed :: Float -- ^ 0.1
, _flySpeed :: Float -- ^ 0.05
, _mayFly :: Bool -- ^ true
, _flying :: Bool -- ^ false
, _invulnerable :: Bool -- ^ false
, _mayBuild :: Bool -- ^ true
, _instabuild :: Bool -- ^ false
}
deriving (Eq, Ord, Read, Show, Data, Typeable, Generic)
makeLenses ''Abilities
instance ToNBT Abilities where
toNBT abilities =
NBT "abilities" (toNBTContents abilities)
instance ToNBTContents Abilities where
toNBTContents abilities =
(CompoundTag
[ NBT "walkSpeed" (FloatTag (_walkSpeed abilities))
, NBT "flySpeed" (FloatTag (_flySpeed abilities))
, NBT "mayfly" (toNBTContents (_mayFly abilities))
, NBT "flying" (toNBTContents (_flying abilities))
, NBT "invulnerable" (toNBTContents (_invulnerable abilities))
, NBT "mayBuild" (toNBTContents (_mayBuild abilities))
, NBT "instabuild" (toNBTContents (_instabuild abilities))
])
defaultAbilities :: Abilities
defaultAbilities = Abilities
{ _walkSpeed = 0.1
, _flySpeed = 0.05
, _mayFly = True
, _flying = False
, _invulnerable = False
, _mayBuild = True
, _instabuild = False
}
data InventoryItem = InventoryItem
{ _slot :: Int6
, _item :: Item
, _count :: Int6
, _damage :: Int16
}
deriving (Eq, Ord, Read, Show, Data, Typeable, Generic)
makeLenses ''InventoryItem
instance ToNBTContents InventoryItem where
toNBTContents ii =
CompoundTag [ NBT "Slot" (ByteTag (_slot ii))
, NBT "id" (StringTag (showt (_item ii)))
, NBT "Count" (ByteTag (_count ii))
, NBT "Damage" (ShortTag (_damage ii))
]
data EnderItem = EnderItem
deriving (Eq, Ord, Read, Show, Data, Typeable, Generic)
instance ToNBTContents EnderItem where
  toNBTContents EnderItem = error "ToNBTContents EnderItem not implemented."
data Player = Player
{ _dimension :: Dimension
, _playerGameType :: GameMode
, _score :: Int32
, _selectedItemSlot :: Int32
, _playerSpawn :: Maybe XYZ
-- , _spawnForced :: Bool
, _sleeping :: Bool
, _sleepTimer :: Int16 -- ^ 0, no effect
, _fire :: Int16 -- ^ -20
, _foodLevel :: Int32 -- ^ 20 is full
, _foodExhaustionLevel :: Float -- ^ 0
, _foodSaturationLevel :: Float -- ^ 5
, _foodTickTimer :: Int32 -- ^ 0
, _xpLevel :: Int32 -- ^ 0
, _xpP :: Float -- ^ 0
, _xpTotal :: Int32 -- ^ 0
, _xpSeed :: Int32
, _inventory :: [InventoryItem]
, _enderItems :: [EnderItem]
, _abilities :: Abilities
, _hurtByTimestamp :: Int32 -- ^ 0
, _hurtTime :: Int16 -- ^ 0
, _attributes :: [Attribute]
}
deriving (Eq, Ord, Read, Show, Data, Typeable, Generic)
makeLenses ''Player
instance ToNBT Player where
toNBT player =
NBT "Player" (toNBTContents player)
-- FIXME: what happens if the player spawn position includes tilde/caret
instance ToNBTContents Player where
toNBTContents player = (CompoundTag
((case _playerSpawn player of
Nothing -> []
(Just (XYZ (Pos Abs x) (Pos Abs y) (Pos Abs z))) ->
[ NBT "SpawnX" (toNBTContents x)
, NBT "SpawnY" (toNBTContents y)
, NBT "SpawnZ" (toNBTContents z)
]) ++
[ NBT "Dimension" (toNBTContents (_dimension player))
, NBT "GameType" (toNBTContents (_playerGameType player))
, NBT "Score" (toNBTContents (_score player))
, NBT "SelectedItemSlot" (toNBTContents (_selectedItemSlot player))
, NBT "Sleeping" (toNBTContents (_sleeping player))
, NBT "SleepTimer" (toNBTContents (_sleepTimer player))
, NBT "Fire" (toNBTContents (_fire player))
, NBT "foodLevel" (toNBTContents (_foodLevel player))
, NBT "foodExhaustionLevel" (toNBTContents (_foodExhaustionLevel player))
, NBT "foodSaturationLevel" (toNBTContents (_foodSaturationLevel player))
, NBT "foodTickTimer" (toNBTContents (_foodTickTimer player))
, NBT "XpLevel" (toNBTContents (_xpLevel player))
, NBT "XpP" (toNBTContents (_xpP player))
, NBT "XpTotal" (toNBTContents (_xpTotal player))
, NBT "Inventory" (toNBTContents (_inventory player))
, NBT "EnderItems" (toNBTContents (_enderItems player))
, toNBT (_abilities player)
, NBT "HurtByTimestamp" (toNBTContents (_hurtByTimestamp player))
, NBT "HurtTime" (toNBTContents (_hurtTime player))
, NBT "Attributes" (toNBTContents (_attributes player))
]))
defaultPlayer :: Int32 -- ^ XpSeed
-> Player
defaultPlayer seed = Player
{ _dimension = Overworld
, _playerGameType = SurvivalMode
, _score = 0
, _selectedItemSlot = 0
, _playerSpawn = Nothing
, _sleeping = False
, _sleepTimer = 0
, _fire = (-20)
, _foodLevel = 20
, _foodExhaustionLevel = 0
, _foodSaturationLevel = 5
, _foodTickTimer = 0
, _xpLevel = 0
, _xpP = 0
, _xpTotal = 0
, _xpSeed = seed
, _inventory = []
, _enderItems = []
, _abilities = defaultAbilities
, _hurtByTimestamp = 0
, _hurtTime = 0
, _attributes = []
}
| stepcut/minecraft-data | Minecraft/Player.hs | bsd-3-clause | 6,179 | 0 | 19 | 1,831 | 1,663 | 934 | 729 | 146 | 1 |
{-# LANGUAGE CPP, DeriveDataTypeable, GeneralizedNewtypeDeriving, Rank2Types,
RecordWildCards #-}
#if __GLASGOW_HASKELL__ >= 800
-- a) THQ works on cross-compilers and unregisterised GHCs
-- b) may make compilation faster as no dynamic loading is ever needed (not sure about this)
-- c) removes one hindrance to have code inferred as SafeHaskell safe
{-# LANGUAGE TemplateHaskellQuotes #-}
#else
{-# LANGUAGE TemplateHaskell #-}
#endif
-- |
-- Module: Data.Aeson.Types.Internal
-- Copyright: (c) 2011-2016 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Types for working with JSON data.
module Data.Aeson.Types.Internal
(
-- * Core JSON types
Value(..)
, Encoding(..)
, Series(..)
, Array
, emptyArray, isEmptyArray
, Pair
, Object
, emptyObject
-- * Type conversion
, Parser
, Result(..)
, IResult(..)
, JSONPathElement(..)
, JSONPath
, iparse
, parse
, parseEither
, parseMaybe
, modifyFailure
, formatError
, (<?>)
-- * Constructors and accessors
, object
-- * Generic and TH encoding configuration
, Options(..)
, SumEncoding(..)
, defaultOptions
, defaultTaggedObject
-- * Used for changing CamelCase names into something else.
, camelTo
, camelTo2
-- * Other types
, DotNetTime(..)
) where
import Control.Arrow (first)
import Control.Applicative
import Control.DeepSeq (NFData(..))
import Control.Monad (MonadPlus(..), ap)
import qualified Control.Monad.Fail as Fail
import Data.ByteString.Builder (Builder, char7, toLazyByteString)
import Data.Char (isLower, isUpper, toLower, isAlpha, isAlphaNum)
import Data.Data (Data)
import Data.HashMap.Strict (HashMap)
import Data.Hashable (Hashable(..))
import Data.Semigroup (Semigroup((<>)))
import Data.Scientific (Scientific)
import Data.String (IsString(..))
import Data.Text (Text, pack, unpack)
import Data.Time (UTCTime)
import Data.Time.Format (FormatTime)
import Data.Typeable (Typeable)
import Data.Vector (Vector)
import qualified Data.HashMap.Strict as H
import qualified Data.Scientific as S
import qualified Data.Vector as V
import qualified Language.Haskell.TH.Syntax as TH
#if !MIN_VERSION_base(4,8,0)
import Data.Foldable (Foldable(..))
import Data.Monoid (Monoid(..))
import Data.Traversable (Traversable(..))
#endif
-- | Elements of a JSON path used to describe the location of an
-- error.
data JSONPathElement = Key Text
-- ^ JSON path element of a key into an object,
-- \"object.key\".
| Index {-# UNPACK #-} !Int
-- ^ JSON path element of an index into an
-- array, \"array[index]\".
deriving (Eq, Show, Typeable)
type JSONPath = [JSONPathElement]
-- | The internal result of running a 'Parser'.
data IResult a = IError JSONPath String
| ISuccess a
deriving (Eq, Show, Typeable)
-- | The result of running a 'Parser'.
data Result a = Error String
| Success a
deriving (Eq, Show, Typeable)
instance NFData JSONPathElement where
rnf (Key t) = rnf t
rnf (Index i) = rnf i
instance (NFData a) => NFData (IResult a) where
rnf (ISuccess a) = rnf a
rnf (IError path err) = rnf path `seq` rnf err
instance (NFData a) => NFData (Result a) where
rnf (Success a) = rnf a
rnf (Error err) = rnf err
instance Functor IResult where
fmap f (ISuccess a) = ISuccess (f a)
fmap _ (IError path err) = IError path err
{-# INLINE fmap #-}
instance Functor Result where
fmap f (Success a) = Success (f a)
fmap _ (Error err) = Error err
{-# INLINE fmap #-}
instance Monad IResult where
return = pure
{-# INLINE return #-}
ISuccess a >>= k = k a
IError path err >>= _ = IError path err
{-# INLINE (>>=) #-}
fail = Fail.fail
{-# INLINE fail #-}
instance Fail.MonadFail IResult where
fail err = IError [] err
{-# INLINE fail #-}
instance Monad Result where
return = pure
{-# INLINE return #-}
Success a >>= k = k a
Error err >>= _ = Error err
{-# INLINE (>>=) #-}
fail = Fail.fail
{-# INLINE fail #-}
instance Fail.MonadFail Result where
fail err = Error err
{-# INLINE fail #-}
instance Applicative IResult where
pure = ISuccess
{-# INLINE pure #-}
(<*>) = ap
{-# INLINE (<*>) #-}
instance Applicative Result where
pure = Success
{-# INLINE pure #-}
(<*>) = ap
{-# INLINE (<*>) #-}
instance MonadPlus IResult where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus a@(ISuccess _) _ = a
mplus _ b = b
{-# INLINE mplus #-}
instance MonadPlus Result where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus a@(Success _) _ = a
mplus _ b = b
{-# INLINE mplus #-}
instance Alternative IResult where
empty = mzero
{-# INLINE empty #-}
(<|>) = mplus
{-# INLINE (<|>) #-}
instance Alternative Result where
empty = mzero
{-# INLINE empty #-}
(<|>) = mplus
{-# INLINE (<|>) #-}
instance Semigroup (IResult a) where
(<>) = mplus
{-# INLINE (<>) #-}
instance Monoid (IResult a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = (<>)
{-# INLINE mappend #-}
instance Semigroup (Result a) where
(<>) = mplus
{-# INLINE (<>) #-}
instance Monoid (Result a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = (<>)
{-# INLINE mappend #-}
instance Foldable IResult where
foldMap _ (IError _ _) = mempty
foldMap f (ISuccess y) = f y
{-# INLINE foldMap #-}
foldr _ z (IError _ _) = z
foldr f z (ISuccess y) = f y z
{-# INLINE foldr #-}
instance Foldable Result where
foldMap _ (Error _) = mempty
foldMap f (Success y) = f y
{-# INLINE foldMap #-}
foldr _ z (Error _) = z
foldr f z (Success y) = f y z
{-# INLINE foldr #-}
instance Traversable IResult where
traverse _ (IError path err) = pure (IError path err)
traverse f (ISuccess a) = ISuccess <$> f a
{-# INLINE traverse #-}
instance Traversable Result where
traverse _ (Error err) = pure (Error err)
traverse f (Success a) = Success <$> f a
{-# INLINE traverse #-}
-- | Failure continuation.
type Failure f r = JSONPath -> String -> f r
-- | Success continuation.
type Success a f r = a -> f r
-- | A JSON parser.
newtype Parser a = Parser {
runParser :: forall f r.
JSONPath
-> Failure f r
-> Success a f r
-> f r
}
instance Monad Parser where
m >>= g = Parser $ \path kf ks -> let ks' a = runParser (g a) path kf ks
in runParser m path kf ks'
{-# INLINE (>>=) #-}
return = pure
{-# INLINE return #-}
fail = Fail.fail
{-# INLINE fail #-}
instance Fail.MonadFail Parser where
fail msg = Parser $ \path kf _ks -> kf (reverse path) msg
{-# INLINE fail #-}
instance Functor Parser where
fmap f m = Parser $ \path kf ks -> let ks' a = ks (f a)
in runParser m path kf ks'
{-# INLINE fmap #-}
instance Applicative Parser where
pure a = Parser $ \_path _kf ks -> ks a
{-# INLINE pure #-}
(<*>) = apP
{-# INLINE (<*>) #-}
instance Alternative Parser where
empty = fail "empty"
{-# INLINE empty #-}
(<|>) = mplus
{-# INLINE (<|>) #-}
instance MonadPlus Parser where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus a b = Parser $ \path kf ks -> let kf' _ _ = runParser b path kf ks
in runParser a path kf' ks
{-# INLINE mplus #-}
instance Semigroup (Parser a) where
(<>) = mplus
{-# INLINE (<>) #-}
instance Monoid (Parser a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = (<>)
{-# INLINE mappend #-}
apP :: Parser (a -> b) -> Parser a -> Parser b
apP d e = do
b <- d
a <- e
return (b a)
{-# INLINE apP #-}
-- | A JSON \"object\" (key\/value map).
type Object = HashMap Text Value
-- | A JSON \"array\" (sequence).
type Array = Vector Value
-- | A JSON value represented as a Haskell value.
data Value = Object !Object
| Array !Array
| String !Text
| Number !Scientific
| Bool !Bool
| Null
deriving (Eq, Read, Show, Typeable, Data)
-- | An encoding of a JSON value.
newtype Encoding = Encoding {
fromEncoding :: Builder
-- ^ Acquire the underlying bytestring builder.
} deriving (Semigroup,Monoid)
instance Show Encoding where
show (Encoding e) = show (toLazyByteString e)
instance Eq Encoding where
Encoding a == Encoding b = toLazyByteString a == toLazyByteString b
instance Ord Encoding where
compare (Encoding a) (Encoding b) =
compare (toLazyByteString a) (toLazyByteString b)
-- | A series of values that, when encoded, should be separated by commas.
data Series = Empty
| Value Encoding
deriving (Typeable)
instance Semigroup Series where
Empty <> a = a
Value a <> b =
Value $
a <> case b of
Empty -> mempty
Value c -> Encoding (char7 ',') <> c
instance Monoid Series where
mempty = Empty
mappend = (<>)
-- | A newtype wrapper for 'UTCTime' that uses the same non-standard
-- serialization format as Microsoft .NET, whose
-- <https://msdn.microsoft.com/en-us/library/system.datetime(v=vs.110).aspx System.DateTime>
-- type is by default serialized to JSON as in the following example:
--
-- > /Date(1302547608878)/
--
-- The number represents milliseconds since the Unix epoch.
newtype DotNetTime = DotNetTime {
fromDotNetTime :: UTCTime
-- ^ Acquire the underlying value.
} deriving (Eq, Ord, Read, Show, Typeable, FormatTime)
instance NFData Value where
rnf (Object o) = rnf o
rnf (Array a) = V.foldl' (\x y -> rnf y `seq` x) () a
rnf (String s) = rnf s
rnf (Number n) = rnf n
rnf (Bool b) = rnf b
rnf Null = ()
instance IsString Value where
fromString = String . pack
{-# INLINE fromString #-}
hashValue :: Int -> Value -> Int
hashValue s (Object o) = H.foldl' hashWithSalt
(s `hashWithSalt` (0::Int)) o
hashValue s (Array a) = V.foldl' hashWithSalt
(s `hashWithSalt` (1::Int)) a
hashValue s (String str) = s `hashWithSalt` (2::Int) `hashWithSalt` str
hashValue s (Number n) = s `hashWithSalt` (3::Int) `hashWithSalt` n
hashValue s (Bool b) = s `hashWithSalt` (4::Int) `hashWithSalt` b
hashValue s Null = s `hashWithSalt` (5::Int)
instance Hashable Value where
hashWithSalt = hashValue
-- @since 0.11.0.0
instance TH.Lift Value where
lift Null = [| Null |]
lift (Bool b) = [| Bool b |]
lift (Number n) = [| Number (S.scientific c e) |]
where
c = S.coefficient n
e = S.base10Exponent n
lift (String t) = [| String (pack s) |]
where s = unpack t
lift (Array a) = [| Array (V.fromList a') |]
where a' = V.toList a
lift (Object o) = [| Object (H.fromList . map (first pack) $ o') |]
where o' = map (first unpack) . H.toList $ o
-- | The empty array.
emptyArray :: Value
emptyArray = Array V.empty
-- | Determines if the 'Value' is an empty 'Array'.
-- Note that: @isEmptyArray 'emptyArray'@.
isEmptyArray :: Value -> Bool
isEmptyArray (Array arr) = V.null arr
isEmptyArray _ = False
-- | The empty object.
emptyObject :: Value
emptyObject = Object H.empty
-- | Run a 'Parser'.
parse :: (a -> Parser b) -> a -> Result b
parse m v = runParser (m v) [] (const Error) Success
{-# INLINE parse #-}
-- | Run a 'Parser'.
iparse :: (a -> Parser b) -> a -> IResult b
iparse m v = runParser (m v) [] IError ISuccess
{-# INLINE iparse #-}
-- | Run a 'Parser' with a 'Maybe' result type.
parseMaybe :: (a -> Parser b) -> a -> Maybe b
parseMaybe m v = runParser (m v) [] (\_ _ -> Nothing) Just
{-# INLINE parseMaybe #-}
-- | Run a 'Parser' with an 'Either' result type. If the parse fails,
-- the 'Left' payload will contain an error message.
parseEither :: (a -> Parser b) -> a -> Either String b
parseEither m v = runParser (m v) [] onError Right
where onError path msg = Left (formatError path msg)
{-# INLINE parseEither #-}
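-- A hedged illustration (assuming a concrete parser such as the main
-- API's @parseJSON@ for 'Data.Text.Text'):
--
-- > parseEither parseJSON (String "hi") == Right ("hi" :: Text)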
-- | Annotate an error message with a
-- <http://goessner.net/articles/JsonPath/ JSONPath> error location.
formatError :: JSONPath -> String -> String
formatError path msg = "Error in " ++ (format "$" path) ++ ": " ++ msg
where
format :: String -> JSONPath -> String
format pfx [] = pfx
format pfx (Index idx:parts) = format (pfx ++ "[" ++ show idx ++ "]") parts
format pfx (Key key:parts) = format (pfx ++ formatKey key) parts
formatKey :: Text -> String
formatKey key
| isIdentifierKey strKey = "." ++ strKey
| otherwise = "['" ++ escapeKey strKey ++ "']"
where strKey = unpack key
isIdentifierKey :: String -> Bool
isIdentifierKey [] = False
isIdentifierKey (x:xs) = isAlpha x && all isAlphaNum xs
escapeKey :: String -> String
escapeKey = concatMap escapeChar
escapeChar :: Char -> String
escapeChar '\'' = "\\'"
escapeChar '\\' = "\\\\"
escapeChar c = [c]
-- | A key\/value pair for an 'Object'.
type Pair = (Text, Value)
-- | Create a 'Value' from a list of name\/value 'Pair's. If duplicate
-- keys arise, earlier keys and their associated values win.
object :: [Pair] -> Value
object = Object . H.fromList
{-# INLINE object #-}
-- | Add JSON Path context to a parser
--
-- When parsing a complex structure, it helps to annotate (sub)parsers
-- with context, so that if an error occurs, you can find its location.
--
-- > withObject "Person" $ \o ->
-- > Person
-- > <$> o .: "name" <?> Key "name"
-- > <*> o .: "age" <?> Key "age"
--
-- (Standard methods like '(.:)' already do this.)
--
-- With such annotations, if an error occurs, you will get a JSON Path
-- location of that error.
--
-- Since 0.10
(<?>) :: Parser a -> JSONPathElement -> Parser a
p <?> pathElem = Parser $ \path kf ks -> runParser p (pathElem:path) kf ks
-- | If the inner @Parser@ failed, modify the failure message using the
-- provided function. This allows you to create more descriptive error messages.
-- For example:
--
-- > parseJSON (Object o) = modifyFailure
-- > ("Parsing of the Foo value failed: " ++)
-- > (Foo <$> o .: "someField")
--
-- Since 0.6.2.0
modifyFailure :: (String -> String) -> Parser a -> Parser a
modifyFailure f (Parser p) = Parser $ \path kf ks -> p path (\p' m -> kf p' (f m)) ks
--------------------------------------------------------------------------------
-- Generic and TH encoding configuration
--------------------------------------------------------------------------------
-- | Options that specify how to encode\/decode your datatype to\/from JSON.
data Options = Options
{ fieldLabelModifier :: String -> String
-- ^ Function applied to field labels.
-- Handy for removing common record prefixes for example.
, constructorTagModifier :: String -> String
-- ^ Function applied to constructor tags which could be handy
-- for lower-casing them for example.
, allNullaryToStringTag :: Bool
-- ^ If 'True' the constructors of a datatype, with /all/
-- nullary constructors, will be encoded to just a string with
-- the constructor tag. If 'False' the encoding will always
-- follow the `sumEncoding`.
, omitNothingFields :: Bool
-- ^ If 'True' record fields with a 'Nothing' value will be
-- omitted from the resulting object. If 'False' the resulting
-- object will include those fields mapping to @null@.
, sumEncoding :: SumEncoding
-- ^ Specifies how to encode constructors of a sum datatype.
, unwrapUnaryRecords :: Bool
-- ^ Hide the field name when a record constructor has only one
-- field, like a newtype.
}
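-- A hedged usage sketch (not part of the original source): options are
-- typically built by record update on 'defaultOptions', e.g.
--
-- > defaultOptions { fieldLabelModifier = drop 1
-- >                , omitNothingFields  = True
-- >                }
--
-- where the particular modifier function depends on the record's field
-- naming convention.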
instance Show Options where
show Options{..} = "Options {" ++
"fieldLabelModifier =~ " ++
show (fieldLabelModifier "exampleField") ++ ", " ++
"constructorTagModifier =~ " ++
show (constructorTagModifier "ExampleConstructor") ++ ", " ++
"allNullaryToStringTag = " ++ show allNullaryToStringTag ++ ", " ++
"omitNothingFields = " ++ show omitNothingFields ++ ", " ++
"sumEncoding = " ++ show sumEncoding ++ ", " ++
"unwrapUnaryRecords = " ++ show unwrapUnaryRecords ++
"}"
-- | Specifies how to encode constructors of a sum datatype.
data SumEncoding =
TaggedObject { tagFieldName :: String
, contentsFieldName :: String
}
-- ^ A constructor will be encoded to an object with a field
-- 'tagFieldName' which specifies the constructor tag (modified by
-- the 'constructorTagModifier'). If the constructor is a record
-- the encoded record fields will be unpacked into this object. So
-- make sure that your record doesn't have a field with the same
-- label as the 'tagFieldName'. Otherwise the tag gets overwritten
-- by the encoded value of that field! If the constructor is not a
-- record the encoded constructor contents will be stored under
-- the 'contentsFieldName' field.
| ObjectWithSingleField
-- ^ A constructor will be encoded to an object with a single
-- field named after the constructor tag (modified by the
-- 'constructorTagModifier') which maps to the encoded contents of
-- the constructor.
| TwoElemArray
-- ^ A constructor will be encoded to a 2-element array where the
-- first element is the tag of the constructor (modified by the
-- 'constructorTagModifier') and the second element the encoded
-- contents of the constructor.
deriving (Eq, Show)
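-- As a rough illustration (assuming a constructor applied as @Foo 1 2@ of
-- some sum type; not part of the original source), the three encodings
-- produce JSON along these lines:
--
-- > TaggedObject:          {"tag":"Foo","contents":[1,2]}
-- > ObjectWithSingleField: {"Foo":[1,2]}
-- > TwoElemArray:          ["Foo",[1,2]]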
-- | Default encoding 'Options':
--
-- @
-- 'Options'
-- { 'fieldLabelModifier' = id
-- , 'constructorTagModifier' = id
-- , 'allNullaryToStringTag' = True
-- , 'omitNothingFields' = False
-- , 'sumEncoding' = 'defaultTaggedObject'
-- }
-- @
defaultOptions :: Options
defaultOptions = Options
{ fieldLabelModifier = id
, constructorTagModifier = id
, allNullaryToStringTag = True
, omitNothingFields = False
, sumEncoding = defaultTaggedObject
, unwrapUnaryRecords = False
}
-- | Default 'TaggedObject' 'SumEncoding' options:
--
-- @
-- defaultTaggedObject = 'TaggedObject'
-- { 'tagFieldName' = \"tag\"
-- , 'contentsFieldName' = \"contents\"
-- }
-- @
defaultTaggedObject :: SumEncoding
defaultTaggedObject = TaggedObject
{ tagFieldName = "tag"
, contentsFieldName = "contents"
}
-- | Converts from CamelCase to another lower case, interspersing
-- the character between all capital letters and their previous
-- entries, except those capital letters that appear together,
-- like 'API'.
--
-- For use by Aeson template haskell calls.
--
-- > camelTo '_' "CamelCaseAPI" == "camel_case_api"
camelTo :: Char -> String -> String
{-# DEPRECATED camelTo "Use camelTo2 for better results" #-}
camelTo c = lastWasCap True
where
lastWasCap :: Bool -- ^ Previous was a capital letter
-> String -- ^ The remaining string
-> String
lastWasCap _ [] = []
lastWasCap prev (x : xs) = if isUpper x
then if prev
then toLower x : lastWasCap True xs
else c : toLower x : lastWasCap True xs
else x : lastWasCap False xs
-- | Better version of 'camelTo'. Example where it works better:
--
-- > camelTo  '_' "CamelAPICase" == "camel_apicase"
-- > camelTo2 '_' "CamelAPICase" == "camel_api_case"
camelTo2 :: Char -> String -> String
camelTo2 c = map toLower . go2 . go1
where go1 "" = ""
go1 (x:u:l:xs) | isUpper u && isLower l = x : c : u : l : go1 xs
go1 (x:xs) = x : go1 xs
go2 "" = ""
go2 (l:u:xs) | isLower l && isUpper u = l : c : u : go2 xs
go2 (x:xs) = x : go2 xs
| roelvandijk/aeson | Data/Aeson/Types/Internal.hs | bsd-3-clause | 20,704 | 0 | 25 | 5,736 | 4,609 | 2,548 | 2,061 | 412 | 6 |
{-# LANGUAGE CPP #-}
-- Copyright 2019 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
-- | This module provides combinators for constructing Haskell types.
module GHC.SourceGen.Type
( HsType'
, tyPromotedVar
, stringTy
, numTy
, listTy
, listPromotedTy
, tuplePromotedTy
, (-->)
, forall'
, HsTyVarBndr'
, (==>)
, kindedVar
) where
import Data.String (fromString)
#if MIN_VERSION_ghc(9,0,0)
import GHC.Hs.Type
import GHC.Parser.Annotation
#else
import GHC.Hs.Type
#endif
import GHC.SourceGen.Syntax.Internal
import GHC.SourceGen.Lit.Internal (noSourceText)
import GHC.SourceGen.Name.Internal
import GHC.SourceGen.Type.Internal
-- | A promoted name, for example from the @DataKinds@ extension.
tyPromotedVar :: RdrNameStr -> HsType'
tyPromotedVar = withEpAnnNotUsed HsTyVar promoted . typeRdrName
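-- A hedged example in the style of the other combinators in this module
-- (assuming @DataKinds@ promoted syntax):
--
-- > 'Nothing
-- > =====
-- > tyPromotedVar "Nothing"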
stringTy :: String -> HsType'
stringTy = noExt HsTyLit . noSourceText HsStrTy . fromString
numTy :: Integer -> HsType'
numTy = noExt HsTyLit . noSourceText HsNumTy
listTy :: HsType' -> HsType'
listTy = withEpAnnNotUsed HsListTy . mkLocated
listPromotedTy :: [HsType'] -> HsType'
-- Lists of two or more elements don't need the explicit tick (`'`).
-- But for consistency, just always add it.
listPromotedTy = withPlaceHolder (withEpAnnNotUsed HsExplicitListTy promoted) . map mkLocated
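-- A hedged example (promoted list syntax from @DataKinds@):
--
-- > '[a, b]
-- > =====
-- > listPromotedTy [var "a", var "b"]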
tuplePromotedTy :: [HsType'] -> HsType'
tuplePromotedTy = withPlaceHolders (withEpAnnNotUsed HsExplicitTupleTy) . map mkLocated
-- | A function type.
--
-- > a -> b
-- > =====
-- > var "a" --> var "b"
(-->) :: HsType' -> HsType' -> HsType'
a --> b = withEpAnnNotUsed HsFunTy
#if MIN_VERSION_ghc(9,0,0)
(HsUnrestrictedArrow NormalSyntax)
#endif
(parenthesizeTypeForFun $ mkLocated a) (mkLocated b)
infixr 0 -->
-- | A type variable binding.
--
-- > forall a . T a
-- > =====
-- > forall' [bvar "a"] $ var "T" @@ var "a"
forall' :: [HsTyVarBndrS'] -> HsType' -> HsType'
forall' ts = noExt hsForAllTy (map mkLocated ts) . mkLocated
where
#if MIN_VERSION_ghc(9,2,0)
hsForAllTy x = HsForAllTy x . withEpAnnNotUsed mkHsForAllInvisTele
#elif MIN_VERSION_ghc(9,0,0)
hsForAllTy x = HsForAllTy x . mkHsForAllInvisTele
#elif MIN_VERSION_ghc(8,10,0)
fvf = ForallInvis -- "Invisible" forall, i.e., with a dot
hsForAllTy x = HsForAllTy x fvf
#else
hsForAllTy = HsForAllTy
#endif
-- | Qualify a type with constraints.
--
-- > (F x, G x) => x
-- > =====
-- > [var "F" @@ var "x", var "G" @@ var "x"] ==> var "x"
(==>) :: [HsType'] -> HsType' -> HsType'
(==>) cs = hsQualTy (mkLocated (map mkLocated cs)) . mkLocated
where
#if MIN_VERSION_ghc(9,2,0)
hsQualTy = noExt HsQualTy . Just
#else
hsQualTy = noExt HsQualTy
#endif
infixr 0 ==>
-- | A type variable with a kind signature.
--
-- > x :: A
-- > =====
-- > kindedVar "x" (var "A")
kindedVar :: OccNameStr -> HsType' -> HsTyVarBndr'
kindedVar v t = withEpAnnNotUsed KindedTyVar
#if MIN_VERSION_ghc(9,0,0)
()
#endif
(typeRdrName $ UnqualStr v) (mkLocated t)
| google/ghc-source-gen | src/GHC/SourceGen/Type.hs | bsd-3-clause | 3,157 | 0 | 10 | 604 | 552 | 322 | 230 | 46 | 1 |
module Main where
import qualified Lib as L
import System.Environment
import Data.Maybe
import Control.Monad
import qualified System.IO.Strict as S
import qualified PackageRank as PR
import qualified Data.Map.Strict as Map
import Data.List (isInfixOf)
import qualified TarUtil as T
import Pipes
import qualified Pipes.Prelude as P
import Text.Show.Pretty
import qualified Data.ByteString.Lazy.UTF8 as LBS
safeHead a [] = a
safeHead _ (a:_) = a
computeRanks indexTarPath = do
entries <- T.tarEntriesForPath indexTarPath
cabals <- P.toListM $ T.pipesTarEntries entries >-> T.pipesSelectCabals >-> T.pipesLatestVersions
-- create a map of PackageInfo records
-- create the map of rankings
let pe_to_info = L.parseCabal . LBS.toString . T.pe_content
pkgInfoMap = Map.fromList [ (T.pe_package pe, pinfo) | pe <- cabals, Just pinfo <- [ pe_to_info pe ] ]
nodes = [ (pkg, L.p_dependencies pinfo) | (pkg, pinfo) <- Map.assocs pkgInfoMap ]
rankingMap = PR.rankingStd nodes
pkgInfoMap' = Map.fromList [ (pkg, pinfo') | (pkg, pinfo) <- Map.assocs pkgInfoMap,
let pinfo' = pinfo { L.p_rank = Map.findWithDefault 0 pkg rankingMap } ]
forM_ (Map.assocs pkgInfoMap') $ \(pkg,pinfo) -> do
putStrLn $ ppShow pinfo
main6 = do
args <- getArgs
case args of
(path:_) -> do computeRanks path
_ -> error "bad usage"
main = main6
| erantapaa/parse-cabal | app/Main.hs | bsd-3-clause | 1,411 | 0 | 19 | 299 | 451 | 249 | 202 | 33 | 2 |
module Main where
import Network.Curl
import System.Environment (getArgs)
postJson :: URLString -> String -> IO CurlResponse
postJson url jsonData = do
writeFile "coverage.json" jsonData
h <- initialize
setopt h (CurlVerbose True)
setopt h (CurlURL url)
-- setopt h (CurlPost True)
-- setopt h (CurlPostFields [jsonData])
-- setopt h (CurlHttpHeaders ["Content-Type: multipart/form-data"])
setopt h (CurlHttpPost [HttpPost "json_file"
Nothing
(ContentFile "coverage.json")
[]
Nothing])
response <- perform_with_response_ h
reset h
case respCurlCode response of
CurlOK -> do
putStrLn $ respBody response
return response
c -> do
putStrLn $ show c
return response
main :: IO ()
main = do
args <- getArgs
case args of
[jobId] -> do
let jsonData = "{\"service_name\":\"travis-ci\", \"service_job_id\":\"" ++ jobId ++ "\", \"source_files\":[{\"coverage\":[null,0,1,2],\"name\":\"file1.hs\",\"source\":\"line1\\nline2\\nline3\\nline4\"}]}"
response <- postJson "https://coveralls.io/api/v1/jobs" jsonData
putStrLn jsonData
putStrLn $ show $ respStatus response
return ()
_ -> error "illegal arguments"
| guillaume-nargeot/hpc-coveralls-experiment | src/Main.hs | bsd-3-clause | 1,322 | 0 | 16 | 362 | 301 | 138 | 163 | 34 | 2 |