code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Lib
( helloWorld
) where
helloWorld :: IO ()
helloWorld = putStrLn "Hello world!"
| stackbuilders/hapistrano | example/src/Lib.hs | mit | 98 | 0 | 6 | 24 | 27 | 15 | 12 | 4 | 1 |
--map takes a function and a list and applies that function to all elements
map' :: (a -> b) -> [a] -> [b]
map' _ [] = []
map' f (x:xs) = f x : map' f xs
map1 = map' (replicate 3) [3..6]
map2 = map (map (^2)) [[1,2],[3,4,5,6],[7,8]]
--filter takes a predicate and a list and returns the elements that satisfy the predicate
filter' :: (a -> Bool) -> [a] -> [a]
filter' _ [] = []
filter' p (x:xs)
| p x = x : filter' p xs
| otherwise = filter' p xs
filter1 = filter' (>3) [1,5,3,2,1,6,4,3,2,1]
filter2 = filter' even [1..10]
filter3 = filter' (`elem` ['A'..'Z']) "i lauGh At You BecAuse u r aLL the Same"
--using filter' we can write a quicksort
quicksort' :: (Ord a) => [a] -> [a]
quicksort' [] = []
quicksort' (x:xs) =
let smallerSorted = quicksort' (filter' (<=x) xs)
biggerSorted = quicksort' (filter' (>x) xs)
in smallerSorted ++ [x] ++ biggerSorted
largestDivisible :: (Integral a) => a
largestDivisible = head (filter p [100000,99999..])
where p x = x `mod` 3829 == 0
-- find the sum of all odd squares that are smaller than 10000
--takeWhile takes a predicate and a list and returns elements from the front of the list while the predicate holds
sumOdd = sum (takeWhile (<10000) (filter odd (map (^2) [1..])))
sumOdd' = sum (takeWhile (<10000) [n^2 | n <- [1..], odd(n^2)])
-- collatz
-- how many chains have a length greater than 15
chain :: (Integral a) => a -> [a]
chain 1 = [1]
chain n
| even n = n : chain (div n 2)
| odd n = n : chain (n*3 +1)
numLongChains :: Int
numLongChains = length (filter isLong (map chain [1..100]))
where isLong xs = length xs > 15
-- we can also map partially applied functions
listOfFuns = map (*) [0..]
listOfFuns' = (listOfFuns !! 4) 5 | luisgepeto/HaskellLearning | 06 Higher Order Functions/03_maps_and_filters.hs | mit | 1,688 | 0 | 12 | 406 | 755 | 411 | 344 | 34 | 1 |
module Main where
import System (getArgs)
import System.Console.GetOpt
import System.IO
import System.Directory
import System.FS.RDFS
data Options =
Options
{ optVerbose :: Bool
, optShowVersion :: Bool
, optFiles :: [FilePath]
} deriving Show
defaultOptions =
Options
{ optVerbose = False
, optShowVersion = False
, optFiles = []
}
options :: [OptDescr (Options -> Options)]
options =
[ Option ['v'] ["verbose"]
(NoArg (\ opts -> opts{optVerbose = True}))
"chatty output on stderr"
, Option ['V','?'] ["version"]
(NoArg (\ opts -> opts{optShowVersion = True}))
"show version number"
]
cOpts :: [String] -> IO (Options, [String])
cOpts argv =
case getOpt Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defaultOptions o, n)
(_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options))
where header = "Usage: sugarsync [OPTION...] files..."
main = do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
args <- getArgs
(opts, fsopts) <- cOpts args
--print $ opts
let fuseopts = ("-f":fsopts)
--print $ fuseopts
rdfsRun "rdfs" fuseopts
printUsage = putStrLn usage
usage = "\nUsage: sugarsync"
printHelp = do
putStrLn $ "Here Help"
putStrLn $ ""
| realdesktop/rdfs | exec/Main.hs | mit | 1,348 | 0 | 13 | 342 | 430 | 238 | 192 | 43 | 2 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGForeignObjectElement
(getX, getY, getWidth, getHeight, SVGForeignObjectElement(..),
gTypeSVGForeignObjectElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGForeignObjectElement.x Mozilla SVGForeignObjectElement.x documentation>
getX ::
(MonadDOM m) => SVGForeignObjectElement -> m SVGAnimatedLength
getX self = liftDOM ((self ^. js "x") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGForeignObjectElement.y Mozilla SVGForeignObjectElement.y documentation>
getY ::
(MonadDOM m) => SVGForeignObjectElement -> m SVGAnimatedLength
getY self = liftDOM ((self ^. js "y") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGForeignObjectElement.width Mozilla SVGForeignObjectElement.width documentation>
getWidth ::
(MonadDOM m) => SVGForeignObjectElement -> m SVGAnimatedLength
getWidth self
= liftDOM ((self ^. js "width") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGForeignObjectElement.height Mozilla SVGForeignObjectElement.height documentation>
getHeight ::
(MonadDOM m) => SVGForeignObjectElement -> m SVGAnimatedLength
getHeight self
= liftDOM ((self ^. js "height") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGForeignObjectElement.hs | mit | 2,220 | 0 | 10 | 268 | 504 | 308 | 196 | 33 | 1 |
{-# LANGUAGE GADTs, DataKinds, TypeFamilies, TypeOperators, PolyKinds #-}
module ListProofs where
import Data.Singletons.Prelude
import Data.Type.Equality
import FunctionProofs
{-
data MapP f l r where
MapNil :: MapP f '[] '[]
MapCons :: MapP f as bs -> MapP f (a ': as) (Apply f a ': bs)
mapP :: SList l -> MapP f l (Map f l)
mapP SNil = MapNil
mapP (SCons _ l) = MapCons (mapP l)
data FoldRP f b l r where
FoldRNil :: FoldRP f b '[] b
FoldRCons :: FoldRP f b l r -> FoldRP f b (a ': l) (f a r)
type ConcatP s l = FoldRP '(:) l s
concatNil :: SList l -> ConcatP l '[] l
concatNil SNil = FoldRNil
concatNil (SCons _ l) = FoldRCons (concatNil l)
--concatAssociative :: Concat s l sl -> Concat sl r slr -> Concat l r lr -> Concat s lr slr
--concatAssociative FoldRNil _ _ = FoldRNil
data FoldLP :: (b -> a -> b) -> b -> [a] -> b -> * where
FoldLNil :: FoldLP f b '[] b
FoldLCons :: FoldLP f (f b a) as r -> FoldLP f b (a ': as) r
-- a zipper with no focus
data ZipperP s l r where
ZipperNil :: ZipperP r '[] r
ZipperCons :: ZipperP (a ': s) l r -> ZipperP s (a ': l) r
type ReverseP = ZipperP '[]
z2 :: SList r -> ZipperP s l r -> ReverseP l l' -> ConcatP s l' r
z2 r ZipperNil ZipperNil = concatNil r
--z2 r (ZipperCons p1) (ZipperCons p2) = FoldRCons _
--z1 :: ZipperP s l r -> ZipperP l s r' -> ReverseP r r'
--z1 ZipperNil ZipperNil =
reverseReflexive :: ReverseP l r -> ReverseP r l
reverseReflexive ZipperNil = ZipperNil
reverseReflexive (ZipperCons p) =
data Length (l :: [k]) where
LZero :: Length '[]
LSucc :: Length l -> Length (a ': l)
-}
appendNil :: SList l -> (l :~: l :++ '[])
appendNil SNil = Refl
appendNil (SCons _ l) = case appendNil l of Refl -> Refl
appendCommutative :: Commutative (:++$)
appendCommutative = Commutative f where
f :: Sing a -> Sing b -> a :++ b :~: b :++ a
f SNil b = appendNil b
blah :: Associative (:++$)
blah = Associative f where
f :: Sing a -> Sing b -> Sing c -> (a :++ b) :++ c :~: a :++ (b :++ c)
f SNil _ _ = Refl
f (SCons _ a) b c = case f a b c of Refl -> Refl
| vladfi1/hs-misc | ListProofs.hs | mit | 2,062 | 0 | 14 | 489 | 270 | 140 | 130 | 17 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Tinc.Sandbox (
PackageConfig
, Sandbox
, findPackageDb
, initSandbox
, recache
, cabalSandboxDirectory
, cabalSandboxBinDirectory
, listPackages
#ifdef TEST
, packageFromPackageConfig
, registerPackage
#endif
) where
import Control.Monad
import Control.Monad.IO.Class
import Data.List
import Data.Maybe
import System.Directory hiding (getDirectoryContents)
import System.FilePath
import Util
import Tinc.Fail
import Tinc.GhcPkg
import Tinc.Package
import Tinc.Process
import Tinc.Types
import Tinc.AddSource
data PackageConfig
data Sandbox
currentDirectory :: Path Sandbox
currentDirectory = "."
initSandbox :: (MonadIO m, Fail m, MonadProcess m) => [Path AddSource] -> [Path PackageConfig] -> m (Path PackageDb)
initSandbox addSourceDependencies packageConfigs = do
deleteSandbox
callProcessM "cabal" ["sandbox", "init"]
packageDb <- findPackageDb currentDirectory
registerPackageConfigs packageDb packageConfigs
mapM_ (\ dep -> callProcessM "cabal" ["sandbox", "add-source", path dep]) addSourceDependencies
liftIO $ createDirectoryIfMissing False cabalSandboxBinDirectory
return packageDb
deleteSandbox :: (MonadIO m, MonadProcess m) => m ()
deleteSandbox = do
exists <- liftIO $ doesDirectoryExist cabalSandboxDirectory
when exists (callProcessM "cabal" ["sandbox", "delete"])
findPackageDb :: (MonadIO m, Fail m) => Path Sandbox -> m (Path PackageDb)
findPackageDb sandbox = do
xs <- liftIO $ getDirectoryContents sandboxDir
case listToMaybe (filter isPackageDb xs) of
Just p -> liftIO $ Path <$> canonicalizePath (sandboxDir </> p)
Nothing -> dieLoc ("No package database found in " ++ show sandboxDir)
where
sandboxDir = path sandbox </> cabalSandboxDirectory
isPackageDb :: FilePath -> Bool
isPackageDb = ("-packages.conf.d" `isSuffixOf`)
cabalSandboxDirectory :: FilePath
cabalSandboxDirectory = ".cabal-sandbox"
cabalSandboxBinDirectory :: FilePath
cabalSandboxBinDirectory = cabalSandboxDirectory </> "bin"
registerPackageConfigs :: (MonadIO m, MonadProcess m) => Path PackageDb -> [Path PackageConfig] -> m ()
registerPackageConfigs _packageDb [] = return ()
registerPackageConfigs packageDb packages = do
liftIO $ forM_ packages (registerPackage packageDb)
recache packageDb
registerPackage :: Path PackageDb -> Path PackageConfig -> IO ()
registerPackage packageDb package = linkFile (path package) (path packageDb)
listPackages :: MonadIO m => Path PackageDb -> m [(Package, Path PackageConfig)]
listPackages p = do
packageConfigs <- liftIO $ filter (".conf" `isSuffixOf`) <$> getDirectoryContents (path p)
absolutePackageConfigs <- liftIO . mapM canonicalizePath $ map (path p </>) packageConfigs
let packages = map packageFromPackageConfig packageConfigs
return (zip packages (map Path absolutePackageConfigs))
packageFromPackageConfig :: FilePath -> Package
packageFromPackageConfig = parsePackage . reverse . drop 1 . dropWhile (/= '-') . reverse
recache :: MonadProcess m => Path PackageDb -> m ()
recache packageDb = callProcessM "ghc-pkg" ["--no-user-package-db", "recache", "--package-db", path packageDb]
| robbinch/tinc | src/Tinc/Sandbox.hs | mit | 3,362 | 0 | 13 | 584 | 908 | 466 | 442 | -1 | -1 |
module Day08 where
import Debug.Trace
import Data.Char
part1 :: IO ()
part1 = do
text <- readFile "lib/day08-input.txt"
let f l = length l - go l
(print . sum . map f . words) text
where
go :: String -> Int
go [] = 0
go ('\"' :xs) = go xs
go ('\\':'x':_:_:xs) = 1 + go xs
go ('\\':'\\' :xs) = 1 + go xs
go ('\\' :xs) = 1 + go xs
go ( _:xs) = 1 + go xs
part2 :: IO ()
part2 = do
text <- readFile "lib/day08-input.txt"
let g l = length (show l) - length l
(print . sum . map g . words) text
| cirquit/Personal-Repository | Haskell/Playground/AdventOfCode/advent-coding/src/Day08.hs | mit | 602 | 0 | 13 | 226 | 297 | 144 | 153 | 20 | 6 |
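A worked example of the two counts (my own, not part of the original solution): take the input line "aaa\"aaa" exactly as it appears on disk, which is 10 characters of code. In part 1, go skips the two surrounding quotes and counts the escaped quote as a single character, returning 7, so f contributes 10 - 7 = 3. In part 2, show re-escapes that line to 16 characters (two added outer quotes, plus one extra character for each of the three quote marks and the backslash), so g contributes 16 - 10 = 6.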
module Sound.Morse
( encodeString
, encodeMorse
, morseTable
, fromChar
, morseToBool
, Morse(..)
) where
import Data.List (intersperse)
import Data.Char (toLower)
import Data.Maybe (fromMaybe)
import Control.Arrow (first)
import qualified Data.Map as Map
data Morse = Dit | Dah | Pause deriving (Show, Read, Eq)
morseTable :: Map.Map Char [Morse]
morseTable = Map.fromList $ lowers ++ uppers ++ symbols ++ digits
where
lowers = first toLower <$> uppers
uppers = [ ('A', [Dit, Dah])
, ('B', [Dah, Dit, Dit, Dit])
, ('C', [Dah, Dit, Dah, Dit])
, ('D', [Dah, Dit, Dit])
, ('E', [Dit])
, ('F', [Dit, Dit, Dah, Dit])
, ('G', [Dah, Dah, Dit])
, ('H', [Dit, Dit, Dit, Dit])
, ('I', [Dit, Dit])
, ('J', [Dit, Dah, Dah, Dah])
, ('K', [Dah, Dit, Dah])
, ('L', [Dit, Dah, Dit, Dit])
, ('M', [Dah, Dah])
, ('N', [Dah, Dit])
, ('O', [Dah, Dah, Dah])
, ('P', [Dit, Dah, Dah, Dit])
, ('Q', [Dah, Dah, Dit, Dah])
, ('R', [Dit, Dah, Dit])
, ('S', [Dit, Dit, Dit])
, ('T', [Dah])
, ('U', [Dit, Dit, Dah])
, ('V', [Dit, Dit, Dit, Dah])
, ('W', [Dit, Dah, Dah])
, ('X', [Dah, Dit, Dit, Dah])
, ('Y', [Dah, Dit, Dah, Dah])
, ('Z', [Dah, Dah, Dit, Dit])
, ('À', [Dit, Dah, Dah, Dit, Dah])
, ('Å', [Dit, Dah, Dah, Dit, Dah])
, ('Ä', [Dit, Dah, Dit, Dah])
, ('È', [Dit, Dah, Dit, Dit, Dah])
, ('É', [Dit, Dit, Dah, Dit, Dit])
, ('Ö', [Dah, Dah, Dah, Dit])
, ('Ü', [Dit, Dit, Dah, Dah])
, ('ß', [Dit, Dit, Dit, Dah, Dah, Dit, Dit])
]
digits = [ ('0', [Dah, Dah, Dah, Dah, Dah])
, ('1', [Dit, Dah, Dah, Dah, Dah])
, ('2', [Dit, Dit, Dah, Dah, Dah])
, ('3', [Dit, Dit, Dit, Dah, Dah])
, ('4', [Dit, Dit, Dit, Dit, Dah])
, ('5', [Dit, Dit, Dit, Dit, Dit])
, ('6', [Dah, Dit, Dit, Dit, Dit])
, ('7', [Dah, Dah, Dit, Dit, Dit])
, ('8', [Dah, Dah, Dah, Dit, Dit])
, ('9', [Dah, Dah, Dah, Dah, Dit])
]
symbols = [ ('.', [Dit, Dah, Dit, Dah, Dit, Dah])
, (',', [Dah, Dah, Dit, Dit, Dah, Dah])
, (':', [Dah, Dah, Dah, Dit, Dit, Dit])
, (';', [Dah, Dit, Dah, Dit, Dah, Dit])
, ('?', [Dit, Dit, Dah, Dah, Dit, Dah])
, ('-', [Dah, Dit, Dit, Dit, Dit, Dah])
, ('_', [Dit, Dit, Dah, Dah, Dit, Dah])
, ('(', [Dah, Dit, Dah, Dah, Dit])
, (')', [Dah, Dit, Dah, Dah, Dit, Dah])
, ('\'', [Dit, Dah, Dah, Dah, Dah, Dit])
, ('=', [Dah, Dit, Dit, Dit, Dah])
, ('+', [Dit, Dah, Dit, Dah, Dit])
, ('/', [Dah, Dit, Dit, Dah, Dit])
, ('@', [Dit, Dah, Dah, Dit, Dah, Dit])
, (' ', [Pause])
, ('\EOT', [Dit, Dah, Dit, Dah, Dit])
]
fromChar :: Char -> [Morse]
fromChar = fromMaybe [] . (`Map.lookup` morseTable)
boolFilter :: [a] -> [a] -> Int -> [Bool] -> [a]
boolFilter tone silence dit (x:xs)
= let tone' = case x of
True -> tone
False -> silence
in take dit tone' ++ boolFilter (drop dit tone) (drop dit silence) dit xs
boolFilter _ _ _ _ = []
morseToBool :: [Morse] -> [Bool]
morseToBool (Dit:xs) = True : morseToBool xs
morseToBool (Dah:xs) = True : True : True : morseToBool xs
morseToBool (Pause:xs) = False : morseToBool xs
morseToBool _ = []
encodeMorse :: [a] -> [a] -> Int -> [Morse] -> [a]
encodeMorse tone silence dit
= boolFilter tone silence dit . morseToBool
encodeString :: [a] -> [a] -> Int -> String -> [a]
encodeString tone silence dit
= concat
. (encodeMorse tone silence dit
. (++ [Pause, Pause, Pause])
. intersperse Pause
. fromChar <$>)
| fritz0705/morse | Sound/Morse.hs | mit | 4,126 | 0 | 11 | 1,514 | 1,889 | 1,191 | 698 | 100 | 2 |
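A small GHCi-style sketch of the encoding pipeline above (my own illustration, not part of the original module); the repeated 'x' and '.' streams stand in for real audio sample buffers:
ghci> fromChar 'S'
[Dit,Dit,Dit]
ghci> morseToBool [Dit,Pause,Dah]
[True,False,True,True,True]
ghci> encodeMorse (repeat 'x') (repeat '.') 1 [Dit,Pause,Dah]
"x.xxx"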
-- | First-order logic constants.
-----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE UnicodeSyntax #-}
-- Adapted from AgdaLight (Plugins.FOL.Constants).
module Apia.FOL.Constants
( lTrue
, lFalse
, lNot
, lAnd
, lOr
, lCond
, lBicond1
, lBicond2
, lForAll
, lExists
, lEquals
) where
------------------------------------------------------------------------------
import Apia.Prelude
------------------------------------------------------------------------------
-- | Identifiers recognized by the logic translator.
lTrue
, lFalse
, lNot
, lAnd
, lOr
, lCond
, lBicond1
, lBicond2
, lExists
, lForAll
, lEquals ∷ String
lTrue = "⊤"
lFalse = "⊥"
lNot = "¬"
lAnd = "∧"
lOr = "∨"
lCond = "⇒" -- The non-dependent function space @→@ can be used
-- instead.
lBicond1 = "↔"
lBicond2 = "⇔"
lExists = "∃"
lForAll = "⋀" -- The dependent function space @∀ x → A@ can be used
-- instead.
lEquals = "≡"
| asr/apia | src/Apia/FOL/Constants.hs | mit | 1,100 | 0 | 4 | 256 | 139 | 99 | 40 | 37 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DeriveGeneric #-}
import GHC.Generics
import Test.SmallCheck
import Test.SmallCheck.Series
data NList1 a
= Empty
| ConsElem a (NList1 a)
| ConsList (NList1 a) (NList1 a)
deriving (Show, Eq, Generic)
data NList2 a
= Elem a
| List [NList2 a]
deriving (Show, Generic)
instance Eq a => Eq (NList2 a) where
(Elem a) == (Elem b) = a == b
(List a) == (List b) = a == b
a == b = convert a == convert b
instance Serial m a => Serial m (NList1 a)
instance Serial m a => Serial m (NList2 a)
test_list1 :: NList1 Integer
test_list1 = ConsElem 2 $ ConsList (ConsElem 2 $ ConsElem 3 $ Empty)
$ ConsElem 4 Empty
test_list2 :: NList2 Integer
test_list2 = List [Elem 2, List [Elem 2, Elem 3], Elem 4]
from1To2 :: NList1 a -> NList2 a
from1To2 Empty = List []
from1To2 (ConsElem a b) = let List b' = from1To2 b in List $ Elem a : b'
from1To2 (ConsList a b) =
let
List a' = from1To2 a
List b' = from1To2 b
in List $ List a' : b'
from2To1' :: NList2 a -> NList1 a
from2To1' (List []) = Empty
from2To1' (List (Elem a:xs)) = ConsElem a (from2To1 $ List xs)
from2To1' (List (List a:xs)) = ConsList (from2To1 $ List a) (from2To1 $ List xs)
from2To1 = from2To1' . convert
convert (Elem x) = List [Elem x]
convert x = x
main = do
let depth = 4
putStrLn "1 -> 2 -> 1 =?= id"
smallCheck depth $ \l -> from2To1 (from1To2 l) == (l :: NList1 ())
putStrLn "2 -> 1 -> 2 =?= id"
smallCheck depth $ \l -> from1To2 (from2To1 l) == (l :: NList2 ())
| mihaimaruseac/blog-demos | lists/list.hs | mit | 1,554 | 6 | 12 | 351 | 753 | 361 | 392 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module: Database.Neo4j
-- Copyright: (c) 2014, Antoni Silvestre
-- License: MIT
-- Maintainer: Antoni Silvestre <antoni.silvestre@gmail.com>
-- Stability: experimental
-- Portability: portable
--
-- Library to interact with the Neo4j REST API.
--
module Database.Neo4j (
-- * How to use this library
-- $use
-- * Connection handling objects
Connection, Hostname, Port, Credentials, newConnection, withConnection, newAuthConnection, withAuthConnection,
newSecureConnection, withSecureConnection, newSecureAuthConnection, withSecureAuthConnection,
-- * Main monadic type to handle sequences of commands to Neo4j
Neo4j(..),
-- * Constructing and managing node/relationship properties
Val(..), PropertyValue(..), newval, (|:), Properties, emptyProperties, getProperties, getProperty, setProperties,
setProperty, deleteProperties, deleteProperty,
-- * Managing nodes
Node, getNodeProperties, createNode, getNode, deleteNode, nodeId, nodePath, runNodeIdentifier, NodeIdentifier(..),
NodePath(..),
-- * Managing relationships
Relationship, Direction(..), RelationshipType, createRelationship, getRelationship, deleteRelationship,
getRelationships, relId, relPath, allRelationshipTypes, getRelProperties, getRelType, runRelIdentifier,
getRelationshipFrom, getRelationshipTo, RelIdentifier(..), RelPath(..),
-- * Managing labels and getting nodes by label
EntityIdentifier(..), Label, allLabels, getLabels, getNodesByLabelAndProperty, addLabels, changeLabels,
removeLabel,
-- * Indexes
Index(..), createIndex, getIndexes, dropIndex,
-- * Exceptions
Neo4jException(..),
-- * Database version information
getDatabaseVersion,
) where
import Database.Neo4j.Http
import Database.Neo4j.Index
import Database.Neo4j.Label
import Database.Neo4j.Node
import Database.Neo4j.Property
import Database.Neo4j.Relationship
import Database.Neo4j.Types
import Database.Neo4j.Version
-- $use
--
-- In order to start issuing commands to neo4j you must establish a connection, in order to do that you can use
-- the function 'withConnection':
--
-- > withConnection "127.0.0.1" 7474 $ do
-- > neo <- createNode M.empty
-- > cypher <- createNode M.empty
-- > r <- createRelationship "KNOWS" M.empty neo cypher
-- > ...
--
-- Also most calls have a batch analogue version, with batch mode you can issue several commands to Neo4j at once.
-- In order to issue batches you must use the "Database.Neo4j.Batch" monad, parameters in batch mode can be actual
-- entities already obtained by issuing regular commands or previous batch commands, or even batch futures,
-- that is you can refer to entities created in the same batch, for instance:
--
-- > withConnection "127.0.0.1" 7474 $ do
-- > g <- B.runBatch $ do
-- > neo <- B.createNode M.empty
-- > cypher <- B.createNode M.empty
-- > B.createRelationship "KNOWS" M.empty neo cypher
-- > ...
--
-- As you can see this example does the same thing the previous one does but it will be more efficient as it will
-- be translated into only one request to the database.
--
-- Batch commands return a "Database.Neo4j.Graph" object that holds all the information about relationships, nodes
-- and their labels that can be inferred from running a batch command.
--
-- Another example with batches would be for instance remove all the nodes in a "Database.Neo4j.Graph" object
--
-- > withConnection "127.0.0.1" 7474 $ do
-- > ...
-- > B.runBatch $ mapM_ B.deleteNode (G.getNodes gp)
--
-- For more information about batch commands and graph objects you can refer to their "Database.Neo4j.Batch" and
-- "Database.Neo4j.Graph" modules.
--
-- Properties are hashmaps with key 'Data.Text' and values a custom type called 'PropertyValue'.
-- This custom type tries to use Haskell's type system to match property values to what Neo4j expects, we only allow
-- 'Int64', 'Double', 'Bool' and 'Text' like values and one-level arrays of these.
-- The only restriction we cannot guarantee with these types is that arrays of values must be of the same type.
--
-- In order to create a 'PropertyValue' from a literal or a value of one of the allowed types you can use the 'newval'
-- function or the operator '|:' to create pairs of key values:
--
-- > import qualified Data.HashMap.Lazy as M
-- >
-- > myval = newval False
-- > someProperties = M.fromList ["mytext" |: ("mytext" :: T.Text),
-- > "textarrayprop" |: ["a" :: T.Text, "", "adeu"],
-- > "int" |: (-12 :: Int64),
-- > "intarray" |: [1 :: Int64, 2],
-- > "double" |: (-12.23 :: Double),
-- > "doublearray" |: [0.1, -12.23 :: Double],
-- > "bool" |: False,
-- > "aboolproparray" |: [False, True]
-- > ]
--
-- When unexpected errors occur a 'Neo4jException' will be raised, sometimes with a specific exception value like for
-- instance 'Neo4jNoEntityException', or more generic ones like 'Neo4jHttpException' or 'Neo4jParseException'
-- if the server returns something totally unexpected. (I'm sure there's still work to do here preparing the code
-- to return more specific exceptions for known scenarios)
--
-- About Cypher support for now we allow sending queries with parameters, the result is a collection of column headers
-- and JSON data values, the Graph object has the function addCypher that tries to find
-- nodes and relationships in a cypher query result and insert them in a "Database.Neo4j.Graph" object
--
-- > import qualified Database.Neo4j.Cypher as C
-- >
-- > withConnection host port $ do
-- > ...
-- > -- Run a cypher query with parameters
-- > res <- C.cypher "CREATE (n:Person { name : {name} }) RETURN n" M.fromList [("name", C.newparam ("Pep" :: T.Text))]
-- >
-- > -- Get all nodes and relationships that this query returned and insert them in a Graph object
-- > let graph = G.addCypher (C.fromSuccess res) G.empty
-- >
-- > -- Get the column headers
-- > let columnHeaders = C.cols $ C.fromSuccess res
-- >
-- > -- Get the rows of JSON values received
-- > let values = C.vals $ C.fromSuccess res
| asilvestre/haskell-neo4j-rest-client | src/Database/Neo4j.hs | mit | 6,370 | 0 | 5 | 1,339 | 406 | 310 | 96 | 25 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances, OverloadedStrings,
ScopedTypeVariables, DeriveDataTypeable, CPP #-}
#if MIN_VERSION_base(4,8,0)
#else
{-# LANGUAGE OverlappingInstances #-}
#endif
{- Copyright (C) 2012-2015 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.Custom
Copyright : Copyright (C) 2012-2015 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to custom markup using
a lua writer.
-}
module Text.Pandoc.Writers.Custom ( writeCustom ) where
import Text.Pandoc.Definition
import Text.Pandoc.Options
import Data.List ( intersperse )
import Data.Char ( toLower )
import Data.Typeable
import Scripting.Lua (LuaState, StackValue, callfunc)
import Text.Pandoc.Writers.Shared
import qualified Scripting.Lua as Lua
import qualified Text.Pandoc.UTF8 as UTF8
import Data.Monoid
import Control.Monad (when)
import Control.Exception
import qualified Data.Map as M
import Text.Pandoc.Templates
import GHC.IO.Encoding (getForeignEncoding,setForeignEncoding, utf8)
attrToMap :: Attr -> M.Map String String
attrToMap (id',classes,keyvals) = M.fromList
$ ("id", id')
: ("class", unwords classes)
: keyvals
getList :: StackValue a => LuaState -> Int -> IO [a]
getList lua i' = do
continue <- Lua.next lua i'
if continue
then do
next <- Lua.peek lua (-1)
Lua.pop lua 1
x <- maybe (fail "peek returned Nothing") return next
rest <- getList lua i'
return (x : rest)
else return []
#if MIN_VERSION_hslua(0,4,0)
instance {-# OVERLAPS #-} StackValue [Char] where
push lua cs = Lua.push lua (UTF8.fromString cs)
peek lua i = do
res <- Lua.peek lua i
return $ UTF8.toString `fmap` res
#else
#if MIN_VERSION_base(4,8,0)
instance {-# OVERLAPS #-} StackValue a => StackValue [a] where
#else
instance StackValue a => StackValue [a] where
#endif
push lua xs = do
Lua.createtable lua (length xs + 1) 0
let addValue (i, x) = Lua.push lua x >> Lua.rawseti lua (-2) i
mapM_ addValue $ zip [1..] xs
peek lua i = do
top <- Lua.gettop lua
let i' = if i < 0 then top + i + 1 else i
Lua.pushnil lua
lst <- getList lua i'
Lua.pop lua 1
return (Just lst)
valuetype _ = Lua.TTABLE
#endif
instance StackValue Format where
push lua (Format f) = Lua.push lua (map toLower f)
peek l n = fmap Format `fmap` Lua.peek l n
valuetype _ = Lua.TSTRING
instance (StackValue a, StackValue b) => StackValue (M.Map a b) where
push lua m = do
let xs = M.toList m
Lua.createtable lua (length xs + 1) 0
let addValue (k, v) = Lua.push lua k >> Lua.push lua v >>
Lua.rawset lua (-3)
mapM_ addValue xs
peek _ _ = undefined -- not needed for our purposes
valuetype _ = Lua.TTABLE
instance (StackValue a, StackValue b) => StackValue (a,b) where
push lua (k,v) = do
Lua.createtable lua 2 0
Lua.push lua k
Lua.push lua v
Lua.rawset lua (-3)
peek _ _ = undefined -- not needed for our purposes
valuetype _ = Lua.TTABLE
#if MIN_VERSION_base(4,8,0)
instance {-# OVERLAPS #-} StackValue [Inline] where
#else
instance StackValue [Inline] where
#endif
push l ils = Lua.push l =<< inlineListToCustom l ils
peek _ _ = undefined
valuetype _ = Lua.TSTRING
#if MIN_VERSION_base(4,8,0)
instance {-# OVERLAPS #-} StackValue [Block] where
#else
instance StackValue [Block] where
#endif
push l ils = Lua.push l =<< blockListToCustom l ils
peek _ _ = undefined
valuetype _ = Lua.TSTRING
instance StackValue MetaValue where
push l (MetaMap m) = Lua.push l m
push l (MetaList xs) = Lua.push l xs
push l (MetaBool x) = Lua.push l x
push l (MetaString s) = Lua.push l s
push l (MetaInlines ils) = Lua.push l ils
push l (MetaBlocks bs) = Lua.push l bs
peek _ _ = undefined
valuetype (MetaMap _) = Lua.TTABLE
valuetype (MetaList _) = Lua.TTABLE
valuetype (MetaBool _) = Lua.TBOOLEAN
valuetype (MetaString _) = Lua.TSTRING
valuetype (MetaInlines _) = Lua.TSTRING
valuetype (MetaBlocks _) = Lua.TSTRING
instance StackValue Citation where
push lua cit = do
Lua.createtable lua 6 0
let addValue (k :: String, v) = Lua.push lua k >> Lua.push lua v >>
Lua.rawset lua (-3)
addValue ("citationId", citationId cit)
addValue ("citationPrefix", citationPrefix cit)
addValue ("citationSuffix", citationSuffix cit)
addValue ("citationMode", show (citationMode cit))
addValue ("citationNoteNum", citationNoteNum cit)
addValue ("citationHash", citationHash cit)
peek = undefined
valuetype _ = Lua.TTABLE
data PandocLuaException = PandocLuaException String
deriving (Show, Typeable)
instance Exception PandocLuaException
-- | Convert Pandoc to custom markup.
writeCustom :: FilePath -> WriterOptions -> Pandoc -> IO String
writeCustom luaFile opts doc@(Pandoc meta _) = do
luaScript <- UTF8.readFile luaFile
enc <- getForeignEncoding
setForeignEncoding utf8
lua <- Lua.newstate
Lua.openlibs lua
status <- Lua.loadstring lua luaScript luaFile
-- check for error in lua script (later we'll change the return type
-- to handle this more gracefully):
when (status /= 0) $
#if MIN_VERSION_hslua(0,4,0)
Lua.tostring lua 1 >>= throw . PandocLuaException . UTF8.toString
#else
Lua.tostring lua 1 >>= throw . PandocLuaException
#endif
Lua.call lua 0 0
-- TODO - call hierarchicalize, so we have that info
rendered <- docToCustom lua opts doc
context <- metaToJSON opts
(blockListToCustom lua)
(inlineListToCustom lua)
meta
Lua.close lua
setForeignEncoding enc
let body = rendered
if writerStandalone opts
then do
let context' = setField "body" body context
return $ renderTemplate' (writerTemplate opts) context'
else return body
docToCustom :: LuaState -> WriterOptions -> Pandoc -> IO String
docToCustom lua opts (Pandoc (Meta metamap) blocks) = do
body <- blockListToCustom lua blocks
callfunc lua "Doc" body metamap (writerVariables opts)
-- | Convert Pandoc block element to Custom.
blockToCustom :: LuaState -- ^ Lua state
-> Block -- ^ Block element
-> IO String
blockToCustom _ Null = return ""
blockToCustom lua (Plain inlines) = callfunc lua "Plain" inlines
blockToCustom lua (Para [Image txt (src,tit)]) =
callfunc lua "CaptionedImage" src tit txt
blockToCustom lua (Para inlines) = callfunc lua "Para" inlines
blockToCustom lua (RawBlock format str) =
callfunc lua "RawBlock" format str
blockToCustom lua HorizontalRule = callfunc lua "HorizontalRule"
blockToCustom lua (Header level attr inlines) =
callfunc lua "Header" level inlines (attrToMap attr)
blockToCustom lua (CodeBlock attr str) =
callfunc lua "CodeBlock" str (attrToMap attr)
blockToCustom lua (BlockQuote blocks) = callfunc lua "BlockQuote" blocks
blockToCustom lua (Table capt aligns widths headers rows') =
callfunc lua "Table" capt (map show aligns) widths headers rows'
blockToCustom lua (BulletList items) = callfunc lua "BulletList" items
blockToCustom lua (OrderedList (num,sty,delim) items) =
callfunc lua "OrderedList" items num (show sty) (show delim)
blockToCustom lua (DefinitionList items) =
callfunc lua "DefinitionList" items
blockToCustom lua (Div attr items) =
callfunc lua "Div" items (attrToMap attr)
-- | Convert list of Pandoc block elements to Custom.
blockListToCustom :: LuaState -- ^ Options
-> [Block] -- ^ List of block elements
-> IO String
blockListToCustom lua xs = do
blocksep <- callfunc lua "Blocksep"
bs <- mapM (blockToCustom lua) xs
return $ mconcat $ intersperse blocksep bs
-- | Convert list of Pandoc inline elements to Custom.
inlineListToCustom :: LuaState -> [Inline] -> IO String
inlineListToCustom lua lst = do
xs <- mapM (inlineToCustom lua) lst
return $ concat xs
-- | Convert Pandoc inline element to Custom.
inlineToCustom :: LuaState -> Inline -> IO String
inlineToCustom lua (Str str) = callfunc lua "Str" str
inlineToCustom lua Space = callfunc lua "Space"
inlineToCustom lua (Emph lst) = callfunc lua "Emph" lst
inlineToCustom lua (Strong lst) = callfunc lua "Strong" lst
inlineToCustom lua (Strikeout lst) = callfunc lua "Strikeout" lst
inlineToCustom lua (Superscript lst) = callfunc lua "Superscript" lst
inlineToCustom lua (Subscript lst) = callfunc lua "Subscript" lst
inlineToCustom lua (SmallCaps lst) = callfunc lua "SmallCaps" lst
inlineToCustom lua (Quoted SingleQuote lst) = callfunc lua "SingleQuoted" lst
inlineToCustom lua (Quoted DoubleQuote lst) = callfunc lua "DoubleQuoted" lst
inlineToCustom lua (Cite cs lst) = callfunc lua "Cite" lst cs
inlineToCustom lua (Code attr str) =
callfunc lua "Code" str (attrToMap attr)
inlineToCustom lua (Math DisplayMath str) =
callfunc lua "DisplayMath" str
inlineToCustom lua (Math InlineMath str) =
callfunc lua "InlineMath" str
inlineToCustom lua (RawInline format str) =
callfunc lua "RawInline" format str
inlineToCustom lua (LineBreak) = callfunc lua "LineBreak"
inlineToCustom lua (Link txt (src,tit)) =
callfunc lua "Link" txt src tit
inlineToCustom lua (Image alt (src,tit)) =
callfunc lua "Image" alt src tit
inlineToCustom lua (Note contents) = callfunc lua "Note" contents
inlineToCustom lua (Span attr items) =
callfunc lua "Span" items (attrToMap attr)
| Thell/pandoc | src/Text/Pandoc/Writers/Custom.hs | gpl-2.0 | 10,247 | 0 | 14 | 2,106 | 3,012 | 1,493 | 1,519 | 200 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Nova.Config
where
import Config (Database(..), ServerType(..), BaseConfig(..))
import Data.Aeson.TH (deriveJSON, defaultOptions)
import Data.Default (Default(..))
import System.Log.Logger (Priority(NOTICE))
data NovaConfig = NovaConfig
{ certificateFile :: FilePath -- TLS runner checks if this file exists
, keyFile :: FilePath -- TLS runner checks if this file exists
, port :: Int -- Port won't bind if it's busy
, endpoint :: Maybe String
, database :: Database
, logLevel :: Priority
, serverType :: ServerType
}
defaultConfig :: NovaConfig
defaultConfig = NovaConfig
{ certificateFile = "server.crt"
, keyFile = "server.key"
, port = defaultPort
, endpoint = Nothing
, database = Database
{ dbHost = "localhost"
, dbPort = 27017
, dbName = "nova"
}
, logLevel = NOTICE
, serverType = Plain
}
where defaultPort = 8774
instance Default NovaConfig where
def = defaultConfig
instance BaseConfig NovaConfig where
getEndpoint = endpoint
getServerType = serverType
confFileName :: String
confFileName = "nova.conf"
$(deriveJSON defaultOptions ''NovaConfig)
| VictorDenisov/keystone | src/Nova/Config.hs | gpl-2.0 | 1,690 | 0 | 9 | 751 | 269 | 169 | 100 | 35 | 1 |
module Interpreter.Repl
(getInputLine)
where
import System.IO
import System.Console.ANSI
import Control.Applicative
import Data.Maybe (fromMaybe)
import Interpreter.Types
keycodes :: [(String, Key)]
keycodes = [ ("[D", KeyLeft)
, ("[C", KeyRight)
, ("[A", KeyUp)
, ("[B", KeyDown)
, ("[7", KeyPos1)
, ("OH", KeyPos1)
, ("OF", KeyEnd)
, ("[8", KeyEnd)
, ("[3", KeyDel)
-- xterm:
, ("[1;5D", KeyLeft)
, ("[1;5C", KeyRight)
-- rxvt
, ("[OD", KeyLeft)
, ("[OC", KeyRight) ]
getChars :: IO String
getChars = do
x <- getChar
loop [x]
where
loop xs = do
r <- hReady stdin
case r of
False -> return $ reverse xs
True -> do
x <- getChar
loop (x:xs)
parseInput :: String -> Input
parseInput ('\ESC':xs) = Special $ fromMaybe KeyUnknown (lookup xs keycodes)
parseInput ('\DEL':[]) = Special KeyBack
parseInput ('\n':[]) = Special KeyEnter
parseInput ('\t':[]) = Special KeyTab
parseInput ('\EOT':[]) = Normal "\EOT"
parseInput x = Normal x
getInput :: IO Input
getInput = parseInput <$> getChars
getInputLine :: String -> [String] -> IO String
getInputLine pre buf = putStr pre *> (handle [] 0 =<< getInput)
where
handle _ _ (Normal "\EOT") = putStr "\n" *> return "\EOT"
handle xs bufpos (Normal x) = putStr x *> get (x ++ xs) bufpos
handle xs _ (Special KeyEnter) = putStr "\n" *> return (reverse xs)
handle xs bufpos (Special KeyBack) = delChar xs bufpos
handle x bufpos (Special KeyUp)
| bufpos +1 == buflen = get x bufpos
| otherwise = replaceLine pre (buffer !! (bufpos +1)) *> get (buffer !! (bufpos + 1)) (bufpos + 1)
handle x bufpos (Special KeyDown)
| bufpos == 0 = get x bufpos
| otherwise = replaceLine pre (buffer !! (bufpos -1)) *> get (buffer !! (bufpos - 1)) (bufpos - 1)
handle xs bufpos _ = get xs bufpos
get x bufpos = handle x bufpos =<< getInput
delChar [] bufpos = get [] bufpos
delChar xs bufpos = termDelChar >> get (tail xs) bufpos
buflen = length buffer
buffer = "" : buf
replaceLine :: String -> String -> IO ()
replaceLine pre re = clearLine *> cursorBackward 255 *> (putStr $ pre ++ re)
termDelChar :: IO ()
termDelChar = cursorBackward 1 >> clearFromCursorToLineEnd
| felixsch/simplelisp | src/Interpreter/Repl.hs | gpl-2.0 | 2,506 | 0 | 16 | 780 | 953 | 496 | 457 | 63 | 8 |
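A hypothetical driver for the line editor above (not part of the original module); the prompt and history values are invented, and terminal echo is switched off on the assumption that it would otherwise double the characters getInputLine already echoes via putStr:
import System.IO
import Interpreter.Repl (getInputLine)
main :: IO ()
main = do
  hSetBuffering stdin NoBuffering
  hSetBuffering stdout NoBuffering
  hSetEcho stdin False                           -- the editor does its own echoing
  line <- getInputLine "> " ["previous input"]   -- "> " prompt, one history entry for the up arrow
  putStrLn ("you entered: " ++ line)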
module Game.Folivora.Sound where
import Control.Concurrent
import Data.IORef
import Data.List
import Control.Monad ( when, unless )
import Sound.ALUT
type SoundCommand = Maybe String
type Sound = SoundCommand
data Channel = ChannelPlaying ThreadId | ChannelToPlay SoundCommand | ChannelGarbage deriving (Eq, Show)
type NowPlaying = [Channel]
newPlayerState :: IO (IORef NowPlaying)
newPlayerState = newIORef []
renderSound :: IORef NowPlaying -> SoundCommand -> IO ()
renderSound _ Nothing = return ()
renderSound ref sound = atomicModifyIORef' ref $ \lst -> (ChannelToPlay sound : lst, ())
playerThread :: IORef NowPlaying -> IO ThreadId
playerThread nowPlayingRef = forkIO $ withProgNameAndArgs runALUT $ \_ _ -> thread nowPlayingRef
where
thread nowPlayingRef = do
channels <- atomicModifyIORef nowPlayingRef $
\chans ->
let chans' = filter (/= ChannelGarbage) chans
(toPlay, playing) = partition isToPlay chans'
isToPlay (ChannelToPlay _) = True
isToPlay _ = False
in
(playing, toPlay)
channels' <- mapM processChannel channels
atomicModifyIORef nowPlayingRef $ \chans -> (chans ++ channels', ())
thread nowPlayingRef
processChannel :: Channel -> IO Channel
processChannel (ChannelToPlay (Just sound)) = do
tid <- playFileThreaded nowPlayingRef sound
return $ ChannelPlaying tid
processChannel (ChannelToPlay Nothing) = return ChannelGarbage
processChannel x = return x
playFileThreaded :: IORef NowPlaying -> FilePath -> IO ThreadId
playFileThreaded np fp = do
tidRef <- newEmptyMVar
tid <- forkIO $ do
tid <- takeMVar tidRef
playFile np tid fp
putMVar tidRef tid
return tid
-- taken from ALUT examples..
playFile :: IORef NowPlaying -> ThreadId -> FilePath -> IO ()
playFile nowPlayingRef tid fileName = do
-- Create an AL buffer from the given sound file.
buf <- createBuffer (File fileName)
-- Generate a single source, attach the buffer to it and start playing.
source <- genObjectName
buffer source $= Just buf
play [source]
-- Check every 0.1 seconds if the sound is still playing.
let waitWhilePlaying = do
sleep 0.1
state <- get (sourceState source)
when (state == Playing) $
waitWhilePlaying
waitWhilePlaying
atomicModifyIORef nowPlayingRef $ \chans -> (map (removeMe tid) chans, ())
where
removeMe :: ThreadId -> Channel -> Channel
removeMe tid (ChannelPlaying tid') | tid == tid' = ChannelGarbage
removeMe _ x = x
| caryoscelus/folivora-ge | src/Game/Folivora/Sound.hs | gpl-3.0 | 2,882 | 0 | 18 | 895 | 757 | 372 | 385 | 58 | 4 |
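A hypothetical usage sketch for the player above (not part of the original module); the wav path is invented, and ALUT must be available at runtime for playFile to succeed:
import Control.Concurrent (threadDelay)
import Game.Folivora.Sound
main :: IO ()
main = do
  np <- newPlayerState                      -- shared list of channels
  _  <- playerThread np                     -- start the ALUT-backed player loop
  renderSound np (Just "assets/ping.wav")   -- queue one sound; Nothing would be a no-op
  threadDelay 2000000                       -- keep the process alive while it plays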
{-# LANGUAGE CPP, GADTs, RankNTypes, ScopedTypeVariables, TypeFamilies #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-- | Utilities for clients of Hoopl, not used internally.
module Compiler.Hoopl.XUtil
(
-- * Utilities for clients
firstXfer, distributeXfer
, distributeFact, distributeFactBwd
, successorFacts
, joinFacts
, joinOutFacts -- deprecated
, joinMaps
, analyzeAndRewriteFwdBody, analyzeAndRewriteBwdBody
, analyzeAndRewriteFwdOx, analyzeAndRewriteBwdOx
)
where
import qualified Data.Map as M
import Data.Maybe
import Compiler.Hoopl.Collections
import Compiler.Hoopl.Checkpoint
import Compiler.Hoopl.Dataflow
import Compiler.Hoopl.Block
import Compiler.Hoopl.Graph
import Compiler.Hoopl.Label
-----------------------------------------------------------------------------
-- | Forward dataflow analysis and rewriting for the special case of a Body.
-- A set of entry points must be supplied; blocks not reachable from
-- the set are thrown away.
analyzeAndRewriteFwdBody
:: forall m n f entries. (CheckpointMonad m, NonLocal n, LabelsPtr entries)
=> FwdPass m n f
-> entries -> Body n -> FactBase f
-> m (Body n, FactBase f)
-- | Backward dataflow analysis and rewriting for the special case of a Body.
-- A set of entry points must be supplied; blocks not reachable from
-- the set are thrown away.
analyzeAndRewriteBwdBody
:: forall m n f entries. (CheckpointMonad m, NonLocal n, LabelsPtr entries)
=> BwdPass m n f
-> entries -> Body n -> FactBase f
-> m (Body n, FactBase f)
analyzeAndRewriteFwdBody pass en = mapBodyFacts (analyzeAndRewriteFwd pass (JustC en))
analyzeAndRewriteBwdBody pass en = mapBodyFacts (analyzeAndRewriteBwd pass (JustC en))
mapBodyFacts :: (Monad m)
=> (Graph n C C -> Fact C f -> m (Graph n C C, Fact C f, MaybeO C f))
-> (Body n -> FactBase f -> m (Body n, FactBase f))
-- ^ Internal utility; should not escape
mapBodyFacts anal b f = anal (GMany NothingO b NothingO) f >>= bodyFacts
where -- the type constraint is needed for the pattern match;
-- if it were not, we would use do-notation here.
bodyFacts :: Monad m => (Graph n C C, Fact C f, MaybeO C f) -> m (Body n, Fact C f)
bodyFacts (GMany NothingO body NothingO, fb, NothingO) = return (body, fb)
{-
Can't write:
do (GMany NothingO body NothingO, fb, NothingO) <- anal (....) f
return (body, fb)
because we need an explicit type signature in order to do the GADT
pattern matches on NothingO
-}
-- | Forward dataflow analysis and rewriting for the special case of a
-- graph open at the entry. This special case relieves the client
-- from having to specify a type signature for 'NothingO', which beginners
-- might find confusing and experts might find annoying.
analyzeAndRewriteFwdOx
:: forall m n f x. (CheckpointMonad m, NonLocal n)
=> FwdPass m n f -> Graph n O x -> f -> m (Graph n O x, FactBase f, MaybeO x f)
-- | Backward dataflow analysis and rewriting for the special case of a
-- graph open at the entry. This special case relieves the client
-- from having to specify a type signature for 'NothingO', which beginners
-- might find confusing and experts might find annoying.
analyzeAndRewriteBwdOx
:: forall m n f x. (CheckpointMonad m, NonLocal n)
=> BwdPass m n f -> Graph n O x -> Fact x f -> m (Graph n O x, FactBase f, f)
-- | A value that can be used for the entry point of a graph open at the entry.
noEntries :: MaybeC O Label
noEntries = NothingC
analyzeAndRewriteFwdOx pass g f = analyzeAndRewriteFwd pass noEntries g f
analyzeAndRewriteBwdOx pass g fb = analyzeAndRewriteBwd pass noEntries g fb >>= strip
where strip :: forall m a b c . Monad m => (a, b, MaybeO O c) -> m (a, b, c)
strip (a, b, JustO c) = return (a, b, c)
-- | A utility function so that a transfer function for a first
-- node can be given just a fact; we handle the lookup. This
-- function is planned to be made obsolete by changes in the dataflow
-- interface.
firstXfer :: NonLocal n => (n C O -> f -> f) -> (n C O -> FactBase f -> f)
firstXfer xfer n fb = xfer n $ fromJust $ lookupFact (entryLabel n) fb
-- | This utility function handles a common case in which a transfer function
-- produces a single fact out of a last node, which is then distributed
-- over the outgoing edges.
distributeXfer :: NonLocal n
=> DataflowLattice f -> (n O C -> f -> f) -> (n O C -> f -> FactBase f)
distributeXfer lattice xfer n f =
mkFactBase lattice [ (l, xfer n f) | l <- successors n ]
-- | This utility function handles a common case in which a transfer function
-- for a last node takes the incoming fact unchanged and simply distributes
-- that fact over the outgoing edges.
distributeFact :: NonLocal n => n O C -> f -> FactBase f
distributeFact n f = mapFromList [ (l, f) | l <- successors n ]
-- because the same fact goes out on every edge,
-- there's no need for 'mkFactBase' here.
-- | This utility function handles a common case in which a backward transfer
-- function takes the incoming fact unchanged and tags it with the node's label.
distributeFactBwd :: NonLocal n => n C O -> f -> FactBase f
distributeFactBwd n f = mapSingleton (entryLabel n) f
-- | List of (unlabelled) facts from the successors of a last node
successorFacts :: NonLocal n => n O C -> FactBase f -> [f]
successorFacts n fb = [ f | id <- successors n, let Just f = lookupFact id fb ]
-- | Join a list of facts.
joinFacts :: DataflowLattice f -> Label -> [f] -> f
joinFacts lat inBlock = foldr extend (fact_bot lat)
where extend new old = snd $ fact_join lat inBlock (OldFact old) (NewFact new)
{-# DEPRECATED joinOutFacts
"should be replaced by 'joinFacts lat l (successorFacts n f)'; as is, it uses the wrong Label" #-}
joinOutFacts :: (NonLocal node) => DataflowLattice f -> node O C -> FactBase f -> f
joinOutFacts lat n f = foldr join (fact_bot lat) facts
where join (lbl, new) old = snd $ fact_join lat lbl (OldFact old) (NewFact new)
facts = [(s, fromJust fact) | s <- successors n, let fact = lookupFact s f, isJust fact]
-- | It's common to represent dataflow facts as a map from variables
-- to some fact about the locations. For these maps, the join
-- operation on the map can be expressed in terms of the join on each
-- element of the codomain:
joinMaps :: Ord k => JoinFun v -> JoinFun (M.Map k v)
joinMaps eltJoin l (OldFact old) (NewFact new) = M.foldrWithKey add (NoChange, old) new
where
add k new_v (ch, joinmap) =
case M.lookup k joinmap of
Nothing -> (SomeChange, M.insert k new_v joinmap)
Just old_v -> case eltJoin l (OldFact old_v) (NewFact new_v) of
(SomeChange, v') -> (SomeChange, M.insert k v' joinmap)
(NoChange, _) -> (ch, joinmap)
| jwiegley/ghc-release | libraries/hoopl/src/Compiler/Hoopl/XUtil.hs | gpl-3.0 | 6,850 | 0 | 15 | 1,463 | 1,695 | 892 | 803 | 77 | 3 |
import Control.Monad as M
import Data.List as L
import System.Directory
import System.Environment
import System.FilePath
import System.Random
import Text.Printf
{-# INLINE shuffle #-}
shuffle :: [a] -> IO [a]
shuffle xs = do
ys <- M.replicateM (L.length xs) randomIO :: IO [Double]
return . fst . L.unzip . L.sortOn snd $ L.zip xs ys
{-# INLINE pathGenerator #-}
pathGenerator :: FilePath -> Int -> String -> Int -> FilePath
pathGenerator folderPath freq "distractor" n =
printf "%s/%dFC/distractor/amoeba_0_%d_%05d.png" folderPath freq freq n
pathGenerator folderPath freq "target" n =
printf "%s/%dFC/target/amoeba_1_%d_%05d.png" folderPath freq freq n
pathGenerator folderPath freq "amoeba" n =
printf "%s/%dFC/amoeba/amoeba_2_%d_%05d.png" folderPath freq freq n
pathGenerator _ _ x _ = error $ printf "pathGenerator: type %s not found.\n" x
main = do
(folderPath:writeFolderPath:nullImagePath:totalNumStr:numTrainStr:xs) <-
getArgs
let fcs = L.map (\x -> read x :: Int) xs
totalNum = read totalNumStr :: Int
numTrain = read numTrainStr :: Int
numFile <-
L.length <$>
listDirectory (folderPath </> (L.head xs L.++ "FC") </> "target")
unless
(totalNum == numFile)
(error $
printf
"TotalNum:%d\n%d files in %s\n"
totalNum
numFile
(folderPath </> (L.head xs L.++ "FC") </> "target"))
when
(numTrain >= totalNum)
(error $ printf "numTrain: %d\ntotalNum: %d\n" numTrain totalNum)
createDirectoryIfMissing True (writeFolderPath </> numTrainStr)
let trainIndex = [1 .. numTrain]
testIndex = [(numTrain + 1) .. totalNum]
-- Train
let amoebaPathList =
L.concatMap
(\fc ->
L.map
(\n ->
( 1
, pathGenerator folderPath fc "target" n
, pathGenerator folderPath fc "amoeba" n))
trainIndex)
fcs
distractorPathList =
L.concatMap
(\fc ->
L.map
(\n ->
(0, pathGenerator folderPath fc "distractor" n, nullImagePath))
trainIndex)
fcs
shuffledPairs <- shuffle $ amoebaPathList L.++ distractorPathList
writeFile (writeFolderPath </> numTrainStr </> "trainImageListAmoebaOnly.txt") .
L.unlines . L.map (\(_, x, _) -> x) $
amoebaPathList
writeFile
(writeFolderPath </> numTrainStr </> "trainAmoebaListAmoebaOnly.txt") .
L.unlines . L.map (\(_, _, x) -> x) $
amoebaPathList
writeFile (writeFolderPath </> numTrainStr </> "trainImageList.txt") .
L.unlines . L.map (\(_, x, _) -> x) $
shuffledPairs
writeFile (writeFolderPath </> numTrainStr </> "trainAmoebaList.txt") .
L.unlines . L.map (\(_, _, x) -> x) $
shuffledPairs
writeFile (writeFolderPath </> numTrainStr </> "trainLabelList.txt") .
L.unlines . L.map (\(x, _, _) -> show x) $
shuffledPairs
-- Test
let amoebaPathList =
L.concatMap
(\fc ->
L.map
(\n ->
( 1
, pathGenerator folderPath fc "target" n
, pathGenerator folderPath fc "amoeba" n))
testIndex)
fcs
distractorPathList =
L.concatMap
(\fc ->
L.map
(\n ->
(0, pathGenerator folderPath fc "distractor" n, nullImagePath))
testIndex)
fcs
shuffledPairs <- shuffle $ amoebaPathList L.++ distractorPathList
writeFile (writeFolderPath </> numTrainStr </> "testImageList.txt") .
L.unlines . L.map (\(_, x, _) -> x) $
shuffledPairs
writeFile (writeFolderPath </> numTrainStr </> "testAmoebaList.txt") .
L.unlines . L.map (\(_, _, x) -> x) $
shuffledPairs
writeFile (writeFolderPath </> numTrainStr </> "testLabelList.txt") .
L.unlines . L.map (\(x, _, _) -> show x) $
shuffledPairs
| XinhuaZhang/PetaVisionHaskell | Application/Amoeba/GeneratePath.hs | gpl-3.0 | 3,991 | 0 | 17 | 1,180 | 1,216 | 632 | 584 | 107 | 1 |
module HLinear.Hook
( EchelonForm(..)
, EchelonTransformation(..)
, LeftTransformation(..)
, PLEHook(..)
, PLUEHook(..)
, UEHook(..)
)
where
import HLinear.Hook.EchelonForm
import HLinear.Hook.EchelonTransformation
import HLinear.Hook.LeftTransformation
import HLinear.Hook.PLEHook
| martinra/hlinear | src/HLinear/Hook.hs | gpl-3.0 | 297 | 0 | 5 | 39 | 74 | 51 | 23 | 11 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Organizations.Locations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about a location.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.organizations.locations.get@.
module Network.Google.Resource.Logging.Organizations.Locations.Get
(
-- * REST Resource
OrganizationsLocationsGetResource
-- * Creating a Request
, organizationsLocationsGet
, OrganizationsLocationsGet
-- * Request Lenses
, olgXgafv
, olgUploadProtocol
, olgAccessToken
, olgUploadType
, olgName
, olgCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.organizations.locations.get@ method which the
-- 'OrganizationsLocationsGet' request conforms to.
type OrganizationsLocationsGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Location
-- | Gets information about a location.
--
-- /See:/ 'organizationsLocationsGet' smart constructor.
data OrganizationsLocationsGet =
OrganizationsLocationsGet'
{ _olgXgafv :: !(Maybe Xgafv)
, _olgUploadProtocol :: !(Maybe Text)
, _olgAccessToken :: !(Maybe Text)
, _olgUploadType :: !(Maybe Text)
, _olgName :: !Text
, _olgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsLocationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olgXgafv'
--
-- * 'olgUploadProtocol'
--
-- * 'olgAccessToken'
--
-- * 'olgUploadType'
--
-- * 'olgName'
--
-- * 'olgCallback'
organizationsLocationsGet
:: Text -- ^ 'olgName'
-> OrganizationsLocationsGet
organizationsLocationsGet pOlgName_ =
OrganizationsLocationsGet'
{ _olgXgafv = Nothing
, _olgUploadProtocol = Nothing
, _olgAccessToken = Nothing
, _olgUploadType = Nothing
, _olgName = pOlgName_
, _olgCallback = Nothing
}
-- | V1 error format.
olgXgafv :: Lens' OrganizationsLocationsGet (Maybe Xgafv)
olgXgafv = lens _olgXgafv (\ s a -> s{_olgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olgUploadProtocol :: Lens' OrganizationsLocationsGet (Maybe Text)
olgUploadProtocol
= lens _olgUploadProtocol
(\ s a -> s{_olgUploadProtocol = a})
-- | OAuth access token.
olgAccessToken :: Lens' OrganizationsLocationsGet (Maybe Text)
olgAccessToken
= lens _olgAccessToken
(\ s a -> s{_olgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olgUploadType :: Lens' OrganizationsLocationsGet (Maybe Text)
olgUploadType
= lens _olgUploadType
(\ s a -> s{_olgUploadType = a})
-- | Resource name for the location.
olgName :: Lens' OrganizationsLocationsGet Text
olgName = lens _olgName (\ s a -> s{_olgName = a})
-- | JSONP
olgCallback :: Lens' OrganizationsLocationsGet (Maybe Text)
olgCallback
= lens _olgCallback (\ s a -> s{_olgCallback = a})
instance GoogleRequest OrganizationsLocationsGet
where
type Rs OrganizationsLocationsGet = Location
type Scopes OrganizationsLocationsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/logging.admin",
"https://www.googleapis.com/auth/logging.read"]
requestClient OrganizationsLocationsGet'{..}
= go _olgName _olgXgafv _olgUploadProtocol
_olgAccessToken
_olgUploadType
_olgCallback
(Just AltJSON)
loggingService
where go
= buildClient
(Proxy :: Proxy OrganizationsLocationsGetResource)
mempty
| brendanhay/gogol | gogol-logging/gen/Network/Google/Resource/Logging/Organizations/Locations/Get.hs | mpl-2.0 | 4,830 | 0 | 15 | 1,078 | 704 | 412 | 292 | 103 | 1 |
{-# OPTIONS_GHC -F -pgmF ../../scripts/local-htfpp #-}
import "HTF" Test.Framework
-- error must be in line 7
test_foo :: Int
test_foo = 5
main :: IO ()
main = return ()
| skogsbaer/HTF | tests/compile-errors/Test3.hs | lgpl-2.1 | 173 | 0 | 6 | 33 | 39 | 22 | 17 | -1 | -1 |
{-# LANGUAGE RecordWildCards, TupleSections, LambdaCase #-}
module ViperVM.Platform.Kernel where
import Control.Applicative ((<$>))
import ViperVM.Platform.Proc
import ViperVM.Platform.KernelParameter
import ViperVM.Platform.KernelConstraint
import ViperVM.Platform.ProcessorCapabilities
import qualified ViperVM.Platform.Peer.KernelPeer as Peer
-- | A kernel
data Kernel = Kernel {
kernelPeer :: Peer.KernelPeer
} deriving (Ord,Eq)
instance Show Kernel where
show k = show (kernelPeer k)
initKernelsIO :: [IO Peer.KernelPeer] -> IO [Kernel]
initKernelsIO peers = fmap Kernel <$> sequence peers
-- | Constraints of the kernel
kernelConstraints :: Kernel -> [KernelConstraint]
kernelConstraints = Peer.kernelConstraints . kernelPeer
-- | Indicate if a processor supports given constraints
supportConstraints :: [KernelConstraint] -> Proc -> Bool
supportConstraints cs p = all (`supportConstraint` p) cs
-- | Indicate if a processor supports a given constraint
supportConstraint :: KernelConstraint -> Proc -> Bool
supportConstraint DoublePrecisionSupport proc = procSupports proc DoubleFloatingPoint
-- | Indicate if a processor can execute a given kernel
canExecute :: Proc -> Kernel -> Bool
canExecute p k = supportConstraints (kernelConstraints k) p
-- | Execute a kernel on a given processor synchronously
execute :: Proc -> Kernel -> [KernelParameter] -> IO ()
execute p k params = do
Peer.kernelExecute (kernelPeer k) params (procPeer p)
| hsyl20/HViperVM | lib/ViperVM/Platform/Kernel.hs | lgpl-3.0 | 1,465 | 0 | 9 | 209 | 337 | 187 | 150 | 26 | 1 |
import Data.List (sort)
main = do
let test = [[2],[3,4],[6,5,7],[4,1,8,3]]
print $ minimumTotal 0 test
print $ minimumTotal 0 [[2]]
minimumTotal :: Integer -> [[Integer]] -> Integer
minimumTotal = foldl (\a b -> (head.sort) $ map (+ a) b)
| ccqpein/Arithmetic-Exercises | Triangle/Triangle2.hs | apache-2.0 | 258 | 0 | 11 | 58 | 152 | 86 | 66 | 7 | 1 |
rank (m:e:j:_)
| (m==100) || (e==100) || (j==100) = 'A'
| m+e >= 180 = 'A'
| m+e+j >= 240 = 'A'
| m+e+j >= 210 = 'B'
|(m+e+j >= 150) &&((m>=80) || (e>=80)) = 'B'
| otherwise = 'C'
ans ([0]:_) = []
ans (n:x) =
let n'= (n!!0)
o = map rank $ take n' x
x'= drop n' x
in
o++(ans x')
main = do
c <- getContents
let i = map (map read) $ map words $ lines c :: [[Int]]
o = ans i
mapM_ putStrLn $ map (\c->[c]) o
| a143753/AOJ | 0218.hs | apache-2.0 | 512 | 1 | 14 | 203 | 359 | 181 | 178 | 18 | 1 |
-- | The 'MakeReflections' module takes the 'FileDescriptorProto'
-- output from 'Resolve' and produces a 'ProtoInfo' from
-- 'Reflections'. This also takes a Haskell module prefix and the
-- proto's package namespace as input. The output is suitable
-- for passing to the 'Gen' module to produce the files.
--
-- This achieves several things: It moves the data from a nested tree
-- to flat lists and maps. It moves the group information from the
-- parent Descriptor to the actual Descriptor. It moves the data out
-- of Maybe types. It converts Utf8 to String. Keys known to extend
-- a Descriptor are listed in that Descriptor.
--
-- In building the reflection info new things are computed. It changes
-- dotted names to ProtoName using the translator from
-- 'makeNameMaps'. It parses the default value from the ByteString to
-- a Haskell type. For fields, the value of the tag on the wire is
-- computed and so is its size on the wire.
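--
-- As a rough usage sketch (argument names here are illustrative):
-- @makeProtoInfo (unknownFields, lazyFields) nameMap fdp@ builds the
-- 'ProtoInfo', and @serializeFDP fdp@ yields the serialized bytes of the
-- original 'FileDescriptorProto'.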
module Text.ProtocolBuffers.ProtoCompile.MakeReflections(makeProtoInfo,serializeFDP) where
import qualified Text.DescriptorProtos.DescriptorProto as D(DescriptorProto)
import qualified Text.DescriptorProtos.DescriptorProto as D.DescriptorProto(DescriptorProto(..))
import qualified Text.DescriptorProtos.DescriptorProto.ExtensionRange as D.DescriptorProto(ExtensionRange(ExtensionRange))
import qualified Text.DescriptorProtos.DescriptorProto.ExtensionRange as D.DescriptorProto.ExtensionRange(ExtensionRange(..))
import qualified Text.DescriptorProtos.EnumDescriptorProto as D(EnumDescriptorProto)
import qualified Text.DescriptorProtos.EnumDescriptorProto as D.EnumDescriptorProto(EnumDescriptorProto(..))
import qualified Text.DescriptorProtos.EnumValueDescriptorProto as D(EnumValueDescriptorProto)
import qualified Text.DescriptorProtos.EnumValueDescriptorProto as D.EnumValueDescriptorProto(EnumValueDescriptorProto(..))
import qualified Text.DescriptorProtos.FieldDescriptorProto as D(FieldDescriptorProto)
import qualified Text.DescriptorProtos.FieldDescriptorProto as D.FieldDescriptorProto(FieldDescriptorProto(..))
-- import qualified Text.DescriptorProtos.FieldDescriptorProto.Label as D.FieldDescriptorProto(Label)
import Text.DescriptorProtos.FieldDescriptorProto.Label as D.FieldDescriptorProto.Label(Label(..))
-- import qualified Text.DescriptorProtos.FieldDescriptorProto.Type as D.FieldDescriptorProto(Type)
import Text.DescriptorProtos.FieldDescriptorProto.Type as D.FieldDescriptorProto.Type(Type(..))
import qualified Text.DescriptorProtos.FieldOptions as D(FieldOptions(FieldOptions))
import qualified Text.DescriptorProtos.FieldOptions as D.FieldOptions(FieldOptions(..))
import qualified Text.DescriptorProtos.FileDescriptorProto as D(FileDescriptorProto(FileDescriptorProto))
import qualified Text.DescriptorProtos.FileDescriptorProto as D.FileDescriptorProto(FileDescriptorProto(..))
import Text.ProtocolBuffers.Basic
import Text.ProtocolBuffers.Identifiers
import Text.ProtocolBuffers.Reflections
import Text.ProtocolBuffers.WireMessage(size'WireTag,toWireTag,toPackedWireTag,runPut,Wire(..))
import Text.ProtocolBuffers.ProtoCompile.Resolve(ReMap,NameMap(..),PackageID(..))
import qualified Data.Foldable as F(foldr,toList)
import qualified Data.Sequence as Seq(fromList,empty,singleton,null)
import Numeric(readHex,readOct,readDec)
import Data.Monoid(mconcat,mappend)
import qualified Data.Map as M(fromListWith,lookup,keys)
import Data.Maybe(fromMaybe,catMaybes,fromJust)
import System.FilePath
--import Debug.Trace (trace)
imp :: String -> a
imp msg = error $ "Text.ProtocolBuffers.ProtoCompile.MakeReflections: Impossible?\n "++msg
pnPath :: ProtoName -> [FilePath]
pnPath (ProtoName _ a b c) = splitDirectories . flip addExtension "hs" . joinPath . map mName $ a++b++[c]
serializeFDP :: D.FileDescriptorProto -> ByteString
serializeFDP fdp = runPut (wirePut 11 fdp)
toHaskell :: ReMap -> FIName Utf8 -> ProtoName
toHaskell reMap k = case M.lookup k reMap of
Nothing -> imp $ "toHaskell failed to find "++show k++" among "++show (M.keys reMap)
Just pn -> pn
makeProtoInfo :: (Bool,Bool) -- unknownField and lazyFields for makeDescriptorInfo'
-> NameMap
-> D.FileDescriptorProto
-> ProtoInfo
makeProtoInfo (unknownField,lazyFieldsOpt) (NameMap (packageID,hPrefix,hParent) reMap)
fdp@(D.FileDescriptorProto { D.FileDescriptorProto.name = Just rawName })
= ProtoInfo protoName (pnPath protoName) (toString rawName) keyInfos allMessages allEnums allKeys where
packageName = getPackageID packageID :: FIName (Utf8)
protoName = case hParent of
[] -> case hPrefix of
[] -> imp $ "makeProtoInfo: no hPrefix or hParent in NameMap for: "++show fdp
_ -> ProtoName packageName (init hPrefix) [] (last hPrefix)
_ -> ProtoName packageName hPrefix (init hParent) (last hParent)
keyInfos = Seq.fromList . map (\f -> (keyExtendee' reMap f,toFieldInfo' reMap packageName f))
. F.toList . D.FileDescriptorProto.extension $ fdp
allMessages = concatMap (processMSG packageName False) (F.toList $ D.FileDescriptorProto.message_type fdp)
allEnums = map (makeEnumInfo' reMap packageName) (F.toList $ D.FileDescriptorProto.enum_type fdp)
++ concatMap (processENM packageName) (F.toList $ D.FileDescriptorProto.message_type fdp)
allKeys = M.fromListWith mappend . map (\(k,a) -> (k,Seq.singleton a))
. F.toList . mconcat $ keyInfos : map keys allMessages
processMSG parent msgIsGroup msg =
let getKnownKeys protoName' = fromMaybe Seq.empty (M.lookup protoName' allKeys)
groups = collectedGroups msg
checkGroup x = elem (fromMaybe (imp $ "no message name in makeProtoInfo.processMSG.checkGroup:\n"++show msg)
(D.DescriptorProto.name x))
groups
parent' = fqAppend parent [IName (fromJust (D.DescriptorProto.name msg))]
in makeDescriptorInfo' reMap parent getKnownKeys msgIsGroup (unknownField,lazyFieldsOpt) msg
: concatMap (\x -> processMSG parent' (checkGroup x) x)
(F.toList (D.DescriptorProto.nested_type msg))
processENM parent msg = foldr ((:) . makeEnumInfo' reMap parent') nested
(F.toList (D.DescriptorProto.enum_type msg))
where parent' = fqAppend parent [IName (fromJust (D.DescriptorProto.name msg))]
nested = concatMap (processENM parent') (F.toList (D.DescriptorProto.nested_type msg))
makeProtoInfo _ _ _ = imp $ "makeProtoInfo: missing name or package"
makeEnumInfo' :: ReMap -> FIName Utf8 -> D.EnumDescriptorProto -> EnumInfo
makeEnumInfo' reMap parent
e@(D.EnumDescriptorProto.EnumDescriptorProto
{ D.EnumDescriptorProto.name = Just rawName
, D.EnumDescriptorProto.value = value })
= if Seq.null value then imp $ "enum has no values: "++show e
else EnumInfo protoName (pnPath protoName) enumVals
where protoName = toHaskell reMap $ fqAppend parent [IName rawName]
enumVals ::[(EnumCode,String)]
enumVals = F.foldr ((:) . oneValue) [] value
where oneValue :: D.EnumValueDescriptorProto -> (EnumCode,String)
oneValue (D.EnumValueDescriptorProto.EnumValueDescriptorProto
{ D.EnumValueDescriptorProto.name = Just name
, D.EnumValueDescriptorProto.number = Just number })
= (EnumCode number,mName . baseName . toHaskell reMap $ fqAppend (protobufName protoName) [IName name])
oneValue evdp = imp $ "no name or number for evdp passed to makeEnumInfo.oneValue: "++show evdp
makeEnumInfo' _ _ _ = imp "makeEnumInfo: missing name"
keyExtendee' :: ReMap -> D.FieldDescriptorProto.FieldDescriptorProto -> ProtoName
keyExtendee' reMap f = case D.FieldDescriptorProto.extendee f of
Nothing -> imp $ "keyExtendee expected Just but found Nothing: "++show f
Just extName -> toHaskell reMap (FIName extName)
makeDescriptorInfo' :: ReMap -> FIName Utf8
-> (ProtoName -> Seq FieldInfo)
-> Bool -- msgIsGroup
-> (Bool,Bool) -- unknownField and lazyFields
-> D.DescriptorProto -> DescriptorInfo
makeDescriptorInfo' reMap parent getKnownKeys msgIsGroup (unknownField,lazyFieldsOpt)
(D.DescriptorProto.DescriptorProto
{ D.DescriptorProto.name = Just rawName
, D.DescriptorProto.field = rawFields
, D.DescriptorProto.extension = rawKeys
, D.DescriptorProto.extension_range = extension_range })
= let di = DescriptorInfo protoName (pnPath protoName) msgIsGroup
fieldInfos keyInfos extRangeList (getKnownKeys protoName)
unknownField lazyFieldsOpt
in di -- trace (toString rawName ++ "\n" ++ show di ++ "\n\n") $ di
where protoName = toHaskell reMap $ fqAppend parent [IName rawName]
fieldInfos = fmap (toFieldInfo' reMap (protobufName protoName)) rawFields
keyInfos = fmap (\f -> (keyExtendee' reMap f,toFieldInfo' reMap (protobufName protoName) f)) rawKeys
extRangeList = concatMap check unchecked
where check x@(lo,hi) | hi < lo = []
| hi<19000 || 19999<lo = [x]
| otherwise = concatMap check [(lo,18999),(20000,hi)]
unchecked = F.foldr ((:) . extToPair) [] extension_range
extToPair (D.DescriptorProto.ExtensionRange
{ D.DescriptorProto.ExtensionRange.start = mStart
, D.DescriptorProto.ExtensionRange.end = mEnd }) =
(maybe minBound FieldId mStart, maybe maxBound (FieldId . pred) mEnd)
makeDescriptorInfo' _ _ _ _ _ _ = imp $ "makeDescriptorInfo: missing name"
toFieldInfo' :: ReMap -> FIName Utf8 -> D.FieldDescriptorProto -> FieldInfo
toFieldInfo' reMap parent
f@(D.FieldDescriptorProto.FieldDescriptorProto
{ D.FieldDescriptorProto.name = Just name
, D.FieldDescriptorProto.number = Just number
, D.FieldDescriptorProto.label = Just label
, D.FieldDescriptorProto.type' = Just type'
, D.FieldDescriptorProto.type_name = mayTypeName
, D.FieldDescriptorProto.default_value = mayRawDef
, D.FieldDescriptorProto.options = mayOpt })
= fieldInfo
where mayDef = parseDefaultValue f
fieldInfo = let (ProtoName x a b c) = toHaskell reMap $ fqAppend parent [IName name]
protoFName = ProtoFName x a b (mangle c)
fieldId = (FieldId (fromIntegral number))
fieldType = (FieldType (fromEnum type'))
{- removed to update 1.5.5 to be compatible with protobuf-2.3.0
wt | packedOption = toPackedWireTag fieldId
| otherwise = toWireTag fieldId fieldType
-}
wt | packedOption = toPackedWireTag fieldId -- write packed
| otherwise = toWireTag fieldId fieldType -- write unpacked
wt2 | validPacked = Just (toWireTag fieldId fieldType -- read unpacked
,toPackedWireTag fieldId) -- read packed
| otherwise = Nothing
wtLength = size'WireTag wt
packedOption = case mayOpt of
Just (D.FieldOptions { D.FieldOptions.packed = Just True }) -> True
_ -> False
validPacked = isValidPacked label fieldType
in FieldInfo protoFName
fieldId
wt
wt2
wtLength
packedOption
(label == LABEL_REQUIRED)
(label == LABEL_REPEATED)
validPacked
fieldType
(fmap (toHaskell reMap . FIName) mayTypeName)
(fmap utf8 mayRawDef)
mayDef
toFieldInfo' _ _ f = imp $ "toFieldInfo: missing info in "++show f
collectedGroups :: D.DescriptorProto -> [Utf8]
collectedGroups = catMaybes
. map D.FieldDescriptorProto.type_name
. filter (\f -> D.FieldDescriptorProto.type' f == Just TYPE_GROUP)
. F.toList
. D.DescriptorProto.field
-- "Nothing" means no value specified
-- A failure to parse a provided value will result in an error at the moment
parseDefaultValue :: D.FieldDescriptorProto -> Maybe HsDefault
parseDefaultValue f@(D.FieldDescriptorProto.FieldDescriptorProto
{ D.FieldDescriptorProto.type' = type'
, D.FieldDescriptorProto.default_value = mayRawDef })
= do bs <- mayRawDef
t <- type'
todo <- case t of
TYPE_MESSAGE -> Nothing
TYPE_GROUP -> Nothing
TYPE_ENUM -> Just parseDefEnum
TYPE_BOOL -> Just parseDefBool
TYPE_BYTES -> Just parseDefBytes
TYPE_DOUBLE -> Just parseDefDouble
TYPE_FLOAT -> Just parseDefFloat
TYPE_STRING -> Just parseDefString
_ -> Just parseDefInteger
case todo bs of
Nothing -> error $ "Could not parse as type "++ show t ++" the default value (raw) is "++ show mayRawDef ++" for field "++show f
Just value -> return value
--- From here down is code used to parse the format of the default values in the .proto files
-- On 25 August 2010 20:12, George van den Driessche <georgevdd@google.com> sent Chris Kuklewicz a
-- patch to MakeReflections.parseDefEnum to ensure that HsDef'Enum holds the mangled form of the
-- name.
parseDefEnum :: Utf8 -> Maybe HsDefault
parseDefEnum = Just . HsDef'Enum . mName . mangle . IName . uToString
{-# INLINE mayRead #-}
mayRead :: ReadS a -> String -> Maybe a
mayRead f s = case f s of [(a,"")] -> Just a; _ -> Nothing
parseDefDouble :: Utf8 -> Maybe HsDefault
parseDefDouble bs = case (uToString bs) of
"nan" -> Just (HsDef'RealFloat SRF'nan)
"-inf" -> Just (HsDef'RealFloat SRF'ninf)
"inf" -> Just (HsDef'RealFloat SRF'inf)
s -> fmap (HsDef'RealFloat . SRF'Rational . toRational) . mayRead reads'$ s
where reads' :: ReadS Double
reads' = readSigned' reads
{-
parseDefDouble :: Utf8 -> Maybe HsDefault
parseDefDouble bs |
| otherwise = fmap (HsDef'Rational . toRational)
. mayRead reads' . uToString $ bs
-}
parseDefFloat :: Utf8 -> Maybe HsDefault
parseDefFloat bs = case (uToString bs) of
"nan" -> Just (HsDef'RealFloat SRF'nan)
"-inf" -> Just (HsDef'RealFloat SRF'ninf)
"inf" -> Just (HsDef'RealFloat SRF'inf)
s -> fmap (HsDef'RealFloat . SRF'Rational . toRational) . mayRead reads'$ s
where reads' :: ReadS Float
reads' = readSigned' reads
{-
parseDefFloat :: Utf8 -> Maybe HsDefault
parseDefFloat bs = fmap (HsDef'Rational . toRational)
. mayRead reads' . uToString $ bs
where reads' :: ReadS Float
reads' = readSigned' reads
-}
parseDefString :: Utf8 -> Maybe HsDefault
parseDefString bs = Just (HsDef'ByteString (utf8 bs))
parseDefBytes :: Utf8 -> Maybe HsDefault
parseDefBytes bs = Just (HsDef'ByteString (utf8 bs))
parseDefInteger :: Utf8 -> Maybe HsDefault
parseDefInteger bs = fmap HsDef'Integer . mayRead checkSign . uToString $ bs
where checkSign = readSigned' checkBase
checkBase ('0':'x':xs@(_:_)) = readHex xs
checkBase ('0':xs@(_:_)) = readOct xs
checkBase xs = readDec xs
parseDefBool :: Utf8 -> Maybe HsDefault
parseDefBool bs | bs == uFromString "true" = Just (HsDef'Bool True)
| bs == uFromString "false" = Just (HsDef'Bool False)
| otherwise = Nothing
-- The Numeric.readSigned does not handle '+' for some odd reason
readSigned' :: (Num a) => ([Char] -> [(a, t)]) -> [Char] -> [(a, t)]
readSigned' f ('-':xs) = map (\(v,s) -> (-v,s)) . f $ xs
readSigned' f ('+':xs) = f xs
readSigned' f xs = f xs
-- Must keep synchronized with Parser.isValidPacked
isValidPacked :: Label -> FieldType -> Bool
isValidPacked LABEL_REPEATED fieldType =
case fieldType of
9 -> False
10 -> False
11 -> False -- Impossible value for typeCode from parseType, but here for completeness
12 -> False
_ -> True
isValidPacked _ _ = False
| alphaHeavy/protocol-buffers | hprotoc/Text/ProtocolBuffers/ProtoCompile/MakeReflections.hs | apache-2.0 | 17,414 | 1 | 19 | 4,907 | 3,884 | 2,096 | 1,788 | 236 | 10 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Eureka.Application (
lookupByAppName
, lookupByAppNameAll
, lookupAllApplications
) where
import Control.Monad (mzero)
import Control.Monad.Logger (MonadLoggerIO)
import Data.Aeson (FromJSON (parseJSON),
Value (Object, Array), eitherDecode,
(.:))
import Data.Aeson.Types (parseEither)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Vector as V
import Network.HTTP.Client (responseBody, httpLbs, Request(requestHeaders), responseStatus)
import Network.HTTP.Types.Status (notFound404)
import Network.Eureka.Types (InstanceInfo(..), EurekaConnection(..), InstanceStatus(..))
import Network.Eureka.Util (parseUrlWithAdded)
import Network.Eureka.Request (makeRequest)
-- | Look up instance information for the given App Name.
-- NOTE: Only returns the instances which are up.
lookupByAppName :: (MonadLoggerIO io)
=> EurekaConnection
-> String
-> io [InstanceInfo]
lookupByAppName c n = filter isUp <$> lookupByAppNameAll c n
where
isUp = (==) Up . instanceInfoStatus
-- | Like @lookupByAppName@, but returns all instances, even DOWN and OUT_OF_SERVICE.
lookupByAppNameAll :: (MonadLoggerIO io)
=> EurekaConnection
-> String
-> io [InstanceInfo]
lookupByAppNameAll eConn@EurekaConnection { eConnManager } appName = do
result <- makeRequest eConn getByAppName
either error (return . applicationInstanceInfos) result
where
getByAppName url = do
resp <- httpLbs (request url) eConnManager
if responseStatus resp == notFound404
then
return (Right $ Application "" [])
else
let response = (eitherDecode . responseBody) resp
in return $ parseEither (.: "application") =<< response
request url = requestJSON $ parseUrlWithAdded url $ "apps/" ++ appName
{- |
Returns all instances of all applications that eureka knows about,
arranged by application name.
-}
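--
-- A minimal usage sketch (@eConn@ is assumed to be an already-built
-- 'EurekaConnection'):
--
-- > apps <- lookupAllApplications eConn
-- > liftIO $ print (Map.keys apps)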
lookupAllApplications :: (MonadLoggerIO io)
=> EurekaConnection
-> io (Map String [InstanceInfo])
lookupAllApplications eConn@EurekaConnection {eConnManager} = do
result <- makeRequest eConn getAllApps
either error (return . toAppMap) result
where
getAllApps :: String -> IO (Either String Applications)
getAllApps url =
eitherDecode . responseBody <$> httpLbs request eConnManager
where
request = requestJSON (parseUrlWithAdded url "apps")
toAppMap :: Applications -> Map String [InstanceInfo]
toAppMap = Map.fromList . fmap appToTuple . applications
appToTuple :: Application -> (String, [InstanceInfo])
appToTuple (Application name infos) = (name, infos)
{- |
Response type from Eureka "apps/" API.
-}
newtype Applications = Applications {applications :: [Application]}
instance FromJSON Applications where
parseJSON (Object o) = do
-- The design of the structured data coming out of Eureka is
-- perplexing, to say the least.
    Object o2 <- o .: "applications"
Array ary <- o2 .: "application"
Applications <$> mapM parseJSON (V.toList ary)
parseJSON v =
fail (
"Failed to parse list of all instances registered with \
\Eureka. Bad value was " ++ show v ++ " when it should \
\have been an object."
)
-- | Response type from Eureka "apps/APP_NAME" API.
data Application = Application {
_applicationName :: String,
applicationInstanceInfos :: [InstanceInfo]
} deriving Show
instance FromJSON Application where
parseJSON (Object v) = do
name <- v .: "name"
instanceOneOrMany <- v .: "instance"
instanceData <- case instanceOneOrMany of
(Array ary) -> mapM parseJSON (V.toList ary)
o@(Object _) -> do
instanceInfo <- parseJSON o
return [instanceInfo]
other -> fail $ "instance data was of a strange format: " ++ show other
return $ Application name instanceData
parseJSON _ = mzero
requestJSON :: Request -> Request
requestJSON r = r {
requestHeaders = ("Accept", encodeUtf8 "application/json") : requestHeaders r
}
| SumAll/haskell-eureka-client | src/Network/Eureka/Application.hs | apache-2.0 | 4,451 | 0 | 16 | 1,150 | 1,023 | 553 | 470 | 84 | 2 |
module Main where
import TAEval
import TAType
import TACheck
import TAParser
import TAPretty
import CalcSyntax
import Data.Maybe
import Control.Monad.Trans
import System.Console.Haskeline
eval' :: Expr -> Expr
eval' = fromJust . eval
process :: String -> IO ()
process line = do
let res = parseExpr line
case res of
Left err -> print err
Right ex -> do
let chk = check ex
case chk of
Left err -> print err
Right ty -> putStrLn $ (ppexpr $ eval' ex) ++ " : " ++ (pptype ty)
main :: IO ()
main = runInputT defaultSettings loop
where
loop = do
minput <- getInputLine "TArith> "
case minput of
Nothing -> outputStrLn "Goodbye."
Just input -> liftIO (process input) >> loop
| toonn/wyah | src/TA.hs | bsd-2-clause | 749 | 0 | 20 | 200 | 262 | 130 | 132 | 29 | 3 |
-- | Tests for substring functions (@take@, @split@, @isInfixOf@, etc.)
{-# OPTIONS_GHC -fno-enable-rewrite-rules -fno-warn-missing-signatures #-}
module Tests.Properties.Substrings
( testSubstrings
) where
import Data.Char (isSpace)
import Test.QuickCheck
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (testProperty)
import Test.QuickCheck.Unicode (char)
import Tests.QuickCheckUtils
import Text.Show.Functions ()
import qualified Data.List as L
import qualified Data.Text as T
import qualified Data.Text.Internal.Fusion as S
import qualified Data.Text.Internal.Fusion.Common as S
import qualified Data.Text.Internal.Lazy.Fusion as SL
import qualified Data.Text.Lazy as TL
import qualified Tests.SlowFunctions as Slow
s_take n = L.take n `eqP` (unpackS . S.take n)
s_take_s m = L.take n `eqP` (unpackS . S.unstream . S.take n)
where n = small m
sf_take p n = (L.take n . L.filter p) `eqP`
(unpackS . S.take n . S.filter p)
t_take n = L.take n `eqP` (unpackS . T.take n)
t_takeEnd n = (L.reverse . L.take n . L.reverse) `eqP`
(unpackS . T.takeEnd n)
tl_take n = L.take n `eqP` (unpackS . TL.take (fromIntegral n))
tl_takeEnd n = (L.reverse . L.take (fromIntegral n) . L.reverse) `eqP`
(unpackS . TL.takeEnd n)
s_drop n = L.drop n `eqP` (unpackS . S.drop n)
s_drop_s m = L.drop n `eqP` (unpackS . S.unstream . S.drop n)
where n = small m
sf_drop p n = (L.drop n . L.filter p) `eqP`
(unpackS . S.drop n . S.filter p)
t_drop n = L.drop n `eqP` (unpackS . T.drop n)
t_dropEnd n = (L.reverse . L.drop n . L.reverse) `eqP`
(unpackS . T.dropEnd n)
tl_drop n = L.drop n `eqP` (unpackS . TL.drop (fromIntegral n))
tl_dropEnd n = (L.reverse . L.drop n . L.reverse) `eqP`
(unpackS . TL.dropEnd (fromIntegral n))
s_take_drop m = (L.take n . L.drop n) `eqP` (unpackS . S.take n . S.drop n)
where n = small m
s_take_drop_s m = (L.take n . L.drop n) `eqP`
(unpackS . S.unstream . S.take n . S.drop n)
where n = small m
s_takeWhile p = L.takeWhile p `eqP` (unpackS . S.takeWhile p)
s_takeWhile_s p = L.takeWhile p `eqP` (unpackS . S.unstream . S.takeWhile p)
sf_takeWhile q p = (L.takeWhile p . L.filter q) `eqP`
(unpackS . S.takeWhile p . S.filter q)
noMatch = do
c <- char
d <- suchThat char (/= c)
return (c,d)
t_takeWhile p = L.takeWhile p `eqP` (unpackS . T.takeWhile p)
tl_takeWhile p = L.takeWhile p `eqP` (unpackS . TL.takeWhile p)
t_takeWhileEnd p = (L.reverse . L.takeWhile p . L.reverse) `eqP`
(unpackS . T.takeWhileEnd p)
t_takeWhileEnd_null t = forAll noMatch $ \(c,d) -> T.null $
T.takeWhileEnd (==d) (T.snoc t c)
tl_takeWhileEnd p = (L.reverse . L.takeWhile p . L.reverse) `eqP`
(unpackS . TL.takeWhileEnd p)
tl_takeWhileEnd_null t = forAll noMatch $ \(c,d) -> TL.null $
TL.takeWhileEnd (==d) (TL.snoc t c)
s_dropWhile p = L.dropWhile p `eqP` (unpackS . S.dropWhile p)
s_dropWhile_s p = L.dropWhile p `eqP` (unpackS . S.unstream . S.dropWhile p)
sf_dropWhile q p = (L.dropWhile p . L.filter q) `eqP`
(unpackS . S.dropWhile p . S.filter q)
t_dropWhile p = L.dropWhile p `eqP` (unpackS . T.dropWhile p)
tl_dropWhile p    = L.dropWhile p `eqP` (unpackS . TL.dropWhile p)
t_dropWhileEnd p = (L.reverse . L.dropWhile p . L.reverse) `eqP`
(unpackS . T.dropWhileEnd p)
tl_dropWhileEnd p = (L.reverse . L.dropWhile p . L.reverse) `eqP`
(unpackS . TL.dropWhileEnd p)
t_dropAround p = (L.dropWhile p . L.reverse . L.dropWhile p . L.reverse)
`eqP` (unpackS . T.dropAround p)
tl_dropAround p = (L.dropWhile p . L.reverse . L.dropWhile p . L.reverse)
`eqP` (unpackS . TL.dropAround p)
t_stripStart = T.dropWhile isSpace `eq` T.stripStart
tl_stripStart = TL.dropWhile isSpace `eq` TL.stripStart
t_stripEnd = T.dropWhileEnd isSpace `eq` T.stripEnd
tl_stripEnd = TL.dropWhileEnd isSpace `eq` TL.stripEnd
t_strip = T.dropAround isSpace `eq` T.strip
tl_strip = TL.dropAround isSpace `eq` TL.strip
t_splitAt n = L.splitAt n `eqP` (unpack2 . T.splitAt n)
tl_splitAt n = L.splitAt n `eqP` (unpack2 . TL.splitAt (fromIntegral n))
t_span p = L.span p `eqP` (unpack2 . T.span p)
tl_span p = L.span p `eqP` (unpack2 . TL.span p)
t_breakOn_id s = squid `eq` (uncurry T.append . T.breakOn s)
where squid t | T.null s = error "empty"
| otherwise = t
tl_breakOn_id s = squid `eq` (uncurry TL.append . TL.breakOn s)
where squid t | TL.null s = error "empty"
| otherwise = t
t_breakOn_start (NotEmpty s) t =
let (k,m) = T.breakOn s t
in k `T.isPrefixOf` t && (T.null m || s `T.isPrefixOf` m)
tl_breakOn_start (NotEmpty s) t =
let (k,m) = TL.breakOn s t
    in k `TL.isPrefixOf` t && (TL.null m || s `TL.isPrefixOf` m)
t_breakOnEnd_end (NotEmpty s) t =
let (m,k) = T.breakOnEnd s t
in k `T.isSuffixOf` t && (T.null m || s `T.isSuffixOf` m)
tl_breakOnEnd_end (NotEmpty s) t =
let (m,k) = TL.breakOnEnd s t
in k `TL.isSuffixOf` t && (TL.null m || s `TL.isSuffixOf` m)
t_break p = L.break p `eqP` (unpack2 . T.break p)
tl_break p = L.break p `eqP` (unpack2 . TL.break p)
t_group = L.group `eqP` (map unpackS . T.group)
tl_group = L.group `eqP` (map unpackS . TL.group)
t_groupBy p = L.groupBy p `eqP` (map unpackS . T.groupBy p)
tl_groupBy p = L.groupBy p `eqP` (map unpackS . TL.groupBy p)
t_inits = L.inits `eqP` (map unpackS . T.inits)
tl_inits = L.inits `eqP` (map unpackS . TL.inits)
t_tails = L.tails `eqP` (map unpackS . T.tails)
tl_tails = unsquare $
L.tails `eqP` (map unpackS . TL.tails)
t_findAppendId = unsquare $ \(NotEmpty s) ts ->
let t = T.intercalate s ts
in all (==t) $ map (uncurry T.append) (T.breakOnAll s t)
tl_findAppendId = unsquare $ \(NotEmpty s) ts ->
let t = TL.intercalate s ts
in all (==t) $ map (uncurry TL.append) (TL.breakOnAll s t)
t_findContains = unsquare $ \(NotEmpty s) ->
all (T.isPrefixOf s . snd) . T.breakOnAll s . T.intercalate s
tl_findContains = unsquare $ \(NotEmpty s) -> all (TL.isPrefixOf s . snd) .
TL.breakOnAll s . TL.intercalate s
sl_filterCount c = (L.genericLength . L.filter (==c)) `eqP` SL.countChar c
t_findCount s = (L.length . T.breakOnAll s) `eq` T.count s
tl_findCount s = (L.genericLength . TL.breakOnAll s) `eq` TL.count s
t_splitOn_split s = unsquare $
(T.splitOn s `eq` Slow.splitOn s) . T.intercalate s
tl_splitOn_split s = unsquare $
((TL.splitOn (TL.fromStrict s) . TL.fromStrict) `eq`
(map TL.fromStrict . T.splitOn s)) . T.intercalate s
t_splitOn_i (NotEmpty t) = id `eq` (T.intercalate t . T.splitOn t)
tl_splitOn_i (NotEmpty t) = id `eq` (TL.intercalate t . TL.splitOn t)
t_split p = split p `eqP` (map unpackS . T.split p)
t_split_count c = (L.length . T.split (==c)) `eq`
((1+) . T.count (T.singleton c))
t_split_splitOn c = T.split (==c) `eq` T.splitOn (T.singleton c)
tl_split p = split p `eqP` (map unpackS . TL.split p)
split :: (a -> Bool) -> [a] -> [[a]]
split _ [] = [[]]
split p xs = loop xs
where loop s | null s' = [l]
| otherwise = l : loop (tail s')
where (l, s') = break p s
t_chunksOf_same_lengths k = all ((==k) . T.length) . ini . T.chunksOf k
where ini [] = []
ini xs = init xs
t_chunksOf_length k t = len == T.length t || (k <= 0 && len == 0)
where len = L.sum . L.map T.length $ T.chunksOf k t
tl_chunksOf k = T.chunksOf k `eq` (map (T.concat . TL.toChunks) .
TL.chunksOf (fromIntegral k) . TL.fromStrict)
t_lines = L.lines `eqP` (map unpackS . T.lines)
tl_lines = L.lines `eqP` (map unpackS . TL.lines)
{-
t_lines' = lines' `eqP` (map unpackS . T.lines')
where lines' "" = []
lines' s = let (l, s') = break eol s
in l : case s' of
[] -> []
('\r':'\n':s'') -> lines' s''
(_:s'') -> lines' s''
eol c = c == '\r' || c == '\n'
-}
t_words = L.words `eqP` (map unpackS . T.words)
tl_words = L.words `eqP` (map unpackS . TL.words)
t_unlines = unsquare $
L.unlines `eq` (unpackS . T.unlines . map packS)
tl_unlines = unsquare $
L.unlines `eq` (unpackS . TL.unlines . map packS)
t_unwords = unsquare $
L.unwords `eq` (unpackS . T.unwords . map packS)
tl_unwords = unsquare $
L.unwords `eq` (unpackS . TL.unwords . map packS)
s_isPrefixOf s = L.isPrefixOf s `eqP`
(S.isPrefixOf (S.stream $ packS s) . S.stream)
sf_isPrefixOf p s = (L.isPrefixOf s . L.filter p) `eqP`
(S.isPrefixOf (S.stream $ packS s) . S.filter p . S.stream)
t_isPrefixOf s = L.isPrefixOf s`eqP` T.isPrefixOf (packS s)
tl_isPrefixOf s = L.isPrefixOf s`eqP` TL.isPrefixOf (packS s)
t_isSuffixOf s = L.isSuffixOf s`eqP` T.isSuffixOf (packS s)
tl_isSuffixOf s = L.isSuffixOf s`eqP` TL.isSuffixOf (packS s)
t_isInfixOf s = L.isInfixOf s `eqP` T.isInfixOf (packS s)
tl_isInfixOf s = L.isInfixOf s `eqP` TL.isInfixOf (packS s)
t_stripPrefix s = (fmap packS . L.stripPrefix s) `eqP` T.stripPrefix (packS s)
tl_stripPrefix s = (fmap packS . L.stripPrefix s) `eqP` TL.stripPrefix (packS s)
stripSuffix p t = reverse `fmap` L.stripPrefix (reverse p) (reverse t)
t_stripSuffix s = (fmap packS . stripSuffix s) `eqP` T.stripSuffix (packS s)
tl_stripSuffix s = (fmap packS . stripSuffix s) `eqP` TL.stripSuffix (packS s)
commonPrefixes a0@(_:_) b0@(_:_) = Just (go a0 b0 [])
where go (a:as) (b:bs) ps
| a == b = go as bs (a:ps)
go as bs ps = (reverse ps,as,bs)
commonPrefixes _ _ = Nothing
t_commonPrefixes a b (NonEmpty p)
= commonPrefixes pa pb ==
repack `fmap` T.commonPrefixes (packS pa) (packS pb)
where repack (x,y,z) = (unpackS x,unpackS y,unpackS z)
pa = p ++ a
pb = p ++ b
tl_commonPrefixes a b (NonEmpty p)
= commonPrefixes pa pb ==
repack `fmap` TL.commonPrefixes (packS pa) (packS pb)
where repack (x,y,z) = (unpackS x,unpackS y,unpackS z)
pa = p ++ a
pb = p ++ b
testSubstrings :: TestTree
testSubstrings =
testGroup "substrings" [
testGroup "breaking" [
testProperty "s_take" s_take,
testProperty "s_take_s" s_take_s,
testProperty "sf_take" sf_take,
testProperty "t_take" t_take,
testProperty "t_takeEnd" t_takeEnd,
testProperty "tl_take" tl_take,
testProperty "tl_takeEnd" tl_takeEnd,
testProperty "s_drop" s_drop,
testProperty "s_drop_s" s_drop_s,
testProperty "sf_drop" sf_drop,
testProperty "t_drop" t_drop,
testProperty "t_dropEnd" t_dropEnd,
testProperty "tl_drop" tl_drop,
testProperty "tl_dropEnd" tl_dropEnd,
testProperty "s_take_drop" s_take_drop,
testProperty "s_take_drop_s" s_take_drop_s,
testProperty "s_takeWhile" s_takeWhile,
testProperty "s_takeWhile_s" s_takeWhile_s,
testProperty "sf_takeWhile" sf_takeWhile,
testProperty "t_takeWhile" t_takeWhile,
testProperty "tl_takeWhile" tl_takeWhile,
testProperty "t_takeWhileEnd" t_takeWhileEnd,
testProperty "t_takeWhileEnd_null" t_takeWhileEnd_null,
testProperty "tl_takeWhileEnd" tl_takeWhileEnd,
testProperty "tl_takeWhileEnd_null" tl_takeWhileEnd_null,
testProperty "sf_dropWhile" sf_dropWhile,
testProperty "s_dropWhile" s_dropWhile,
testProperty "s_dropWhile_s" s_dropWhile_s,
testProperty "t_dropWhile" t_dropWhile,
testProperty "tl_dropWhile" tl_dropWhile,
testProperty "t_dropWhileEnd" t_dropWhileEnd,
testProperty "tl_dropWhileEnd" tl_dropWhileEnd,
testProperty "t_dropAround" t_dropAround,
testProperty "tl_dropAround" tl_dropAround,
testProperty "t_stripStart" t_stripStart,
testProperty "tl_stripStart" tl_stripStart,
testProperty "t_stripEnd" t_stripEnd,
testProperty "tl_stripEnd" tl_stripEnd,
testProperty "t_strip" t_strip,
testProperty "tl_strip" tl_strip,
testProperty "t_splitAt" t_splitAt,
testProperty "tl_splitAt" tl_splitAt,
testProperty "t_span" t_span,
testProperty "tl_span" tl_span,
testProperty "t_breakOn_id" t_breakOn_id,
testProperty "tl_breakOn_id" tl_breakOn_id,
testProperty "t_breakOn_start" t_breakOn_start,
testProperty "tl_breakOn_start" tl_breakOn_start,
testProperty "t_breakOnEnd_end" t_breakOnEnd_end,
testProperty "tl_breakOnEnd_end" tl_breakOnEnd_end,
testProperty "t_break" t_break,
testProperty "tl_break" tl_break,
testProperty "t_group" t_group,
testProperty "tl_group" tl_group,
testProperty "t_groupBy" t_groupBy,
testProperty "tl_groupBy" tl_groupBy,
testProperty "t_inits" t_inits,
testProperty "tl_inits" tl_inits,
testProperty "t_tails" t_tails,
testProperty "tl_tails" tl_tails
],
testGroup "breaking many" [
testProperty "t_findAppendId" t_findAppendId,
testProperty "tl_findAppendId" tl_findAppendId,
testProperty "t_findContains" t_findContains,
testProperty "tl_findContains" tl_findContains,
testProperty "sl_filterCount" sl_filterCount,
testProperty "t_findCount" t_findCount,
testProperty "tl_findCount" tl_findCount,
testProperty "t_splitOn_split" t_splitOn_split,
testProperty "tl_splitOn_split" tl_splitOn_split,
testProperty "t_splitOn_i" t_splitOn_i,
testProperty "tl_splitOn_i" tl_splitOn_i,
testProperty "t_split" t_split,
testProperty "t_split_count" t_split_count,
testProperty "t_split_splitOn" t_split_splitOn,
testProperty "tl_split" tl_split,
testProperty "t_chunksOf_same_lengths" t_chunksOf_same_lengths,
testProperty "t_chunksOf_length" t_chunksOf_length,
testProperty "tl_chunksOf" tl_chunksOf
],
testGroup "lines and words" [
testProperty "t_lines" t_lines,
testProperty "tl_lines" tl_lines,
--testProperty "t_lines'" t_lines',
testProperty "t_words" t_words,
testProperty "tl_words" tl_words,
testProperty "t_unlines" t_unlines,
testProperty "tl_unlines" tl_unlines,
testProperty "t_unwords" t_unwords,
testProperty "tl_unwords" tl_unwords
],
testGroup "predicates" [
testProperty "s_isPrefixOf" s_isPrefixOf,
testProperty "sf_isPrefixOf" sf_isPrefixOf,
testProperty "t_isPrefixOf" t_isPrefixOf,
testProperty "tl_isPrefixOf" tl_isPrefixOf,
testProperty "t_isSuffixOf" t_isSuffixOf,
testProperty "tl_isSuffixOf" tl_isSuffixOf,
testProperty "t_isInfixOf" t_isInfixOf,
testProperty "tl_isInfixOf" tl_isInfixOf,
testGroup "view" [
testProperty "t_stripPrefix" t_stripPrefix,
testProperty "tl_stripPrefix" tl_stripPrefix,
testProperty "t_stripSuffix" t_stripSuffix,
testProperty "tl_stripSuffix" tl_stripSuffix,
testProperty "t_commonPrefixes" t_commonPrefixes,
testProperty "tl_commonPrefixes" tl_commonPrefixes
]
]
]
| bos/text | tests/Tests/Properties/Substrings.hs | bsd-2-clause | 15,864 | 0 | 14 | 4,173 | 5,540 | 2,869 | 2,671 | 305 | 2 |
{-# LANGUAGE PackageImports #-}
import "mini-scilab-site" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings defaultSettings
{ settingsPort = port
} app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "dist/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| marcotmarcot/mini-scilab-site | devel.hs | bsd-2-clause | 710 | 0 | 10 | 123 | 186 | 101 | 85 | 23 | 2 |
{-# LANGUAGE CPP, OverloadedStrings #-}
module Demo.JS ( readInputState
, writeInputState
, mkRandomInput
, sDrawButton
, printHighError
, sCellsDiv
, sNumCells
, printLowError
, cullErrors
, mark
, sRandomButton
, sLowerControls
, drawList
, placeValues
, displayOutput
, scaleMax
, sCellGen
, getGenInfo
, mkCanvas ) where
import Control.Monad
import Control.Applicative
import Text.Read (readMaybe)
import JavaScript.JQuery
import JavaScript.Canvas hiding (Left, Right)
import GHCJS.Types
import GHCJS.Foreign
import Data.Text (pack, unpack, Text)
import qualified Data.Map as M (empty, insert)
import Data.Maybe (fromJust)
import Demo.Types
import Demo.Links
import Demo.Random
-- Easily Configurable!
canvasXPadding = 1 :: Double
canvasYPadding = 1 :: Double
scaleMax = 100 :: Double
minCanHeight = 160
sLowerControls = select "#c"
sNumCells = select "#numcells"
sStartHead = select "#starthead"
sCellGen = select "#generatenew"
sRandomButton = select "#randomButton"
sSizeDiv = select "#size"
sStartDiv = select "#start"
sCellsDiv = select "#boxbox"
sDrawButton = select "#drawButton"
sHeadInput = select "#head"
sCanvasBox = select "#drawingbox"
sCanvas = select "#theCanvas" -- don't forget to make it!
sCellNum i = select (pack (template (cellMkName i)))
where template n = "<div class=\"outer\"><div class=\"inner\">"
++ (show i)
++ "</div><input id=\""
++ n
++ "\" type=\"text\" name=\"a\" /></div>"
mark :: (Bool, [Int]) -> IO ()
mark (b,is) = markHead b >> unMarkCells >> markCells is
markHead :: Bool -> IO ()
markHead b =
if b
then sHeadInput
>>= setAttr "style" "border-color: red;"
>> return ()
else sHeadInput
>>= setAttr "style" "border-color: black;"
>> return ()
unMarkCells = do start <- pullVal sStartDiv
size <- pullVal sSizeDiv
let f a = select (pack ("#hey" ++ (show a)))
>>= setAttr "style" "border-color: black;"
>> return ()
r i s = if i < s
then f i >> r (i + 1) s
else return ()
r start (start + size)
markCells is = do let r :: [Int] -> IO ()
r (i:is) = f i >> r is
r [] = return ()
f a = select (pack ("#hey" ++ (show a)))
>>= setAttr "style" "border-color: red;"
>> return ()
r is
getGenInfo :: IO (Either String (Int, Int))
getGenInfo =
do start <- fmap unpack (sStartHead >>= getVal)
size <- fmap unpack (sNumCells >>= getVal)
case (readMaybe start, readMaybe size) of
(Nothing,_) -> return (Left "\"Starting Index\" requires Integer")
(Just _, Nothing) ->
return (Left "\"Number of Memory Cells\" requires Integer")
(Just i, Just s) -> return (Right (i,s))
placeValues :: Int -> Int -> IO ()
placeValues start size =
let f = (pack . show)
s val jq = jq >>= setVal (f val)
in s start sStartHead >> s size sNumCells >> return ()
{- There are two of these because when you are creating elements,
you must omit the "#" that you use to later select them.
I was using the "#" in the name to both create AND select them
before, and was getting a "TypeError" in the firefox console
as a result. Almost drove me crazy :/ -}
cellName :: Int -> String
cellName i = "#hey" ++ (show i)
cellMkName :: Int -> String
cellMkName i = "hey" ++ (show i)
printList :: Either String [LElem] -> IO ()
printList = print . show
showVal sel = sel >>= getVal >>= print . unpack
pullVal :: IO JQuery -> IO Int
pullVal sel = do s <- fmap unpack (sel >>= getVal)
print $ "this pullval: " ++ s
return (read s)
readInputState :: IO InputState
readInputState = do start <- pullVal sStartDiv
size <- pullVal sSizeDiv
print "readinputstate"
h <- getHead
m <- getMemSt start size
return (InSt start size h m)
mkRandomInput :: IO InputState
mkRandomInput = do showVal sStartDiv
showVal sSizeDiv
start <- pullVal sStartDiv
size <- pullVal sSizeDiv
print (pack ("mkrandom " ++ (show start) ++ ":" ++ (show size)))
ri <- randomInput start size
writeInputState ri
return ri
getHead :: IO String
getHead = fmap unpack (getVal =<< sHeadInput)
getMemSt :: Int -> Int -> IO MemSt
getMemSt start size = fmap mkMemSt (r start)
where r i = if i < (start + size)
then do c <- readCell i
fmap (c:) (r (i+1)) --liftM (:) (readCell i) (r (i+1))
else return []
writeInputState :: InputState -> IO ()
writeInputState (InSt i s h m) = mkBoxes i s m >> setHead h
setHead :: String -> IO ()
setHead h = sHeadInput >>= setVal (pack h) >> return ()
readMemSt :: [Cell] -> MemSt
readMemSt = foldr (\(i,s) -> M.insert i (i,s)) M.empty
readCell :: Int -> IO Cell
readCell i = let name = pack (cellName i)
in fmap (((,) i) . unpack) (print (cellName i) >> (print "ah" >> select name >>= getVal))
writeCell :: Int -> String -> IO ()
writeCell i s = select (pack (cellName i)) >>= setVal (pack s) >> return ()
mkBoxes :: Int -> Int -> MemSt -> IO ()
mkBoxes start size m = clear >> note start size >> r start 0
where note :: Int -> Int -> IO ()
note i s = let f x = (setVal ((pack . show) x))
in sSizeDiv >>= f s >> sStartDiv >>= f i >> return ()
r :: Int -> Int -> IO ()
r s i = if i < size
then do print $ "making box number " ++ (show (s + i))
box <- sCellNum (s + i)
parent <- sCellsDiv
appendJQuery box parent
writeCell (s + i) (stringAtIndex (s + i) m)
r s (i + 1)
else return ()
clear :: IO ()
clear = sCellsDiv >>= children >>= remove >> return ()
getCanvasDimensions :: IO (Int,Int)
getCanvasDimensions = do
sh <- getHeight =<< select "#s"
ah <- getHeight =<< select "#a"
bh <- getHeight =<< select "#b"
ch <- getHeight =<< select "#c"
dw <- getWidth =<< select "#drawingbox"
let h = max minCanHeight (floor (sh - ah - bh - ch - 170))
w = floor $ dw - 13 -- not sure why i need this...
return (w,h)
mkCanvas :: IO ()
mkCanvas = do
(w,h) <- getCanvasDimensions
p <- sCanvasBox
children p >>= remove
c <- select $ pack $ "<canvas id=\"theCanvas\" width=\""
++ show w
++ "\" height=\""
++ show h
++ "\"></canvas>"
appendJQuery c p
return ()
displayOutput :: Either String Layout -> IO ()
displayOutput l = cullErrors >> case l of
Left er -> printLowError er
Right ls -> drawList ls
withPadding :: (Double, Double) -> (Double, Double)
withPadding (x,y) = (x - (2 * canvasXPadding), y - (2 * canvasYPadding))
addOffsets :: Double -> (Double, Double) -> Layout -> LayoutD
addOffsets scale (cx,cy) ls = foldr f [] ls
where f (e, (x, y), os) = let sx = scale * (fromIntegral (fst (getRect ls)))
sy = scale * (fromIntegral (snd (getRect ls)))
fx = ((cx - sx) / 2) + canvasXPadding
fy = ((cy - sy) / 2) + canvasYPadding
dx = scale * (fromIntegral x)
dy = scale * (fromIntegral y)
in (:) (e, (dx + fx, dy + fy), nmap ((* scale) . fromIntegral) os)
type Coord = (Double, Double)
drawList :: Layout -> IO ()
drawList ls = do cints <- getCanvasDimensions
let csize = nmap fromIntegral cints
cdims = withPadding csize
scale = min scaleMax (findScale cdims (getRect ls))
(h,w) = csize
c <- sCanvas >>= indexArray 0 . castRef >>= getContext
save c
clearRect 0 0 h w c
restore c
let dls = addOffsets scale csize ls
r (l:ls) = (drawElem c scale) l >> r ls
r _ = return ()
r dls
drawElem :: Context -> Double -> (DElem, (Double, Double), (Double, Double)) -> IO ()
drawElem c scale elem =
let ((t,i,v), (x, y), (xo, yo)) = elem
in case t of
Box -> do save c
print ("The scale is: " ++ (show scale))
-- the following magic numbers were experimentally chosen...
lineWidth (scale * 5 / 64) c
strokeRect x (y + (yo / 3)) xo (yo * 2 / 3) c
drawTextFloor ( (x + (xo / 2))
, (y + (yo / 3) - (yo / 9)))
(xo / 2)
(yo / 7)
i c
drawTextCenter ( (x + (xo / 2)
, (y + (yo * 8 / 12))))
(xo * 4 / 5)
(yo * 7 / 18)
v c
restore c
Arrow -> do save c
let endX = (x + (xo * 10 / 12))
endY = (y + (yo * 12 / 18))
lineWidth (scale * 8 / 64) c
beginPath c
moveTo (x + (xo * 2 / 12)) endY c
lineTo (endX - (xo * 2 / 12)) endY c
stroke c
lineWidth (scale * 2 / 64) c
beginPath c
moveTo (x + (xo * 8 / 12)) (y + (yo * 13.5 / 18)) c
lineTo (endX + (xo * 0.5 / 12)) endY c
lineTo (x + (xo * 8 / 12)) (y + (yo * 10.5 / 18)) c
closePath c
stroke c
fill c
drawTextFloor ( (x + (xo / 2))
, (y + (yo * 9.5 / 18)))
(xo / 2)
(yo / 7)
i c
restore c
LoopBack z -> do let zd = (fromIntegral z) :: Double
save c
lineWidth (scale * 8 / 64) c
beginPath c
let yu = (yo / 10)
horiz = (y + 4 * yu)
moveTo (x + (xo * 2 / 12)) horiz c
lineTo (x + (xo / 2)) horiz c
lineTo (x + (xo / 2)) (horiz + 5 * yu) c
-- Here, we move back one width and then jump
-- two widths at a time (arrow + box)
let tarX = ((x + (xo / 2)) - xo - (zd * 2 * xo))
lineTo tarX (horiz + 5 * yu) c
lineTo tarX (horiz + 4 * yu) c
stroke c
lineWidth (scale * 2 / 64) c
beginPath c
moveTo (tarX - (xo * 1.5 / 12)) (horiz + 4 * yu) c
lineTo tarX (horiz + (3 * yu)) c
lineTo (tarX + (xo * 1.5 / 12)) (horiz + 4 * yu) c
closePath c
stroke c
fill c
drawTextFloor ( (x + (xo / 2))
, (y + (yu * 10 * (3 / 5) * 9.5 / 18)))
(xo / 2)
(yu * 8 / (7 * (4/3)))
i c
restore c
cullErrors = select "#lowError" >>= remove
>> select "#highError" >>= remove
>> return ()
printHighError = printError "highError" "#b"
printLowError = printError "lowError" "#c"
printError a b e =
do err <- select (pack
("<p class=\"errors\" id=\"" ++ a ++ "\">Error: (" ++ e ++ ")</p>"))
par <- select (pack b)
appendJQuery err par
return ()
drawTextCenter :: Coord -- location at which to center the text
-> Double -- maximum width of the text
-> Double -- maximum height of the text
-> String -- the text to be drawn
-> Context -- the canvas context
-> IO ()
drawTextCenter (x,y) maxW maxH s c =
do (a,b) <- setFont maxH maxW s c
fillText (pack s) (x - (a / 2)) (y + (b / 2)) c
-- same as drawTextCenter, but floors the text at the coordinates
drawTextFloor :: Coord -> Double -> Double -> String -> Context -> IO ()
drawTextFloor (x,y) maxW maxH s c =
do (a,_) <- setFont maxH maxW s c
fillText (pack s) (x - (a / 2)) y c
setFont :: Double -> Double -> String -> Context -> IO (Double, Double)
setFont maxHeight maxWidth s c = try maxWidth maxHeight s c
fontPrecision = 6 -- size of steps taken when choosing a font
panicSize = 1 -- size to choose if algorithm bottoms out
try d f s c = do font (pack ((show ((floor f)::Int)) ++ "pt Calibri")) c
x <- measureText (pack s) c
if x > d
                   then if f > 0
then try d (f - fontPrecision) s c
else print ("hit bottom..")
>> return (panicSize,f)
else print (show (floor f)) >> return (x,f)
| RoboNickBot/linked-list-web-demo | src/Demo/JS.hs | bsd-2-clause | 14,296 | 0 | 21 | 6,215 | 4,791 | 2,416 | 2,375 | 314 | 3 |
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.InterfaceFile
-- Copyright : (c) David Waern 2006-2009,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Reading and writing the .haddock interface file
-----------------------------------------------------------------------------
module Haddock.InterfaceFile (
InterfaceFile(..), ifUnitId, ifModule,
readInterfaceFile, nameCacheFromGhc, freshNameCache, NameCacheAccessor,
writeInterfaceFile, binaryInterfaceVersion, binaryInterfaceVersionCompatibility
) where
import Haddock.Types
import Control.Monad
import Control.Monad.IO.Class ( MonadIO(..) )
import Data.Array
import Data.IORef
import Data.List (mapAccumR)
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Word
import GHC.Iface.Binary (getSymtabName, getDictFastString)
import GHC.Utils.Binary
import GHC.Data.FastMutInt
import GHC.Data.FastString
import GHC hiding (NoLink)
import GHC.Driver.Monad (withSession)
import GHC.Driver.Env
import GHC.Types.Name.Cache
import GHC.Iface.Env
import GHC.Types.Name
import GHC.Types.Unique.FM
import GHC.Types.Unique.Supply
import GHC.Types.Unique
data InterfaceFile = InterfaceFile {
ifLinkEnv :: LinkEnv,
ifInstalledIfaces :: [InstalledInterface]
}
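-- | The 'Module' of the first installed interface in the file. Both this
-- and 'ifUnitId' call 'error' when the 'InterfaceFile' is empty.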
ifModule :: InterfaceFile -> Module
ifModule if_ =
case ifInstalledIfaces if_ of
[] -> error "empty InterfaceFile"
iface:_ -> instMod iface
ifUnitId :: InterfaceFile -> Unit
ifUnitId if_ =
case ifInstalledIfaces if_ of
[] -> error "empty InterfaceFile"
iface:_ -> moduleUnit $ instMod iface
binaryInterfaceMagic :: Word32
binaryInterfaceMagic = 0xD0Cface
-- Note [The DocModule story]
--
-- Breaking changes to the DocH type result in Haddock being unable to read
-- existing interfaces. This is especially painful for interfaces shipped
-- with GHC distributions since there is no easy way to regenerate them!
--
-- PR #1315 introduced a breaking change to the DocModule constructor. To
-- maintain backward compatibility we parse the old DocModule constructor
-- format (tag 5) and convert the contained string into a proper ModLink
-- structure. When writing interfaces we exclusively use the new DocModule
-- format (tag 24).
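--
-- Concretely, when the reader sees the old tag 5 payload (a bare module
-- name string, bound to @af@ in the 'Binary' instance below), it rebuilds
-- it as
--
-- > DocModule ModLink { modLinkName = af, modLinkLabel = Nothing }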
-- IMPORTANT: Since datatypes in the GHC API might change between major
-- versions, and because we store GHC datatypes in our interface files, we need
-- to make sure we version our interface files accordingly.
--
-- If you change the interface file format or adapt Haddock to work with a new
-- major version of GHC (so that the format changes indirectly) *you* need to
-- follow these steps:
--
-- (1) increase `binaryInterfaceVersion`
--
-- (2) set `binaryInterfaceVersionCompatibility` to [binaryInterfaceVersion]
--
binaryInterfaceVersion :: Word16
#if MIN_VERSION_ghc(9,2,0) && !MIN_VERSION_ghc(9,3,0)
binaryInterfaceVersion = 38
binaryInterfaceVersionCompatibility :: [Word16]
binaryInterfaceVersionCompatibility = [37, binaryInterfaceVersion]
#else
#error Unsupported GHC version
#endif
initBinMemSize :: Int
initBinMemSize = 1024*1024
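-- | Write an 'InterfaceFile' to disk as a @.haddock@ file. A minimal usage
-- sketch (the file name and the @ifaceFile@ value are just placeholders for
-- the example):
--
-- > writeInterfaceFile "mylib.haddock" ifaceFile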
writeInterfaceFile :: FilePath -> InterfaceFile -> IO ()
writeInterfaceFile filename iface = do
bh0 <- openBinMem initBinMemSize
put_ bh0 binaryInterfaceMagic
put_ bh0 binaryInterfaceVersion
-- remember where the dictionary pointer will go
dict_p_p <- tellBin bh0
put_ bh0 dict_p_p
-- remember where the symbol table pointer will go
symtab_p_p <- tellBin bh0
put_ bh0 symtab_p_p
  -- Make some initial state
symtab_next <- newFastMutInt 0
symtab_map <- newIORef emptyUFM
let bin_symtab = BinSymbolTable {
bin_symtab_next = symtab_next,
bin_symtab_map = symtab_map }
dict_next_ref <- newFastMutInt 0
dict_map_ref <- newIORef emptyUFM
let bin_dict = BinDictionary {
bin_dict_next = dict_next_ref,
bin_dict_map = dict_map_ref }
-- put the main thing
let bh = setUserData bh0 $ newWriteState (putName bin_symtab)
(putName bin_symtab)
(putFastString bin_dict)
put_ bh iface
-- write the symtab pointer at the front of the file
symtab_p <- tellBin bh
putAt bh symtab_p_p symtab_p
seekBin bh symtab_p
-- write the symbol table itself
symtab_next' <- readFastMutInt symtab_next
symtab_map' <- readIORef symtab_map
putSymbolTable bh symtab_next' symtab_map'
  -- write the dictionary pointer at the front of the file
dict_p <- tellBin bh
putAt bh dict_p_p dict_p
seekBin bh dict_p
-- write the dictionary itself
dict_next <- readFastMutInt dict_next_ref
dict_map <- readIORef dict_map_ref
putDictionary bh dict_next dict_map
-- and send the result to the file
writeBinMem bh filename
return ()
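-- | A getter and setter pair for a 'NameCache': the first component reads
-- the current cache, the second stores an updated one. 'nameCacheFromGhc'
-- and 'freshNameCache' are the two implementations provided here.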
type NameCacheAccessor m = (m NameCache, NameCache -> m ())
nameCacheFromGhc :: forall m. GhcMonad m => NameCacheAccessor m
nameCacheFromGhc = ( read_from_session , write_to_session )
where
read_from_session = do
ref <- withSession (return . hsc_NC)
liftIO $ readIORef ref
write_to_session nc' = do
ref <- withSession (return . hsc_NC)
liftIO $ writeIORef ref nc'
freshNameCache :: NameCacheAccessor IO
freshNameCache = ( create_fresh_nc , \_ -> return () )
where
create_fresh_nc = do
u <- mkSplitUniqSupply 'a' -- ??
return (initNameCache u [])
-- | Read a Haddock (@.haddock@) interface file. Return either an
-- 'InterfaceFile' or an error message.
--
-- This function can be called in two ways. Within a GHC session it will
-- use and update the session's name cache. Outside a GHC session
-- a new empty name cache is used. The function is therefore generic in the
-- monad being used. The exact monad is whichever monad the first
-- argument, the getter and setter of the name cache, requires.
--
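-- For example, outside of a GHC session one might write (the @.haddock@
-- file name is only an example):
--
-- > eIface <- readInterfaceFile freshNameCache "mylib.haddock" False
-- > case eIface of
-- >   Left err    -> putStrLn err
-- >   Right iface -> print (length (ifInstalledIfaces iface))
--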
readInterfaceFile :: forall m.
MonadIO m
=> NameCacheAccessor m
-> FilePath
-> Bool -- ^ Disable version check. Can cause runtime crash.
-> m (Either String InterfaceFile)
readInterfaceFile (get_name_cache, set_name_cache) filename bypass_checks = do
bh0 <- liftIO $ readBinMem filename
magic <- liftIO $ get bh0
version <- liftIO $ get bh0
case () of
_ | magic /= binaryInterfaceMagic -> return . Left $
"Magic number mismatch: couldn't load interface file: " ++ filename
| not bypass_checks
, (version `notElem` binaryInterfaceVersionCompatibility) -> return . Left $
"Interface file is of wrong version: " ++ filename
| otherwise -> with_name_cache $ \update_nc -> do
dict <- get_dictionary bh0
-- read the symbol table so we are capable of reading the actual data
bh1 <- do
let bh1 = setUserData bh0 $ newReadState (error "getSymtabName")
(getDictFastString dict)
symtab <- update_nc (get_symbol_table bh1)
return $ setUserData bh1 $ newReadState (getSymtabName (NCU (\f -> update_nc (return . f))) dict symtab)
(getDictFastString dict)
-- load the actual data
iface <- liftIO $ get bh1
return (Right iface)
where
with_name_cache :: forall a.
((forall n b. MonadIO n
=> (NameCache -> n (NameCache, b))
-> n b)
-> m a)
-> m a
with_name_cache act = do
nc_var <- get_name_cache >>= (liftIO . newIORef)
x <- act $ \f -> do
nc <- liftIO $ readIORef nc_var
(nc', x) <- f nc
liftIO $ writeIORef nc_var nc'
return x
liftIO (readIORef nc_var) >>= set_name_cache
return x
get_dictionary bin_handle = liftIO $ do
dict_p <- get bin_handle
data_p <- tellBin bin_handle
seekBin bin_handle dict_p
dict <- getDictionary bin_handle
seekBin bin_handle data_p
return dict
get_symbol_table bh1 theNC = liftIO $ do
symtab_p <- get bh1
data_p' <- tellBin bh1
seekBin bh1 symtab_p
(nc', symtab) <- getSymbolTable bh1 theNC
seekBin bh1 data_p'
return (nc', symtab)
-------------------------------------------------------------------------------
-- * Symbol table
-------------------------------------------------------------------------------
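-- | Write a 'Name' as a 'Word32' index into the in-memory symbol table,
-- allocating a fresh index the first time a given name is seen.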
putName :: BinSymbolTable -> BinHandle -> Name -> IO ()
putName BinSymbolTable{
bin_symtab_map = symtab_map_ref,
bin_symtab_next = symtab_next } bh name
= do
symtab_map <- readIORef symtab_map_ref
case lookupUFM symtab_map name of
Just (off,_) -> put_ bh (fromIntegral off :: Word32)
Nothing -> do
off <- readFastMutInt symtab_next
writeFastMutInt symtab_next (off+1)
writeIORef symtab_map_ref
$! addToUFM symtab_map name (off,name)
put_ bh (fromIntegral off :: Word32)
data BinSymbolTable = BinSymbolTable {
bin_symtab_next :: !FastMutInt, -- The next index to use
bin_symtab_map :: !(IORef (UniqFM Name (Int,Name)))
-- indexed by Name
}
putFastString :: BinDictionary -> BinHandle -> FastString -> IO ()
putFastString BinDictionary { bin_dict_next = j_r,
bin_dict_map = out_r} bh f
= do
out <- readIORef out_r
let !unique = getUnique f
case lookupUFM_Directly out unique of
Just (j, _) -> put_ bh (fromIntegral j :: Word32)
Nothing -> do
j <- readFastMutInt j_r
put_ bh (fromIntegral j :: Word32)
writeFastMutInt j_r (j + 1)
writeIORef out_r $! addToUFM_Directly out unique (j, f)
data BinDictionary = BinDictionary {
bin_dict_next :: !FastMutInt, -- The next index to use
bin_dict_map :: !(IORef (UniqFM FastString (Int,FastString)))
-- indexed by FastString
}
putSymbolTable :: BinHandle -> Int -> UniqFM Name (Int,Name) -> IO ()
putSymbolTable bh next_off symtab = do
put_ bh next_off
let names = elems (array (0,next_off-1) (eltsUFM symtab))
mapM_ (\n -> serialiseName bh n symtab) names
getSymbolTable :: BinHandle -> NameCache -> IO (NameCache, Array Int Name)
getSymbolTable bh namecache = do
sz <- get bh
od_names <- replicateM sz (get bh)
let arr = listArray (0,sz-1) names
(namecache', names) = mapAccumR (fromOnDiskName arr) namecache od_names
return (namecache', arr)
type OnDiskName = (Unit, ModuleName, OccName)
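-- | Convert an 'OnDiskName' back into a 'Name', reusing the entry in the
-- 'NameCache' when present and otherwise allocating a fresh unique and
-- extending the cache.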
fromOnDiskName
:: Array Int Name
-> NameCache
-> OnDiskName
-> (NameCache, Name)
fromOnDiskName _ nc (pid, mod_name, occ) =
let
modu = mkModule pid mod_name
cache = nsNames nc
in
case lookupOrigNameCache cache modu occ of
Just name -> (nc, name)
Nothing ->
let
us = nsUniqs nc
u = uniqFromSupply us
name = mkExternalName u modu occ noSrcSpan
new_cache = extendNameCache cache modu occ name
in
case splitUniqSupply us of { (us',_) ->
( nc{ nsUniqs = us', nsNames = new_cache }, name )
}
serialiseName :: BinHandle -> Name -> UniqFM Name (Int,Name) -> IO ()
serialiseName bh name _ = do
let modu = nameModule name
put_ bh (moduleUnit modu, moduleName modu, nameOccName name)
-------------------------------------------------------------------------------
-- * GhcBinary instances
-------------------------------------------------------------------------------
instance (Ord k, Binary k, Binary v) => Binary (Map k v) where
put_ bh m = put_ bh (Map.toList m)
get bh = fmap (Map.fromList) (get bh)
instance Binary InterfaceFile where
put_ bh (InterfaceFile env ifaces) = do
put_ bh env
put_ bh ifaces
get bh = do
env <- get bh
ifaces <- get bh
return (InterfaceFile env ifaces)
instance Binary InstalledInterface where
put_ bh (InstalledInterface modu is_sig info docMap argMap
exps visExps opts fixMap) = do
put_ bh modu
put_ bh is_sig
put_ bh info
lazyPut bh (docMap, argMap)
put_ bh exps
put_ bh visExps
put_ bh opts
put_ bh fixMap
get bh = do
modu <- get bh
is_sig <- get bh
info <- get bh
~(docMap, argMap) <- lazyGet bh
exps <- get bh
visExps <- get bh
opts <- get bh
fixMap <- get bh
return (InstalledInterface modu is_sig info docMap argMap
exps visExps opts fixMap)
instance Binary DocOption where
put_ bh OptHide = do
putByte bh 0
put_ bh OptPrune = do
putByte bh 1
put_ bh OptIgnoreExports = do
putByte bh 2
put_ bh OptNotHome = do
putByte bh 3
put_ bh OptShowExtensions = do
putByte bh 4
get bh = do
h <- getByte bh
case h of
0 -> do
return OptHide
1 -> do
return OptPrune
2 -> do
return OptIgnoreExports
3 -> do
return OptNotHome
4 -> do
return OptShowExtensions
_ -> fail "invalid binary data found"
instance Binary Example where
put_ bh (Example expression result) = do
put_ bh expression
put_ bh result
get bh = do
expression <- get bh
result <- get bh
return (Example expression result)
instance Binary a => Binary (Hyperlink a) where
put_ bh (Hyperlink url label) = do
put_ bh url
put_ bh label
get bh = do
url <- get bh
label <- get bh
return (Hyperlink url label)
instance Binary a => Binary (ModLink a) where
put_ bh (ModLink m label) = do
put_ bh m
put_ bh label
get bh = do
m <- get bh
label <- get bh
return (ModLink m label)
instance Binary Picture where
put_ bh (Picture uri title) = do
put_ bh uri
put_ bh title
get bh = do
uri <- get bh
title <- get bh
return (Picture uri title)
instance Binary a => Binary (Header a) where
put_ bh (Header l t) = do
put_ bh l
put_ bh t
get bh = do
l <- get bh
t <- get bh
return (Header l t)
instance Binary a => Binary (Table a) where
put_ bh (Table h b) = do
put_ bh h
put_ bh b
get bh = do
h <- get bh
b <- get bh
return (Table h b)
instance Binary a => Binary (TableRow a) where
put_ bh (TableRow cs) = put_ bh cs
get bh = do
cs <- get bh
return (TableRow cs)
instance Binary a => Binary (TableCell a) where
put_ bh (TableCell i j c) = do
put_ bh i
put_ bh j
put_ bh c
get bh = do
i <- get bh
j <- get bh
c <- get bh
return (TableCell i j c)
instance Binary Meta where
put_ bh (Meta v p) = do
put_ bh v
put_ bh p
get bh = do
v <- get bh
p <- get bh
return (Meta v p)
instance (Binary mod, Binary id) => Binary (MetaDoc mod id) where
put_ bh MetaDoc { _meta = m, _doc = d } = do
put_ bh m
put_ bh d
get bh = do
m <- get bh
d <- get bh
return $ MetaDoc { _meta = m, _doc = d }
instance (Binary mod, Binary id) => Binary (DocH mod id) where
put_ bh DocEmpty = do
putByte bh 0
put_ bh (DocAppend aa ab) = do
putByte bh 1
put_ bh aa
put_ bh ab
put_ bh (DocString ac) = do
putByte bh 2
put_ bh ac
put_ bh (DocParagraph ad) = do
putByte bh 3
put_ bh ad
put_ bh (DocIdentifier ae) = do
putByte bh 4
put_ bh ae
put_ bh (DocEmphasis ag) = do
putByte bh 6
put_ bh ag
put_ bh (DocMonospaced ah) = do
putByte bh 7
put_ bh ah
put_ bh (DocUnorderedList ai) = do
putByte bh 8
put_ bh ai
put_ bh (DocOrderedList aj) = do
putByte bh 9
put_ bh aj
put_ bh (DocDefList ak) = do
putByte bh 10
put_ bh ak
put_ bh (DocCodeBlock al) = do
putByte bh 11
put_ bh al
put_ bh (DocHyperlink am) = do
putByte bh 12
put_ bh am
put_ bh (DocPic x) = do
putByte bh 13
put_ bh x
put_ bh (DocAName an) = do
putByte bh 14
put_ bh an
put_ bh (DocExamples ao) = do
putByte bh 15
put_ bh ao
put_ bh (DocIdentifierUnchecked x) = do
putByte bh 16
put_ bh x
put_ bh (DocWarning ag) = do
putByte bh 17
put_ bh ag
put_ bh (DocProperty x) = do
putByte bh 18
put_ bh x
put_ bh (DocBold x) = do
putByte bh 19
put_ bh x
put_ bh (DocHeader aa) = do
putByte bh 20
put_ bh aa
put_ bh (DocMathInline x) = do
putByte bh 21
put_ bh x
put_ bh (DocMathDisplay x) = do
putByte bh 22
put_ bh x
put_ bh (DocTable x) = do
putByte bh 23
put_ bh x
-- See note [The DocModule story]
put_ bh (DocModule af) = do
putByte bh 24
put_ bh af
get bh = do
h <- getByte bh
case h of
0 -> do
return DocEmpty
1 -> do
aa <- get bh
ab <- get bh
return (DocAppend aa ab)
2 -> do
ac <- get bh
return (DocString ac)
3 -> do
ad <- get bh
return (DocParagraph ad)
4 -> do
ae <- get bh
return (DocIdentifier ae)
-- See note [The DocModule story]
5 -> do
af <- get bh
return $ DocModule ModLink
{ modLinkName = af
, modLinkLabel = Nothing
}
6 -> do
ag <- get bh
return (DocEmphasis ag)
7 -> do
ah <- get bh
return (DocMonospaced ah)
8 -> do
ai <- get bh
return (DocUnorderedList ai)
9 -> do
aj <- get bh
return (DocOrderedList aj)
10 -> do
ak <- get bh
return (DocDefList ak)
11 -> do
al <- get bh
return (DocCodeBlock al)
12 -> do
am <- get bh
return (DocHyperlink am)
13 -> do
x <- get bh
return (DocPic x)
14 -> do
an <- get bh
return (DocAName an)
15 -> do
ao <- get bh
return (DocExamples ao)
16 -> do
x <- get bh
return (DocIdentifierUnchecked x)
17 -> do
ag <- get bh
return (DocWarning ag)
18 -> do
x <- get bh
return (DocProperty x)
19 -> do
x <- get bh
return (DocBold x)
20 -> do
aa <- get bh
return (DocHeader aa)
21 -> do
x <- get bh
return (DocMathInline x)
22 -> do
x <- get bh
return (DocMathDisplay x)
23 -> do
x <- get bh
return (DocTable x)
-- See note [The DocModule story]
24 -> do
af <- get bh
return (DocModule af)
_ -> error "invalid binary data found in the interface file"
instance Binary name => Binary (HaddockModInfo name) where
put_ bh hmi = do
put_ bh (hmi_description hmi)
put_ bh (hmi_copyright hmi)
put_ bh (hmi_license hmi)
put_ bh (hmi_maintainer hmi)
put_ bh (hmi_stability hmi)
put_ bh (hmi_portability hmi)
put_ bh (hmi_safety hmi)
put_ bh (fromEnum <$> hmi_language hmi)
put_ bh (map fromEnum $ hmi_extensions hmi)
get bh = do
descr <- get bh
copyr <- get bh
licen <- get bh
maint <- get bh
stabi <- get bh
porta <- get bh
safet <- get bh
langu <- fmap toEnum <$> get bh
exten <- map toEnum <$> get bh
return (HaddockModInfo descr copyr licen maint stabi porta safet langu exten)
instance Binary DocName where
put_ bh (Documented name modu) = do
putByte bh 0
put_ bh name
put_ bh modu
put_ bh (Undocumented name) = do
putByte bh 1
put_ bh name
get bh = do
h <- getByte bh
case h of
0 -> do
name <- get bh
modu <- get bh
return (Documented name modu)
1 -> do
name <- get bh
return (Undocumented name)
_ -> error "get DocName: Bad h"
instance Binary n => Binary (Wrap n) where
put_ bh (Unadorned n) = do
putByte bh 0
put_ bh n
put_ bh (Parenthesized n) = do
putByte bh 1
put_ bh n
put_ bh (Backticked n) = do
putByte bh 2
put_ bh n
get bh = do
h <- getByte bh
case h of
0 -> do
name <- get bh
return (Unadorned name)
1 -> do
name <- get bh
return (Parenthesized name)
2 -> do
name <- get bh
return (Backticked name)
_ -> error "get Wrap: Bad h"
| haskell/haddock | haddock-api/src/Haddock/InterfaceFile.hs | bsd-2-clause | 22,415 | 0 | 28 | 7,907 | 6,462 | 3,039 | 3,423 | -1 | -1 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
( widgetFile
, PersistConfig
, staticRoot
, staticDir
, booksDir
, Extra (..)
, parseExtra
) where
import Prelude
import Text.Shakespeare.Text (st)
import Language.Haskell.TH.Syntax
import Database.Persist.Sqlite (SqliteConf)
import Yesod.Default.Config
import qualified Yesod.Default.Util
import Data.Text (Text)
import Data.Yaml
import Control.Applicative
import System.FilePath (combine)
import Settings.Development
-- | Which Persistent backend this site is using.
type PersistConfig = SqliteConf
-- Static settings below. Changing these requires a recompile
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"
booksDir :: FilePath
booksDir = combine staticDir "books"
-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in Foundation.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in Foundation.hs
staticRoot :: AppConfig DefaultEnv x -> Text
staticRoot conf = [st|#{appRoot conf}/static|]
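-- For example (the app root value is purely illustrative): with an approot of
-- "http://localhost:3000", staticRoot produces "http://localhost:3000/static".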
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = if development then Yesod.Default.Util.widgetFileReload
else Yesod.Default.Util.widgetFileNoReload
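-- Usage sketch (the template name is illustrative): a handler splices in
-- $(widgetFile "homepage") to pull in the matching templates; in development
-- the Reload variant is chosen so template edits show up without rebuilding
-- the whole site, while the NoReload variant bakes them in at compile time.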
data Extra = Extra
{ extraCopyright :: Text
, extraAnalytics :: Maybe Text -- ^ Google Analytics
} deriving Show
parseExtra :: DefaultEnv -> Object -> Parser Extra
parseExtra _ o = Extra
<$> o .: "copyright"
<*> o .:? "analytics"
| thlorenz/WebToInk | webtoink/Settings.hs | bsd-2-clause | 2,518 | 0 | 9 | 454 | 295 | 187 | 108 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
--{-# LANGUAGE QuasiQuotes #-}
module Main where
import MyLib (app)
import Control.Monad (mzero)
import Data.Aeson
import Test.Hspec
import Test.Hspec.Wai
import Network.HTTP.Types (methodPost, hContentType)
--import Test.Hspec.Wai.JSON
--instance of Message
data ResponseMsg = ResponseMsg { name :: String
, message :: String
} deriving (Eq, Show)
instance FromJSON ResponseMsg where
parseJSON (Object o) =
ResponseMsg <$> o .: "name"
<*> o .: "message"
parseJSON _ = mzero
instance ToJSON ResponseMsg where
-- this generates a Value
toJSON (ResponseMsg n m) =
object ["name" .= n, "message" .= m]
--instance of ResponseRespData
data ResponseRespData = ResponseRespData { response :: String
} deriving (Eq, Show)
instance FromJSON ResponseRespData where
parseJSON (Object o) =
ResponseRespData <$> o .: "response"
parseJSON _ = mzero
instance ToJSON ResponseRespData where
-- this generates a Value
toJSON (ResponseRespData r) =
object ["response" .= r]
--instance of LastCommitDetails
data ResonseLastCommit = ResonseLastCommit { commit_url :: String
, last_commit_hash_value :: String
} deriving (Eq, Show)
instance FromJSON ResonseLastCommit where
parseJSON (Object o) =
ResonseLastCommit <$> o .: "commit_url"
<*> o .: "last_commit_hash_value"
parseJSON _ = mzero
instance ToJSON ResonseLastCommit where
-- this generates a Value
toJSON (ResonseLastCommit c l) =
object ["commit_url" .= c, "last_commit_hash_value" .= l]
--instance of metadata
data ResonseMetadata = ResonseMetadata { url :: String
, no_of_commits :: String
, last_commit_hash :: String
} deriving (Eq, Show)
instance FromJSON ResonseMetadata where
parseJSON (Object v) =
ResonseMetadata <$> v .: "url"
<*> v .: "no_of_commits"
<*> v .: "last_commit_hash"
-- A non-Object value is of the wrong type, so fail.
parseJSON _ = mzero
instance ToJSON ResonseMetadata where
-- this generates a Value
toJSON (ResonseMetadata u n l) =
object ["url" .= u, "no_of_commits" .= n, "last_commit_hash" .= l]
--instance of complexity
data ResonseComplexity = ResonseComplexity { repo_url :: String
, complexity :: String
} deriving (Eq, Show)
instance FromJSON ResonseComplexity where
parseJSON (Object v) =
ResonseComplexity <$> v .: "repo_url"
<*> v .: "complexity"
-- A non-Object value is of the wrong type, so fail.
parseJSON _ = mzero
instance ToJSON ResonseComplexity where
-- this generates a Value
toJSON (ResonseComplexity r c) =
object ["repo_url" .= r, "complexity" .= c]
main :: IO ()
main = do
hspec spec
spec :: Spec
spec = with (return app) $ do
  --test case for storing a sample message
describe "POST /storeMessage true" $ do
it "responds with storeMessage" $ do
let postJson p = Test.Hspec.Wai.request methodPost p [
(hContentType, "application/json;charset=utf-8")
]
(postJson "/storeMessage" $ encode $ toJSON $ ResponseMsg "ecky" "hello") `shouldRespondWith` "true" {matchHeaders = ["Content-Type" <:> "application/json"]}
  --test case for searchMessage: searching with an empty name should return an empty list
describe "GET /searchMessage name is null" $ do
it "responds with searchMessage" $ do
get "/searchMessage?name=\"\"" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for searchMessage: searching for a name that was never stored should return an empty list
  describe "GET /searchMessage name does not exist" $ do
it "responds with searchMessage" $ do
get "/searchMessage?name=notexit" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for searchMessage: the "ecky" message stored by the earlier test should be returned
describe "GET /searchMessage?name=ecky" $ do
it "responds with searchMessage" $ do
get "/searchMessage?name=ecky" `shouldRespondWith` "[{\"name\":\"ecky\",\"message\":\"hello\"}]" {matchStatus = 200}
--test case for storeMetaData API
describe "POST /storeMetaData true" $ do
it "responds with storeMetaData" $ do
let postJson p = Test.Hspec.Wai.request methodPost p [
(hContentType, "application/json;charset=utf-8")
]
(postJson "/storeMetaData" $ encode $ toJSON $ ResonseMetadata "https://github.com/sagarsachdeva/database-service" "15" "HESBDGADHBSD") `shouldRespondWith` "true" {matchHeaders = ["Content-Type" <:> "application/json"]}
  --test case for getLastCommitDetails: an empty url should return an empty list
describe "GET /getLastCommitDetails url is null" $ do
it "responds with getLastCommitDetails" $ do
get "/getLastCommitDetails?url=\"\"" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for getLastCommitDetails: a url that was never stored should return an empty list
  describe "GET /getLastCommitDetails url does not exist" $ do
it "responds with getLastCommitDetails" $ do
get "/getLastCommitDetails?url=notextes" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for getLastCommitDetails: the url stored by the storeMetaData test should return its commit details
describe "GET /getLastCommitDetails url is www.storeMetaDataTest.com" $ do
it "responds with getLastCommitDetails" $ do
get "/getLastCommitDetails?url=https://github.com/sagarsachdeva/database-service" `shouldRespondWith` "[{\"last_commit_hash_value\":\"HESBDGADHBSD\",\"commit_url\":\"https://github.com/sagarsachdeva/database-service\"}]" {matchStatus = 200}
  --test case for storeComplexity API
describe "POST /storeComplexity true" $ do
it "responds with storeComplexity" $ do
let postJson p = Test.Hspec.Wai.request methodPost p [
(hContentType, "application/json;charset=utf-8")
]
(postJson "/storeComplexity" $ encode $ toJSON $ ResonseComplexity "https://github.com/sagarsachdeva/database-service" "10") `shouldRespondWith` "true" {matchHeaders = ["Content-Type" <:> "application/json"]}
  --test case for getRepoMetrics: an empty url should return an empty list
describe "GET /getRepoMetrics url is null" $ do
it "responds with getRepoMetrics" $ do
get "/getRepoMetrics?url=\"\"" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for getRepoMetrics: a url that was never stored should return an empty list
  describe "GET /getRepoMetrics url does not exist" $ do
it "responds with getRepoMetrics" $ do
get "/getRepoMetrics?url=notextes" `shouldRespondWith` "[]" {matchStatus = 200}
  --test case for getRepoMetrics: the url stored earlier should return its combined metrics
describe "GET /getRepoMetrics url is https://github.com/sagarsachdeva/database-service" $ do
it "responds with getRepoMetrics" $ do
get "/getRepoMetrics?url=https://github.com/sagarsachdeva/database-service" `shouldRespondWith` "[{\"url\":\"https://github.com/sagarsachdeva/database-service\",\"last_commit_hash\":\"HESBDGADHBSD\",\"complexity\":\"10\",\"no_of_commits\":\"15\"}]" {matchStatus = 200}
| sagarsachdeva/database-service | test/Main.hs | bsd-3-clause | 7,843 | 0 | 19 | 2,108 | 1,377 | 719 | 658 | 110 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.PL_XX (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("five",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Thursday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -0.5658077581833438, unseen = -4.941642422609305,
likelihoods = HashMap.fromList [("", 0.0)], n = 138},
koData =
ClassData{prior = -0.8391010931830252, unseen = -4.672828834461907,
likelihoods = HashMap.fromList [("", 0.0)], n = 105}}),
("exactly <time-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.3862943611198906),
("<integer> (latent time-of-day)", -1.791759469228055),
("hour", -0.8754687373538999),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("<cycle> before <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("dayday", -0.6931471805599453),
("day (grain)yesterday", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Father's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> <cycle> <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("third ordinalday (grain)October", -2.5257286443082556),
("daymonth", -2.120263536200091),
("quarteryear", -2.5257286443082556),
("third ordinalday (grain)on <date>", -2.5257286443082556),
("weekmonth", -1.4271163556401458),
("ordinal (digits)quarter (grain)year", -2.5257286443082556),
("first ordinalweek (grain)intersect", -2.120263536200091),
("first ordinalweek (grain)October", -2.5257286443082556),
("first ordinalweek (grain)on <date>", -2.120263536200091)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [], n = 0}}),
("Easter Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("zima",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.7922380832041762,
unseen = -4.9344739331306915,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)after <time-of-day>", -3.828641396489095),
("dayhour", -2.847812143477369),
("Wednesdayevening|night", -4.23410650459726),
("<ordinal> (as hour)evening|night", -2.5293584123588344),
("<ordinal> (as hour)on <date>", -4.23410650459726),
("yesterdayevening|night", -4.23410650459726),
("hourhour", -1.213681618452897),
("after <time-of-day>after <time-of-day>", -3.828641396489095),
("until <time-of-day>morning", -3.828641396489095),
("until <time-of-day>after <time-of-day>", -4.23410650459726),
("minutehour", -4.23410650459726),
("todayevening|night", -3.828641396489095),
("at <time-of-day>evening|night", -4.23410650459726),
("intersecton <date>", -4.23410650459726),
("<integer> (latent time-of-day)this <part-of-day>",
-4.23410650459726),
("hh:mmon <date>", -4.23410650459726),
("<integer> (latent time-of-day)morning", -4.23410650459726),
("at <time-of-day>on <date>", -4.23410650459726),
("intersectmorning", -2.981343536101891),
("<integer> (latent time-of-day)evening|night",
-2.981343536101891),
("from <datetime> - <datetime> (interval)morning",
-4.23410650459726),
("from <time-of-day> - <time-of-day> (interval)morning",
-4.23410650459726),
("Mondaymorning", -4.23410650459726),
("on <date>morning", -4.23410650459726),
("at <time-of-day>morning", -3.828641396489095),
("tomorrowevening|night", -3.828641396489095)],
n = 48},
koData =
ClassData{prior = -0.6029960835656478,
unseen = -5.0689042022202315,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)after <time-of-day>", -4.3694478524670215),
("dayhour", -2.018072595303544),
("yearhour", -2.0668627594729756),
("year (latent)on <date>", -4.3694478524670215),
("<day-of-month> (ordinal)on <date>", -4.3694478524670215),
("nograinhour", -3.1166848839716534),
("<time-of-day> - <time-of-day> (interval)morning",
-4.3694478524670215),
("<day-of-month> (ordinal)evening|night", -2.6646997602285962),
("by the end of <time>morning", -4.3694478524670215),
("year (latent)evening|night", -3.1166848839716534),
("hourhour", -2.423537703411708),
("after <time-of-day>after <time-of-day>", -3.963982744358857),
("<day-of-month> (ordinal)morning", -3.963982744358857),
("<day-of-month> (ordinal)after <time-of-day>",
-3.270835563798912),
("until <time-of-day>morning", -4.3694478524670215),
("about <time-of-day>after <time-of-day>", -4.3694478524670215),
("by <time>morning", -4.3694478524670215),
("<integer> (latent time-of-day)after <time-of-day>",
-3.963982744358857),
("<integer> (latent time-of-day)morning", -3.270835563798912),
("intersectmorning", -3.270835563798912),
("year (latent)morning", -2.6646997602285962),
("year (latent)after <time-of-day>", -3.963982744358857),
("at <time-of-day>after <time-of-day>", -4.3694478524670215)],
n = 58}}),
("today",
Classifier{okData =
ClassData{prior = -0.2006706954621511,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -1.7047480922384253,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("mm/dd",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.13580154115906176,
unseen = -4.787491742782046,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.4832866271072003),
("<integer> (latent time-of-day)", -2.381228220313159),
("about <time-of-day>", -4.085976312551584),
("hh:mm", -3.6805112044434196),
("<time-of-day> rano", -2.987364023883474),
("hour", -0.827879774530102),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-2.2942168433235293),
("minute", -3.169685580677429)],
n = 55},
koData =
ClassData{prior = -2.063693184711697, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.8325814637483102),
("<integer> (latent time-of-day)", -1.8325814637483102),
("relative minutes after|past <integer> (hour-of-day)",
-2.5257286443082556),
("hour", -1.1394342831883648),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-2.5257286443082556),
("minute", -2.5257286443082556)],
n = 8}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.970291913552122,
likelihoods =
HashMap.fromList
[("Wednesday", -2.159484249353372),
("Saturday", -3.258096538021482),
("Monday", -1.754019141245208), ("Friday", -2.5649493574615367),
("day", -0.7731898882334817), ("Sunday", -2.005333569526114)],
n = 23},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("September",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("11th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("on <date>",
Classifier{okData =
ClassData{prior = -7.79615414697118e-2,
unseen = -4.543294782270004,
likelihoods =
HashMap.fromList
[("week", -2.9231615807191553),
("Thursday", -3.4339872044851463),
("<time> <part-of-day>", -3.8394523125933104),
("September", -3.4339872044851463),
("October", -2.5866893440979424),
("intersect", -2.0476928433652555),
("Saturday", -3.4339872044851463),
("next <cycle>", -3.146305132033365),
("half to|till|before <integer> (hour-of-day)",
-3.8394523125933104),
("day", -2.740840023925201), ("afternoon", -3.146305132033365),
("this <cycle>", -2.9231615807191553),
("year", -3.146305132033365), ("March", -3.8394523125933104),
("hour", -2.3353749158170367), ("month", -1.6993861490970399),
("minute", -3.8394523125933104),
("this <time>", -3.8394523125933104)],
n = 37},
koData =
ClassData{prior = -2.5902671654458267, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("noon", -1.8325814637483102), ("hour", -1.8325814637483102)],
n = 3}}),
("8th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Good Friday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("October",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("", 0.0)], n = 11},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> o'clock",
Classifier{okData =
ClassData{prior = -0.3184537311185346, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -0.8649974374866046),
("<integer> (latent time-of-day)", -2.2512917986064953),
("hour", -0.7472144018302211)],
n = 8},
koData =
ClassData{prior = -1.2992829841302609,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.5040773967762742),
("<integer> (latent time-of-day)", -1.0986122886681098),
("hour", -0.8109302162163288)],
n = 3}}),
("on a named-day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("Thursday", -1.2992829841302609),
("Saturday", -1.2992829841302609),
("day", -0.7884573603642702)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> <time>",
Classifier{okData =
ClassData{prior = -0.4989911661189879,
unseen = -3.9889840465642745,
likelihoods =
HashMap.fromList
[("third ordinalTuesdayon <date>", -3.2771447329921766),
("daymonth", -1.4853852637641216),
("dayyear", -2.0243817644968085),
("second ordinalWednesdayon <date>", -3.2771447329921766),
("first ordinalWednesdayintersect", -2.871679624884012),
("first ordinalWednesdayon <date>", -3.2771447329921766),
("second ordinalWednesdayintersect", -2.871679624884012),
("third ordinalintersectyear", -2.871679624884012),
("first ordinalTuesdayon <date>", -3.2771447329921766),
("first ordinalTuesdayOctober", -3.2771447329921766),
("first ordinalintersectyear", -2.871679624884012),
("third ordinalTuesdayintersect", -2.871679624884012),
("second ordinalintersectyear", -2.871679624884012)],
n = 17},
koData =
ClassData{prior = -0.9343092373768334,
unseen = -3.7376696182833684,
likelihoods =
HashMap.fromList
[("third ordinalTuesdaySeptember", -3.0204248861443626),
("third ordinalTuesdayon <date>", -3.0204248861443626),
("daymonth", -1.7676619176489945),
("14th ordinalApril<integer> (latent time-of-day)",
-2.6149597780361984),
("monthhour", -1.9218125974762528),
("second ordinalWednesdayon <date>", -3.0204248861443626),
("first ordinalWednesdayon <date>", -3.0204248861443626),
("ordinal (digits)April<integer> (latent time-of-day)",
-2.327277705584417),
("second ordinalWednesdayOctober", -3.0204248861443626),
("first ordinalWednesdayOctober", -3.0204248861443626)],
n = 11}}),
("Wednesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.9444389791664407,
likelihoods = HashMap.fromList [("", 0.0)], n = 17},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> (as hour)",
Classifier{okData =
ClassData{prior = -2.985296314968116e-2,
unseen = -4.454347296253507,
likelihoods =
HashMap.fromList
[("11th ordinal", -3.056356895370426),
("8th ordinal", -3.3440389678222067),
("21st ordinal no space", -3.7495040759303713),
("20th ordinal", -3.7495040759303713),
("third ordinal", -1.8035939268750578),
("16th ordinal", -3.3440389678222067),
("18th ordinal", -3.3440389678222067),
("fifth ordinal", -3.7495040759303713),
("seventh ordinal", -3.3440389678222067),
("19th ordinal", -3.3440389678222067),
("21-29th ordinal", -3.3440389678222067),
("sixth ordinal", -3.3440389678222067),
("15th ordinal", -3.3440389678222067),
("second ordinal", -2.1400661634962708),
("ordinal (digits)", -2.3632097148104805),
("10th ordinal", -2.496741107435003),
("9th ordinal", -2.3632097148104805),
("first ordinal", -3.7495040759303713),
("23rd ordinal no space", -3.7495040759303713)],
n = 66},
koData =
ClassData{prior = -3.5263605246161616,
unseen = -3.0910424533583156,
likelihoods =
HashMap.fromList
[("20th ordinal", -2.3513752571634776),
("first ordinal", -2.3513752571634776)],
n = 2}}),
("November",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("July",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = -0.8472978603872037,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -0.5596157879354228, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12}}),
("21st ordinal no space",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("quarter", -0.916290731874155),
("third ordinalquarter (grain)", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.916290731874155),
("quarter", -0.916290731874155)],
n = 1}}),
("May",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("intersect",
Classifier{okData =
ClassData{prior = -0.29394383367575705,
unseen = -6.278521424165844,
likelihoods =
HashMap.fromList
[("Tuesdayon <date>", -4.667205576907544),
("Sundaylast <cycle>", -5.1780312006735345),
("Fridaynext <cycle>", -5.583496308781699),
("<datetime> - <datetime> (interval)on <date>",
-5.1780312006735345),
("mm/dd<time-of-day> popo\322udniu/wieczorem/w nocy",
-5.583496308781699),
("<hour-of-day> - <hour-of-day> (interval)on <date>",
-5.1780312006735345),
("<time-of-day> - <time-of-day> (interval)on <date>",
-5.1780312006735345),
("hourday", -3.791736839553644),
("dayhour", -2.7212954278522306),
("daymonth", -3.791736839553644),
("<time-of-day> popo\322udniu/wieczorem/w nocyabsorption of , after named day",
-4.667205576907544),
("TuesdaySeptember", -5.583496308781699),
("monthyear", -3.0985896589936988),
("Wednesdayintersect", -5.1780312006735345),
("from <hour-of-day> - <hour-of-day> (interval)on a named-day",
-5.583496308781699),
("from <time-of-day> - <time-of-day> (interval)on a named-day",
-5.583496308781699),
("intersecthh:mm", -5.583496308781699),
("from <datetime> - <datetime> (interval)on a named-day",
-5.583496308781699),
("Wednesdaynext <cycle>", -5.1780312006735345),
("Tuesdaythis <cycle>", -5.583496308781699),
("Saturday<time> <part-of-day>", -5.583496308781699),
("Marchyear", -5.1780312006735345),
("Saturdayat <time-of-day>", -5.1780312006735345),
("on a named-dayat <time-of-day>", -5.1780312006735345),
("at <time-of-day>on a named-day", -5.583496308781699),
("<time> <part-of-day>on a named-day", -5.1780312006735345),
("Wednesdayon <date>", -4.484884020113589),
("on a named-day<time> <part-of-day>", -5.583496308781699),
("last <day-of-week> of <time>year", -5.583496308781699),
("today<time> <part-of-day>", -5.583496308781699),
("todayat <time-of-day>", -5.583496308781699),
("on <date>at <time-of-day>", -5.1780312006735345),
("dayday", -2.9444389791664407),
("on <date><time> <part-of-day>", -5.583496308781699),
("intersect by \",\"hh:mm", -4.667205576907544),
("mm/ddat <time-of-day>", -4.890349128221754),
("WednesdayOctober", -5.1780312006735345),
("last <cycle> <time>year", -4.667205576907544),
("intersect<named-month> <day-of-month> (non ordinal)",
-4.890349128221754),
("intersect<day-of-month> (non ordinal) <named-month>",
-4.890349128221754),
("dayyear", -3.5685932882394344),
("Wednesday<day-of-month>(ordinal) <named-month>",
-5.583496308781699),
("Thursday<time> <part-of-day>", -5.1780312006735345),
("<day-of-month>(ordinal) <named-month>year",
-5.1780312006735345),
("day-after-tomorrow (single-word)at <time-of-day>",
-5.583496308781699),
("absorption of , after named day<day-of-month>(ordinal) <named-month>",
-4.079418912005425),
("Tuesdaynext <cycle>", -5.583496308781699),
("tomorrow<time-of-day> popo\322udniu/wieczorem/w nocy",
-5.583496308781699),
("absorption of , after named day<named-month> <day-of-month> (non ordinal)",
-4.197201947661808),
("Thursdayfrom <datetime> - <datetime> (interval)",
-4.667205576907544),
("Thursdayfrom <time-of-day> - <time-of-day> (interval)",
-4.667205576907544),
("tomorrowuntil <time-of-day>", -5.1780312006735345),
("absorption of , after named day<day-of-month> (non ordinal) <named-month>",
-4.079418912005425),
("<time-of-day> popo\322udniu/wieczorem/w nocyintersect by \",\"",
-4.667205576907544),
("TuesdayOctober", -5.583496308781699),
("last <day-of-week> <time>year", -5.1780312006735345),
("Monday<day-of-month>(ordinal) <named-month>",
-5.1780312006735345),
("Mondaythis <cycle>", -5.583496308781699),
("<time-of-day> ranoon <date>", -5.1780312006735345),
("on <date><time-of-day> rano", -5.583496308781699),
("<time-of-day> popo\322udniu/wieczorem/w nocytomorrow",
-5.583496308781699),
("from <time-of-day> - <time-of-day> (interval)on <date>",
-5.583496308781699),
("<time-of-day> popo\322udniu/wieczorem/w nocyFriday",
-4.667205576907544),
("at <time-of-day>intersect", -5.1780312006735345),
("dayminute", -3.791736839553644),
("<time-of-day> ranoon a named-day", -5.1780312006735345),
("from <hour-of-day> - <hour-of-day> (interval)on <date>",
-5.583496308781699),
("from <datetime> - <datetime> (interval)on <date>",
-5.583496308781699),
("intersectyear", -5.1780312006735345),
("on a named-day<time-of-day> rano", -5.583496308781699),
("<ordinal> <cycle> of <time>year", -5.583496308781699),
("minuteday", -2.384823191231018),
("absorption of , after named dayintersect",
-5.583496308781699),
("Saturday<time-of-day> rano", -5.583496308781699),
("Octoberyear", -3.878748216543274),
("yearhh:mm", -5.583496308781699),
("at <time-of-day>intersect by \",\"", -5.1780312006735345),
("absorption of , after named dayintersect by \",\"",
-5.1780312006735345),
("tomorrowexactly <time-of-day>", -4.890349128221754),
("at <time-of-day>absorption of , after named day",
-5.1780312006735345),
("Septemberyear", -4.667205576907544),
("at <time-of-day>on <date>", -5.583496308781699),
("on <date>year", -4.197201947661808),
("dayweek", -3.6375861597263857),
("Tuesdayin <duration>", -5.583496308781699),
("<time> <part-of-day>on <date>", -5.1780312006735345),
("weekyear", -4.330733340286331),
("<ordinal> <cycle> <time>year", -5.1780312006735345),
("tomorrowat <time-of-day>", -5.583496308781699),
("tomorrow<time> <part-of-day>", -5.583496308781699),
("at <time-of-day>Friday", -5.1780312006735345),
("<time-of-day> popo\322udniu/wieczorem/w nocyintersect",
-4.667205576907544),
("<named-month> <day-of-month> (ordinal)year",
-5.583496308781699),
("tomorrow<time-of-day> rano", -5.583496308781699),
("<datetime> - <datetime> (interval)on a named-day",
-5.1780312006735345),
("last <cycle> of <time>year", -5.1780312006735345),
("<named-month> <day-of-month> (non ordinal)year",
-5.583496308781699),
("<time-of-day> - <time-of-day> (interval)on a named-day",
-5.1780312006735345),
("<day-of-month> (non ordinal) <named-month>year",
-5.1780312006735345),
("<hour-of-day> - <hour-of-day> (interval)on a named-day",
-5.1780312006735345),
("yearminute", -5.583496308781699)],
n = 199},
koData =
ClassData{prior = -1.3677409532241427, unseen = -5.602118820879701,
likelihoods =
HashMap.fromList
[("Tuesdayon <date>", -4.90527477843843),
("<time-of-day> ranoby <time>", -4.499809670330265),
("Julyrelative minutes to|till|before <integer> (hour-of-day)",
-4.90527477843843),
("dayhour", -3.8066624897703196),
("daymonth", -2.6026896854443837),
("monthyear", -3.9889840465642745),
("Wednesdayintersect", -4.499809670330265),
("<time> <part-of-day>until <time-of-day>", -4.499809670330265),
("hournograin", -3.033472601536838),
("Marchyear", -4.212127597878484),
("Wednesdayon <date>", -4.90527477843843),
("absorption of , after named dayJuly", -4.90527477843843),
("<time> <part-of-day><time> <part-of-day>",
-4.212127597878484),
("mm/ddat <time-of-day>", -4.90527477843843),
("hourhour", -3.9889840465642745),
("<time> <part-of-day>by <time>", -4.499809670330265),
("dayyear", -3.6525118099430616),
("<time-of-day> ranoby the end of <time>", -4.499809670330265),
("<named-month> <day-of-month> (non ordinal)until <time-of-day>",
-4.90527477843843),
("monthminute", -4.90527477843843),
("minutemonth", -4.212127597878484),
("Aprilyear", -4.90527477843843),
("<time-of-day> popo\322udniu/wieczorem/w nocyintersect by \",\"",
-4.499809670330265),
("after <time-of-day>at <time-of-day>", -4.90527477843843),
("SundayMarch", -4.90527477843843),
("hh:mmby the end of <time>", -4.90527477843843),
("minutenograin", -4.499809670330265),
("<time> <part-of-day>by the end of <time>",
-4.499809670330265),
("<time> <part-of-day><time-of-day> rano", -4.90527477843843),
("<time-of-day> rano<time> <part-of-day>", -4.499809670330265),
("at <time-of-day>intersect", -4.90527477843843),
("<named-month> <day-of-month> (non ordinal)by <time>",
-4.90527477843843),
("intersectyear", -3.6525118099430616),
("intersectSeptember", -4.212127597878484),
("minuteday", -3.200526686200004),
("hh:mmon <date>", -4.499809670330265),
("at <time-of-day>intersect by \",\"", -4.90527477843843),
("MondayMarch", -4.90527477843843),
("<named-month> <day-of-month> (non ordinal)by the end of <time>",
-4.90527477843843),
("tomorrowexactly <time-of-day>", -4.90527477843843),
("absorption of , after named daySeptember", -4.90527477843843),
("hh:mmon a named-day", -4.499809670330265),
("Sundayon <date>", -4.499809670330265),
("absorption of , after named dayFebruary",
-3.8066624897703196),
("hh:mmby <time>", -4.90527477843843),
("tomorrowat <time-of-day>", -4.90527477843843),
("daynograin", -4.499809670330265),
("<time-of-day> popo\322udniu/wieczorem/w nocyintersect",
-4.499809670330265),
("Tuesdayintersect", -4.499809670330265),
("Sundayintersect", -4.499809670330265)],
n = 68}}),
("half after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("twenty",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("20th ordinal",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("a few",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("third ordinalday (grain)October", -1.7047480922384253),
("daymonth", -1.7047480922384253),
("weekmonth", -1.2992829841302609),
("first ordinalweek (grain)intersect", -1.7047480922384253),
("first ordinalweek (grain)October", -1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("year (grain)",
Classifier{okData =
ClassData{prior = -6.0624621816434854e-2,
unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -2.833213344056216, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("from <datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -0.4700036292457356,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("minuteminute", -1.791759469228055),
("<time-of-day> rano<time-of-day> rano", -2.1972245773362196),
("hh:mmhh:mm", -1.791759469228055),
("hourhour", -1.791759469228055),
("minutehour", -2.1972245773362196),
("<time-of-day> rano<integer> (latent time-of-day)",
-2.1972245773362196),
("hh:mm<integer> (latent time-of-day)", -2.1972245773362196)],
n = 5},
koData =
ClassData{prior = -0.9808292530117262, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("minuteminute", -1.540445040947149),
("minutehour", -1.9459101490553135),
("hh:mmintersect", -1.540445040947149),
("hh:mm<integer> (latent time-of-day)", -1.9459101490553135)],
n = 3}}),
("Saturday",
Classifier{okData =
ClassData{prior = -0.13353139262452263,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -2.0794415416798357,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("from <hour-of-day> - <hour-of-day> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("minuteminute", -0.6931471805599453),
("hh:mmhh:mm", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("next <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.713572066704308,
likelihoods =
HashMap.fromList
[("week", -1.491654876777717),
("month (grain)", -2.995732273553991),
("year (grain)", -2.5902671654458267),
("second", -2.995732273553991),
("week (grain)", -1.491654876777717),
("quarter", -2.3025850929940455), ("year", -2.5902671654458267),
("second (grain)", -2.995732273553991),
("month", -2.995732273553991),
("quarter (grain)", -2.3025850929940455)],
n = 15},
koData =
ClassData{prior = -infinity, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [], n = 0}}),
("number.number hours",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from <time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -0.1823215567939546, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("minuteminute", -1.7346010553881064),
("<time-of-day> rano<time-of-day> rano", -2.1400661634962708),
("hh:mmhh:mm", -1.7346010553881064),
("hourhour", -1.7346010553881064),
("minutehour", -2.1400661634962708),
("<time-of-day> rano<integer> (latent time-of-day)",
-2.1400661634962708),
("hh:mm<integer> (latent time-of-day)", -2.1400661634962708)],
n = 5},
koData =
ClassData{prior = -1.791759469228055, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("minutehour", -1.5040773967762742),
("hh:mm<integer> (latent time-of-day)", -1.5040773967762742)],
n = 1}}),
("Three Kings' Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("year (latent)",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 20}}),
("mm/dd/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("evening|night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.2188758248682006,
likelihoods = HashMap.fromList [("", 0.0)], n = 23},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("third ordinal",
Classifier{okData =
ClassData{prior = -5.129329438755058e-2,
unseen = -3.044522437723423,
likelihoods = HashMap.fromList [("", 0.0)], n = 19},
koData =
ClassData{prior = -2.995732273553991, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("All Saints' Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Monday",
Classifier{okData =
ClassData{prior = -5.406722127027582e-2,
unseen = -2.995732273553991,
likelihoods = HashMap.fromList [("", 0.0)], n = 18},
koData =
ClassData{prior = -2.9444389791664407,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter <year>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("quarteryear", -0.6931471805599453),
("ordinal (digits)quarter (grain)year", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Saint Nicholas Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm:ss",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("16th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter to|till|before <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.791759469228055),
("<integer> (latent time-of-day)", -1.791759469228055),
("noon", -1.3862943611198906), ("hour", -0.8754687373538999)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> (latent time-of-day)",
Classifier{okData =
ClassData{prior = -0.3184537311185346, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("integer (numeric)", -7.232066157962613e-2),
("fifteen", -3.068052935133617)],
n = 40},
koData =
ClassData{prior = -1.2992829841302609,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.1823215567939546),
("one", -2.1972245773362196)],
n = 15}}),
("Labour Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> of <time>",
Classifier{okData =
ClassData{prior = -0.5596157879354228, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("daymonth", -1.1631508098056809),
("first ordinalWednesdayintersect", -2.0794415416798357),
("second ordinalWednesdayintersect", -2.0794415416798357),
("first ordinalTuesdayOctober", -2.0794415416798357),
("third ordinalTuesdayintersect", -2.0794415416798357)],
n = 4},
koData =
ClassData{prior = -0.8472978603872037, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("third ordinalTuesdaySeptember", -1.9459101490553135),
("daymonth", -1.252762968495368),
("second ordinalWednesdayOctober", -1.9459101490553135),
("first ordinalWednesdayOctober", -1.9459101490553135)],
n = 3}}),
("Valentine's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("April",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("St Stephen's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("18th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.784189633918261,
likelihoods = HashMap.fromList [("", 0.0)], n = 42},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("fifth ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Corpus Christi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <day-of-week> <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("daymonth", -0.8873031950009028),
("SundayMarch", -2.1400661634962708),
("MondayMarch", -2.1400661634962708),
("Sundayon <date>", -1.7346010553881064),
("Sundayintersect", -1.7346010553881064)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("Christmas Eve",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<unit-of-duration> as a duration",
Classifier{okData =
ClassData{prior = -2.1435203983643585,
unseen = -3.9318256327243257,
likelihoods =
HashMap.fromList
[("week", -1.7147984280919266),
("hour (grain)", -2.8134107167600364),
("second", -2.5257286443082556),
("week (grain)", -1.7147984280919266),
("day", -2.5257286443082556),
("minute (grain)", -3.2188758248682006),
("second (grain)", -2.5257286443082556),
("hour", -2.8134107167600364), ("minute", -3.2188758248682006),
("day (grain)", -2.5257286443082556)],
n = 17},
koData =
ClassData{prior = -0.12470347850095725, unseen = -5.60947179518496,
likelihoods =
HashMap.fromList
[("week", -2.050454004806584),
("month (grain)", -3.120895416507997),
("hour (grain)", -2.6100697927420065),
("year (grain)", -2.715430308399833),
("second", -3.996364153861897),
("week (grain)", -2.050454004806584),
("day", -2.3869262414277963), ("quarter", -3.4085774889597777),
("minute (grain)", -3.120895416507997),
("year", -2.715430308399833),
("second (grain)", -3.996364153861897),
("hour", -2.6100697927420065), ("month", -3.120895416507997),
("quarter (grain)", -3.4085774889597777),
("minute", -3.120895416507997),
("day (grain)", -2.3869262414277963)],
n = 128}}),
("this <part-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("on <date>", -1.5040773967762742),
("evening|night", -1.0986122886681098),
("hour", -0.8109302162163288)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month>",
Classifier{okData =
ClassData{prior = -0.2318016140573243, unseen = -4.060443010546419,
likelihoods =
HashMap.fromList
[("15th ordinalFebruary", -2.9444389791664407),
("8th ordinalAugust", -2.9444389791664407),
("13th ordinalFebruary", -3.349904087274605),
("first ordinalMarch", -2.9444389791664407),
("ordinal (digits)February", -1.55814461804655),
("third ordinalMarch", -3.349904087274605),
("month", -0.8649974374866046),
("ordinal (digits)March", -2.4336133554004498)],
n = 23},
koData =
ClassData{prior = -1.575536360758419, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("14th ordinalApril", -2.03688192726104),
("third ordinalMay", -2.4423470353692043),
("ordinal (digits)April", -1.749199854809259),
("month", -1.1895840668738362)],
n = 6}}),
("<duration> hence",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("week", -1.3121863889661687),
("<unit-of-duration> as a duration", -1.8718021769015913),
("day", -2.159484249353372), ("year", -2.5649493574615367),
("<integer> <unit-of-duration>", -1.1786549963416462),
("month", -2.5649493574615367)],
n = 10},
koData =
ClassData{prior = -1.0986122886681098, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("week", -2.0794415416798357),
("<unit-of-duration> as a duration", -0.9808292530117262),
("day", -1.6739764335716716), ("year", -2.0794415416798357),
("month", -2.0794415416798357)],
n = 5}}),
("numbers prefix with -, negative or minus",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 11}}),
("Friday",
Classifier{okData =
ClassData{prior = -0.10536051565782628,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -2.3025850929940455,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("tomorrow",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("<cycle> after <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("day (grain)tomorrow", -0.6931471805599453),
("dayday", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Mother's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Assumption Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Eve",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half to|till|before <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> after next",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("Friday", -1.3862943611198906), ("day", -1.3862943611198906),
("March", -1.3862943611198906), ("month", -1.3862943611198906)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("two",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Palm Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("by <time>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -2.456735772821304),
("year (latent)", -2.8622008809294686),
("<integer> (latent time-of-day)", -1.9459101490553135),
("day", -2.456735772821304), ("year", -2.8622008809294686),
("hh:mm", -2.8622008809294686),
("<day-of-month> (ordinal)", -2.456735772821304),
("noon", -2.8622008809294686),
("<time-of-day> rano", -2.8622008809294686),
("hour", -1.3581234841531944), ("minute", -2.8622008809294686)],
n = 12}}),
("seventh ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half an hour",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("one",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8}}),
("Sunday",
Classifier{okData =
ClassData{prior = -0.10008345855698253,
unseen = -3.044522437723423,
likelihoods = HashMap.fromList [("", 0.0)], n = 19},
koData =
ClassData{prior = -2.3513752571634776,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("afternoon",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("<duration> from now",
Classifier{okData =
ClassData{prior = -0.40546510810816444, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("second", -1.9459101490553135),
("<unit-of-duration> as a duration", -1.540445040947149),
("day", -1.9459101490553135), ("year", -1.9459101490553135),
("<integer> <unit-of-duration>", -1.540445040947149),
("minute", -1.9459101490553135)],
n = 4},
koData =
ClassData{prior = -1.0986122886681098,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("<unit-of-duration> as a duration", -1.2039728043259361),
("year", -1.6094379124341003), ("minute", -1.6094379124341003)],
n = 2}}),
("February",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4657359027997265,
likelihoods = HashMap.fromList [("", 0.0)], n = 30},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.8066624897703196,
likelihoods =
HashMap.fromList
[("week", -1.5869650565820417),
("year (grain)", -1.9924301646902063),
("week (grain)", -1.5869650565820417),
("day", -2.6855773452501515), ("quarter", -2.3978952727983707),
("year", -1.9924301646902063),
("quarter (grain)", -2.3978952727983707),
("day (grain)", -2.6855773452501515)],
n = 18},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <cycle> <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("day (grain)October", -2.639057329615259),
("daymonth", -1.540445040947149),
("day (grain)intersect", -2.2335922215070942),
("day (grain)on <date>", -2.2335922215070942),
("weekmonth", -1.540445040947149),
("week (grain)intersect", -2.2335922215070942),
("week (grain)on <date>", -2.2335922215070942),
("week (grain)September", -2.639057329615259)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("about <time-of-day>",
Classifier{okData =
ClassData{prior = -0.11778303565638351,
unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.4423470353692043),
("<ordinal> (as hour)", -1.5260563034950494),
("<integer> (latent time-of-day)", -2.03688192726104),
("hour", -0.9382696385929302),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-2.4423470353692043)],
n = 8},
koData =
ClassData{prior = -2.1972245773362196,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("relative minutes after|past <integer> (hour-of-day)",
-1.5040773967762742),
("minute", -1.5040773967762742)],
n = 1}}),
("year",
Classifier{okData =
ClassData{prior = -0.14842000511827333,
unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 25},
koData =
ClassData{prior = -1.9810014688665833, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4}}),
("last <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("daymonth", -0.8472978603872037),
("SundayMarch", -1.252762968495368),
("Sundayintersect", -1.252762968495368)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -0.7091475219063865, unseen = -4.59511985013459,
likelihoods =
HashMap.fromList
[("week", -2.2823823856765264),
("threemonth (grain)", -3.4863551900024623),
("fifteenminute (grain)", -3.891820298110627),
("a fewhour (grain)", -3.891820298110627),
("integer (numeric)day (grain)", -2.793208009442517),
("twoweek (grain)", -3.891820298110627),
("fiveday (grain)", -3.891820298110627),
("oneweek (grain)", -3.1986731175506815),
("oneminute (grain)", -3.891820298110627),
("integer (numeric)year (grain)", -3.891820298110627),
("day", -2.505525936990736), ("year", -3.1986731175506815),
("integer (numeric)week (grain)", -3.1986731175506815),
("oneday (grain)", -3.891820298110627),
("hour", -3.1986731175506815), ("month", -3.4863551900024623),
("threeweek (grain)", -3.4863551900024623),
("integer (numeric)minute (grain)", -2.793208009442517),
("minute", -2.505525936990736),
("integer (numeric)hour (grain)", -3.4863551900024623),
("twoyear (grain)", -3.4863551900024623)],
n = 31},
koData =
ClassData{prior = -0.6773988235918061, unseen = -4.61512051684126,
likelihoods =
HashMap.fromList
[("week", -2.8134107167600364),
("threemonth (grain)", -3.912023005428146),
("threehour (grain)", -3.912023005428146),
("integer (numeric)day (grain)", -3.2188758248682006),
("twoweek (grain)", -3.912023005428146),
("twominute (grain)", -3.912023005428146),
("second", -2.995732273553991),
("threeday (grain)", -3.912023005428146),
("threeyear (grain)", -3.912023005428146),
("integer (numeric)second (grain)", -3.506557897319982),
("twomonth (grain)", -3.912023005428146),
("onehour (grain)", -3.912023005428146),
("integer (numeric)year (grain)", -3.506557897319982),
("threesecond (grain)", -3.912023005428146),
("day", -2.659260036932778), ("year", -2.995732273553991),
("threeminute (grain)", -3.912023005428146),
("integer (numeric)week (grain)", -3.2188758248682006),
("twoday (grain)", -3.912023005428146),
("hour", -2.659260036932778), ("month", -3.2188758248682006),
("threeweek (grain)", -3.912023005428146),
("integer (numeric)minute (grain)", -3.506557897319982),
("a fewday (grain)", -3.912023005428146),
("integer (numeric)month (grain)", -3.912023005428146),
("minute", -2.995732273553991),
("twosecond (grain)", -3.912023005428146),
("integer (numeric)hour (grain)", -3.2188758248682006),
("fifteenhour (grain)", -3.912023005428146),
("twoyear (grain)", -3.912023005428146)],
n = 32}}),
("19th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> after <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("dayday", -0.6931471805599453),
("<unit-of-duration> as a durationtomorrow",
-0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("relative minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("integer (numeric)<integer> (latent time-of-day)",
-1.7047480922384253),
("fifteen<ordinal> (as hour)", -1.7047480922384253),
("hour", -1.0116009116784799),
("integer (numeric)<ordinal> (as hour)", -1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("integer (numeric)noon", -1.0986122886681098),
("hour", -1.0986122886681098)],
n = 2}}),
("Holy Saturday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Thanksgiving Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \",\"",
Classifier{okData =
ClassData{prior = -0.2282586519809802, unseen = -4.727387818712341,
likelihoods =
HashMap.fromList
[("Wednesday<named-month> <day-of-month> (non ordinal)",
-4.02535169073515),
("Wednesday<day-of-month> (non ordinal) <named-month>",
-3.332204510175204),
("Friday<named-month> <day-of-month> (non ordinal)",
-3.619886582626985),
("Friday<day-of-month> (non ordinal) <named-month>",
-4.02535169073515),
("intersect by \",\"year", -4.02535169073515),
("dayday", -1.4996230464268938),
("intersect<named-month> <day-of-month> (non ordinal)",
-3.332204510175204),
("intersect<day-of-month> (non ordinal) <named-month>",
-3.332204510175204),
("dayyear", -3.109060958860994),
("Wednesday<day-of-month>(ordinal) <named-month>",
-4.02535169073515),
("<named-month> <day-of-month> (non ordinal)intersect",
-4.02535169073515),
("Monday<day-of-month>(ordinal) <named-month>",
-3.619886582626985),
("Saturday<day-of-month>(ordinal) <named-month>",
-4.02535169073515),
("Fridayintersect", -4.02535169073515),
("intersect by \",\"intersect", -4.02535169073515),
("Sunday<named-month> <day-of-month> (non ordinal)",
-4.02535169073515),
("Sunday<day-of-month> (non ordinal) <named-month>",
-4.02535169073515),
("dayminute", -2.9267394020670396),
("intersectyear", -4.02535169073515),
("minuteday", -2.772588722239781),
("Sunday<day-of-month>(ordinal) <named-month>",
-3.109060958860994),
("intersectintersect", -4.02535169073515),
("Fridayintersect by \",\"", -3.619886582626985),
("Monday<named-month> <day-of-month> (non ordinal)",
-3.332204510175204),
("Monday<day-of-month> (non ordinal) <named-month>",
-3.332204510175204),
("<named-month> <day-of-month> (non ordinal)year",
-3.619886582626985)],
n = 39},
koData =
ClassData{prior = -1.589235205116581, unseen = -4.007333185232471,
likelihoods =
HashMap.fromList
[("daymonth", -1.9095425048844386),
("SundayFebruary", -3.295836866004329),
("FridayJuly", -3.295836866004329),
("FridaySeptember", -3.295836866004329),
("WednesdayFebruary", -3.295836866004329),
("minutemonth", -2.6026896854443837),
("MondayFebruary", -2.6026896854443837),
("intersectSeptember", -2.6026896854443837)],
n = 10}}),
("hh:mm",
Classifier{okData =
ClassData{prior = -4.652001563489282e-2,
unseen = -3.1354942159291497,
likelihoods = HashMap.fromList [("", 0.0)], n = 21},
koData =
ClassData{prior = -3.0910424533583156,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("quarter after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("14th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("21-29th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("second ordinal", -0.6931471805599453),
("first ordinal", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> before <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("dayday", -0.6931471805599453),
("<unit-of-duration> as a durationyesterday",
-0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("second (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("13th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \"of\", \"from\", \"'s\"",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("Sundaylast <cycle>", -0.6931471805599453),
("dayweek", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> ago",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("week", -1.3121863889661687),
("<unit-of-duration> as a duration", -1.8718021769015913),
("day", -2.159484249353372), ("year", -2.5649493574615367),
("<integer> <unit-of-duration>", -1.1786549963416462),
("month", -2.5649493574615367)],
n = 10},
koData =
ClassData{prior = -1.0986122886681098, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("week", -2.0794415416798357),
("<unit-of-duration> as a duration", -0.9808292530117262),
("day", -1.6739764335716716), ("year", -2.0794415416798357),
("month", -2.0794415416798357)],
n = 5}}),
("day-before-yesterday (single-word)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Constitution Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <time>",
Classifier{okData =
ClassData{prior = -0.5596157879354228,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("day", -0.8754687373538999), ("Sunday", -1.3862943611198906),
("Tuesday", -1.3862943611198906)],
n = 4},
koData =
ClassData{prior = -0.8472978603872037,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("Monday", -1.6094379124341003), ("day", -0.916290731874155),
("Sunday", -1.2039728043259361)],
n = 3}}),
("March",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.995732273553991,
likelihoods = HashMap.fromList [("", 0.0)], n = 18},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("sixth ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("11th ordinal", -2.2335922215070942),
("8th ordinal", -2.639057329615259),
("third ordinal", -2.2335922215070942),
("16th ordinal", -2.639057329615259),
("second ordinal", -1.7227665977411035),
("ordinal (digits)", -2.2335922215070942),
("10th ordinal", -1.7227665977411035),
("9th ordinal", -1.7227665977411035)],
n = 20}}),
("Easter Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("noon",
Classifier{okData =
ClassData{prior = -1.791759469228055, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.1823215567939546,
unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10}}),
("Christmas",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("until <time-of-day>",
Classifier{okData =
ClassData{prior = -0.8873031950009028,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -1.9924301646902063),
("<integer> (latent time-of-day)", -1.9924301646902063),
("<time-of-day> rano", -1.9924301646902063),
("hour", -1.0116009116784799),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-2.3978952727983707)],
n = 7},
koData =
ClassData{prior = -0.5306282510621704, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -2.2335922215070942),
("<integer> (latent time-of-day)", -1.7227665977411035),
("hh:mm", -2.639057329615259), ("noon", -2.2335922215070942),
("<time-of-day> rano", -2.639057329615259),
("hour", -1.0296194171811581), ("minute", -2.639057329615259)],
n = 10}}),
("<integer> and an half hours",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ash Wednesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> rano",
Classifier{okData =
ClassData{prior = -0.10008345855698253,
unseen = -3.828641396489095,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.0149030205422647),
("<ordinal> (as hour)", -2.70805020110221),
("<integer> (latent time-of-day)", -1.5040773967762742),
("hh:mm", -3.1135153092103742),
("until <time-of-day>", -2.70805020110221),
("hour", -0.8622235106038793), ("minute", -3.1135153092103742)],
n = 19},
koData =
ClassData{prior = -2.3513752571634776,
unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -1.7047480922384253),
("until <time-of-day>", -1.7047480922384253),
("hour", -1.2992829841302609)],
n = 2}}),
("after <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453),
("<integer> <unit-of-duration>", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("decimal number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("next <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("Wednesday", -2.5649493574615367),
("Monday", -2.159484249353372), ("day", -0.9555114450274363),
("March", -2.5649493574615367), ("month", -2.5649493574615367),
("Tuesday", -1.3121863889661687)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("last <cycle>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -3.4965075614664802,
likelihoods =
HashMap.fromList
[("week", -1.6739764335716716),
("month (grain)", -1.6739764335716716),
("year (grain)", -2.367123614131617),
("week (grain)", -1.6739764335716716),
("year", -2.367123614131617), ("month", -1.6739764335716716)],
n = 12},
koData =
ClassData{prior = -1.3862943611198906, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("week", -1.6739764335716716),
("week (grain)", -1.6739764335716716),
("day", -1.6739764335716716),
("day (grain)", -1.6739764335716716)],
n = 4}}),
("next n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.912023005428146,
likelihoods =
HashMap.fromList
[("week", -2.793208009442517),
("threemonth (grain)", -3.1986731175506815),
("threehour (grain)", -3.1986731175506815),
("integer (numeric)day (grain)", -3.1986731175506815),
("second", -2.793208009442517),
("threeday (grain)", -3.1986731175506815),
("threeyear (grain)", -3.1986731175506815),
("integer (numeric)second (grain)", -3.1986731175506815),
("integer (numeric)year (grain)", -3.1986731175506815),
("threesecond (grain)", -3.1986731175506815),
("day", -2.505525936990736), ("year", -2.793208009442517),
("threeminute (grain)", -3.1986731175506815),
("integer (numeric)week (grain)", -3.1986731175506815),
("hour", -2.793208009442517), ("month", -3.1986731175506815),
("threeweek (grain)", -3.1986731175506815),
("integer (numeric)minute (grain)", -3.1986731175506815),
("a fewday (grain)", -3.1986731175506815),
("minute", -2.793208009442517),
("integer (numeric)hour (grain)", -3.1986731175506815)],
n = 14},
koData =
ClassData{prior = -infinity, unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [], n = 0}}),
("15th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Halloween Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("by the end of <time>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("<ordinal> (as hour)", -2.456735772821304),
("year (latent)", -2.8622008809294686),
("<integer> (latent time-of-day)", -1.9459101490553135),
("day", -2.456735772821304), ("year", -2.8622008809294686),
("hh:mm", -2.8622008809294686),
("<day-of-month> (ordinal)", -2.456735772821304),
("noon", -2.8622008809294686),
("<time-of-day> rano", -2.8622008809294686),
("hour", -1.3581234841531944), ("minute", -2.8622008809294686)],
n = 12}}),
("in <duration>",
Classifier{okData =
ClassData{prior = -0.11778303565638351, unseen = -4.0943445622221,
likelihoods =
HashMap.fromList
[("week", -2.691243082785829),
("number.number hours", -3.3843902633457743),
("second", -2.9789251552376097),
("<unit-of-duration> as a duration", -1.9980959022258835),
("day", -2.9789251552376097),
("half an hour", -3.3843902633457743),
("<integer> <unit-of-duration>", -1.5125880864441827),
("<integer> and an half hours", -3.3843902633457743),
("hour", -2.2857779746776643), ("minute", -1.5125880864441827),
("about <duration>", -2.9789251552376097)],
n = 24},
koData =
ClassData{prior = -2.1972245773362196, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("week", -1.4469189829363254),
("<unit-of-duration> as a duration", -2.1400661634962708),
("<integer> <unit-of-duration>", -1.7346010553881064)],
n = 3}}),
("<datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -1.041453874828161, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("minuteminute", -1.6094379124341003),
("hh:mmhh:mm", -1.6094379124341003),
("dayday", -2.120263536200091),
("<named-month> <day-of-month> (non ordinal)<named-month> <day-of-month> (non ordinal)",
-2.120263536200091)],
n = 6},
koData =
ClassData{prior = -0.4353180712578455, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("<named-month> <day-of-month> (non ordinal)July",
-2.8622008809294686),
("daymonth", -2.456735772821304),
("about <time-of-day>noon", -2.8622008809294686),
("minuteminute", -1.7635885922613588),
("<time-of-day> rano<time-of-day> rano", -2.8622008809294686),
("until <time-of-day>noon", -2.8622008809294686),
("hh:mmhh:mm", -2.8622008809294686),
("hourhour", -1.9459101490553135),
("hh:mmintersect", -1.9459101490553135),
("at <time-of-day>noon", -2.8622008809294686),
("<named-month> <day-of-month> (non ordinal)August",
-2.8622008809294686)],
n = 11}}),
("Tuesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("second ordinal",
Classifier{okData =
ClassData{prior = -8.004270767353637e-2,
unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12},
koData =
ClassData{prior = -2.5649493574615367,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
Classifier{okData =
ClassData{prior = -1.5037877364540559e-2,
unseen = -4.962844630259907,
likelihoods =
HashMap.fromList
[("exactly <time-of-day>", -4.2626798770413155),
("at <time-of-day>", -2.4709204078132605),
("<ordinal> (as hour)", -1.5218398531161146),
("<integer> (latent time-of-day)", -2.01138807843482),
("about <time-of-day>", -4.2626798770413155),
("hh:mm", -3.857214768933151),
("until <time-of-day>", -4.2626798770413155),
("hour", -0.812692331209728), ("minute", -3.3463891451671604),
("after <time-of-day>", -3.857214768933151)],
n = 66},
koData =
ClassData{prior = -4.204692619390966, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -1.791759469228055),
("hour", -1.791759469228055)],
n = 1}}),
("fifteen",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -1.2237754316221157,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("minuteminute", -1.4816045409242156),
("hh:mmhh:mm", -1.4816045409242156),
("minutehour", -2.3978952727983707),
("hh:mm<integer> (latent time-of-day)", -2.3978952727983707)],
n = 5},
koData =
ClassData{prior = -0.3483066942682157,
unseen = -3.6109179126442243,
likelihoods =
HashMap.fromList
[("about <time-of-day>noon", -2.890371757896165),
("relative minutes to|till|before <integer> (hour-of-day)<integer> (latent time-of-day)",
-2.890371757896165),
("minuteminute", -2.890371757896165),
("<time-of-day> rano<time-of-day> rano", -2.890371757896165),
("until <time-of-day>noon", -2.890371757896165),
("hh:mmhh:mm", -2.890371757896165),
("hourhour", -1.6376087894007967),
("minutehour", -1.791759469228055),
("at <time-of-day>noon", -2.890371757896165),
("<time-of-day> rano<integer> (latent time-of-day)",
-2.890371757896165),
("until <time-of-day><integer> (latent time-of-day)",
-2.890371757896165),
("hh:mm<integer> (latent time-of-day)", -1.9740810260220096)],
n = 12}}),
("<hour-of-day> - <hour-of-day> (interval)",
Classifier{okData =
ClassData{prior = -1.5040773967762742,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("minuteminute", -1.2809338454620642),
("hh:mmhh:mm", -1.2809338454620642)],
n = 4},
koData =
ClassData{prior = -0.25131442828090605,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("about <time-of-day>noon", -2.9444389791664407),
("minuteminute", -2.9444389791664407),
("<time-of-day> rano<time-of-day> rano", -2.9444389791664407),
("until <time-of-day>noon", -2.9444389791664407),
("hh:mmhh:mm", -2.9444389791664407),
("hourhour", -0.9985288301111273),
("after <time-of-day>noon", -2.538973871058276),
("at <time-of-day>noon", -2.9444389791664407),
("<ordinal> (as hour)noon", -1.845826690498331),
("<integer> (latent time-of-day)noon", -2.538973871058276)],
n = 14}}),
("last n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.9889840465642745,
likelihoods =
HashMap.fromList
[("week", -2.583997552432231),
("integer (numeric)day (grain)", -2.871679624884012),
("twoweek (grain)", -3.2771447329921766),
("twominute (grain)", -3.2771447329921766),
("second", -2.871679624884012),
("integer (numeric)second (grain)", -3.2771447329921766),
("twomonth (grain)", -3.2771447329921766),
("onehour (grain)", -3.2771447329921766),
("integer (numeric)year (grain)", -3.2771447329921766),
("day", -2.583997552432231), ("year", -2.871679624884012),
("integer (numeric)week (grain)", -2.871679624884012),
("twoday (grain)", -3.2771447329921766),
("hour", -2.871679624884012), ("month", -2.871679624884012),
("integer (numeric)minute (grain)", -3.2771447329921766),
("integer (numeric)month (grain)", -3.2771447329921766),
("minute", -2.871679624884012),
("twosecond (grain)", -3.2771447329921766),
("integer (numeric)hour (grain)", -3.2771447329921766),
("twoyear (grain)", -3.2771447329921766)],
n = 16},
koData =
ClassData{prior = -infinity, unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-month> <day-of-month> (non ordinal)",
Classifier{okData =
ClassData{prior = -0.3364722366212129, unseen = -3.871201010907891,
likelihoods =
HashMap.fromList
[("Augustinteger (numeric)", -1.9042374526547454),
("Marchinteger (numeric)", -2.463853240590168),
("Aprilinteger (numeric)", -3.1570004211501135),
("month", -0.8056251639866356),
("Februaryinteger (numeric)", -1.9042374526547454),
("Septemberinteger (numeric)", -3.1570004211501135),
("Julyinteger (numeric)", -2.463853240590168)],
n = 20},
koData =
ClassData{prior = -1.252762968495368, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("Marchinteger (numeric)", -2.4423470353692043),
("Aprilinteger (numeric)", -1.3437347467010947),
("month", -0.9382696385929302),
("Julyinteger (numeric)", -2.03688192726104)],
n = 8}}),
("<day-of-month> (non ordinal) <named-month>",
Classifier{okData =
ClassData{prior = -4.8790164169432056e-2,
unseen = -3.891820298110627,
likelihoods =
HashMap.fromList
[("integer (numeric)September", -3.1780538303479458),
("integer (numeric)May", -2.4849066497880004),
("integer (numeric)April", -3.1780538303479458),
("integer (numeric)August", -3.1780538303479458),
("integer (numeric)February", -1.5686159179138452),
("month", -0.8266785731844679),
("integer (numeric)November", -2.2617630984737906),
("integer (numeric)March", -3.1780538303479458)],
n = 20},
koData =
ClassData{prior = -3.044522437723423, unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("integer (numeric)May", -1.6094379124341003),
("month", -1.6094379124341003)],
n = 1}}),
("this|next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("Wednesday", -2.3978952727983707),
("Monday", -1.9924301646902063), ("day", -0.7884573603642702),
("Tuesday", -1.1451323043030026)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("three",
Classifier{okData =
ClassData{prior = -0.1670540846631662,
unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("", 0.0)], n = 11},
koData =
ClassData{prior = -1.8718021769015913,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods = HashMap.fromList [("", 0.0)], n = 28},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("day (grain)October", -1.9459101490553135),
("daymonth", -1.540445040947149),
("day (grain)intersect", -1.9459101490553135),
("weekmonth", -1.540445040947149),
("week (grain)intersect", -1.9459101490553135),
("week (grain)September", -1.9459101490553135)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month> year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("14th ordinalApril", -1.845826690498331),
("third ordinalMarch", -2.2512917986064953),
("ordinal (digits)April", -1.55814461804655),
("month", -0.8649974374866046),
("ordinal (digits)March", -2.2512917986064953)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("lato",
Classifier{okData =
ClassData{prior = -1.5040773967762742,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.25131442828090605,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("morning",
Classifier{okData =
ClassData{prior = -0.7731898882334817,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.6190392084062235,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("relative minutes to|till|before <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("integer (numeric)<integer> (latent time-of-day)",
-0.6931471805599453),
("hour", -0.6931471805599453)],
n = 2}}),
("Women's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("10th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("after <time-of-day>",
Classifier{okData =
ClassData{prior = -1.2367626271489267,
unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("<time> <part-of-day>", -2.268683541318364),
("<ordinal> (as hour)", -2.268683541318364),
("afternoon", -1.9810014688665833),
("hour", -1.0647107369924282),
("<time-of-day> popo\322udniu/wieczorem/w nocy",
-2.268683541318364)],
n = 9},
koData =
ClassData{prior = -0.3429447511268303, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("<time> <part-of-day>", -2.908720896564361),
("intersect", -3.3141860046725258),
("tomorrow", -2.3978952727983707), ("day", -2.0614230361771577),
("afternoon", -2.3978952727983707),
("<day-of-month> (ordinal)", -2.908720896564361),
("noon", -1.927891643552635), ("hour", -1.1741198411762548),
("<hour-of-day> - <hour-of-day> (interval)",
-2.908720896564361)],
n = 22}}),
("day (grain)",
Classifier{okData =
ClassData{prior = -0.20479441264601328,
unseen = -3.1780538303479458,
likelihoods = HashMap.fromList [("", 0.0)], n = 22},
koData =
ClassData{prior = -1.6863989535702288,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("9th ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("first ordinal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("", 0.0)], n = 11},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<month> dd-dd (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("July", -0.6931471805599453), ("month", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("about <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("half an hour", -0.6931471805599453),
("minute", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("day-after-tomorrow (single-word)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> <integer> (as relative minutes)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("at <time-of-day>fifteen", -1.252762968495368),
("at <time-of-day>twenty", -1.252762968495368),
("hour", -0.8472978603872037)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("23rd ordinal no space",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Pentecost",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = -0.5389965007326869, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("Thursday", -3.068052935133617), ("zima", -2.662587827025453),
("Wednesday", -3.068052935133617),
("Saturday", -3.068052935133617),
("Monday", -3.068052935133617), ("Friday", -3.068052935133617),
("day", -1.276293465905562), ("Sunday", -3.068052935133617),
("hour", -2.374905754573672), ("Tuesday", -3.068052935133617),
("lato", -2.662587827025453), ("week-end", -2.374905754573672)],
n = 14},
koData =
ClassData{prior = -0.8754687373538999, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("April", -2.456735772821304),
("February", -1.3581234841531944),
("month", -1.157452788691043)],
n = 10}}),
("<named-month> <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("February15th ordinal", -1.791759469228055),
("Marchfirst ordinal", -1.791759469228055),
("month", -0.8754687373538999),
("Marchordinal (digits)", -1.3862943611198906)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("within <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("week", -0.6931471805599453),
("<integer> <unit-of-duration>", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("August",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}})] | facebookincubator/duckling | Duckling/Ranking/Classifiers/PL_XX.hs | bsd-3-clause | 152,584 | 0 | 15 | 74,167 | 27,016 | 16,846 | 10,170 | 2,429 | 1 |
{-# language CPP #-}
-- No documentation found for Chapter "Fence"
module Vulkan.Core10.Fence ( createFence
, withFence
, destroyFence
, resetFences
, getFenceStatus
, waitForFences
, waitForFencesSafe
, FenceCreateInfo(..)
, Fence(..)
, FenceCreateFlagBits(..)
, FenceCreateFlags
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Data.Typeable (eqT)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.Type.Equality ((:~:)(Refl))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Word (Word64)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.AllocationCallbacks (AllocationCallbacks)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.FundamentalTypes (Bool32(..))
import Vulkan.CStruct.Extends (Chain)
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkCreateFence))
import Vulkan.Dynamic (DeviceCmds(pVkDestroyFence))
import Vulkan.Dynamic (DeviceCmds(pVkGetFenceStatus))
import Vulkan.Dynamic (DeviceCmds(pVkResetFences))
import Vulkan.Dynamic (DeviceCmds(pVkWaitForFences))
import Vulkan.Core10.Handles (Device_T)
import {-# SOURCE #-} Vulkan.Core11.Promoted_From_VK_KHR_external_fence (ExportFenceCreateInfo)
import {-# SOURCE #-} Vulkan.Extensions.VK_KHR_external_fence_win32 (ExportFenceWin32HandleInfoKHR)
import Vulkan.CStruct.Extends (Extends)
import Vulkan.CStruct.Extends (Extendss)
import Vulkan.CStruct.Extends (Extensible(..))
import Vulkan.Core10.Handles (Fence)
import Vulkan.Core10.Handles (Fence(..))
import Vulkan.Core10.Enums.FenceCreateFlagBits (FenceCreateFlags)
import Vulkan.CStruct.Extends (PeekChain)
import Vulkan.CStruct.Extends (PeekChain(..))
import Vulkan.CStruct.Extends (PokeChain)
import Vulkan.CStruct.Extends (PokeChain(..))
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_FENCE_CREATE_INFO))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Core10.Handles (Fence(..))
import Vulkan.Core10.Enums.FenceCreateFlagBits (FenceCreateFlagBits(..))
import Vulkan.Core10.Enums.FenceCreateFlagBits (FenceCreateFlags)
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCreateFence
:: FunPtr (Ptr Device_T -> Ptr (SomeStruct FenceCreateInfo) -> Ptr AllocationCallbacks -> Ptr Fence -> IO Result) -> Ptr Device_T -> Ptr (SomeStruct FenceCreateInfo) -> Ptr AllocationCallbacks -> Ptr Fence -> IO Result
-- | vkCreateFence - Create a new fence object
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCreateFence-device-parameter# @device@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkCreateFence-pCreateInfo-parameter# @pCreateInfo@ /must/ be a
-- valid pointer to a valid 'FenceCreateInfo' structure
--
-- - #VUID-vkCreateFence-pAllocator-parameter# If @pAllocator@ is not
-- @NULL@, @pAllocator@ /must/ be a valid pointer to a valid
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' structure
--
-- - #VUID-vkCreateFence-pFence-parameter# @pFence@ /must/ be a valid
-- pointer to a 'Vulkan.Core10.Handles.Fence' handle
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Fence',
-- 'FenceCreateInfo'
createFence :: forall a io
. (Extendss FenceCreateInfo a, PokeChain a, MonadIO io)
=> -- | @device@ is the logical device that creates the fence.
Device
-> -- | @pCreateInfo@ is a pointer to a 'FenceCreateInfo' structure containing
-- information about how the fence is to be created.
(FenceCreateInfo a)
-> -- | @pAllocator@ controls host memory allocation as described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>
-- chapter.
("allocator" ::: Maybe AllocationCallbacks)
-> io (Fence)
createFence device createInfo allocator = liftIO . evalContT $ do
let vkCreateFencePtr = pVkCreateFence (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkCreateFencePtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCreateFence is null" Nothing Nothing
let vkCreateFence' = mkVkCreateFence vkCreateFencePtr
pCreateInfo <- ContT $ withCStruct (createInfo)
pAllocator <- case (allocator) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
pPFence <- ContT $ bracket (callocBytes @Fence 8) free
r <- lift $ traceAroundEvent "vkCreateFence" (vkCreateFence' (deviceHandle (device)) (forgetExtensions pCreateInfo) pAllocator (pPFence))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pFence <- lift $ peek @Fence pPFence
pure $ (pFence)
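-- The following is an illustrative sketch only, not part of the generated
-- bindings. It assumes the conventional 'Vulkan.Zero.zero' value for
-- 'FenceCreateInfo' (empty extension chain, no flags) and passes no custom
-- allocator; 'simpleFenceExample' is a hypothetical helper name.
--
-- > -- Create an unsignaled fence, use it, then destroy it manually.
-- > simpleFenceExample :: Device -> IO ()
-- > simpleFenceExample device = do
-- >   fence <- createFence device zero Nothing
-- >   -- ... submit work that signals the fence, wait on it, etc. ...
-- >   destroyFence device fence Nothing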
-- | A convenience wrapper to make a compatible pair of calls to
-- 'createFence' and 'destroyFence'
--
-- To ensure that 'destroyFence' is always called: pass
-- 'Control.Exception.bracket' (or the allocate function from your
-- favourite resource management library) as the last argument.
-- To just extract the pair pass '(,)' as the last argument.
--
withFence :: forall a io r . (Extendss FenceCreateInfo a, PokeChain a, MonadIO io) => Device -> FenceCreateInfo a -> Maybe AllocationCallbacks -> (io Fence -> (Fence -> io ()) -> r) -> r
withFence device pCreateInfo pAllocator b =
b (createFence device pCreateInfo pAllocator)
(\(o0) -> destroyFence device o0 pAllocator)
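-- For instance, an illustrative sketch (hypothetical helper, not part of
-- the generated bindings) that pairs 'withFence' with
-- 'Control.Exception.bracket', assuming the 'Vulkan.Zero.zero' default
-- 'FenceCreateInfo': the fence is destroyed even if the enclosed action
-- throws.
--
-- > withDefaultFence :: Device -> (Fence -> IO r) -> IO r
-- > withDefaultFence device = withFence device zero Nothing bracket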
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkDestroyFence
:: FunPtr (Ptr Device_T -> Fence -> Ptr AllocationCallbacks -> IO ()) -> Ptr Device_T -> Fence -> Ptr AllocationCallbacks -> IO ()
-- | vkDestroyFence - Destroy a fence object
--
-- == Valid Usage
--
-- - #VUID-vkDestroyFence-fence-01120# All
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-submission queue submission>
-- commands that refer to @fence@ /must/ have completed execution
--
-- - #VUID-vkDestroyFence-fence-01121# If
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' were
-- provided when @fence@ was created, a compatible set of callbacks
-- /must/ be provided here
--
-- - #VUID-vkDestroyFence-fence-01122# If no
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' were
-- provided when @fence@ was created, @pAllocator@ /must/ be @NULL@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkDestroyFence-device-parameter# @device@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkDestroyFence-fence-parameter# If @fence@ is not
-- 'Vulkan.Core10.APIConstants.NULL_HANDLE', @fence@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Fence' handle
--
-- - #VUID-vkDestroyFence-pAllocator-parameter# If @pAllocator@ is not
-- @NULL@, @pAllocator@ /must/ be a valid pointer to a valid
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' structure
--
-- - #VUID-vkDestroyFence-fence-parent# If @fence@ is a valid handle, it
-- /must/ have been created, allocated, or retrieved from @device@
--
-- == Host Synchronization
--
-- - Host access to @fence@ /must/ be externally synchronized
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Fence'
destroyFence :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that destroys the fence.
Device
-> -- | @fence@ is the handle of the fence to destroy.
Fence
-> -- | @pAllocator@ controls host memory allocation as described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>
-- chapter.
("allocator" ::: Maybe AllocationCallbacks)
-> io ()
destroyFence device fence allocator = liftIO . evalContT $ do
let vkDestroyFencePtr = pVkDestroyFence (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkDestroyFencePtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkDestroyFence is null" Nothing Nothing
let vkDestroyFence' = mkVkDestroyFence vkDestroyFencePtr
pAllocator <- case (allocator) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
lift $ traceAroundEvent "vkDestroyFence" (vkDestroyFence' (deviceHandle (device)) (fence) pAllocator)
pure $ ()
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkResetFences
:: FunPtr (Ptr Device_T -> Word32 -> Ptr Fence -> IO Result) -> Ptr Device_T -> Word32 -> Ptr Fence -> IO Result
-- | vkResetFences - Resets one or more fence objects
--
-- = Description
--
-- If any member of @pFences@ currently has its
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-fences-importing payload imported>
-- with temporary permanence, that fence's prior permanent payload is first
-- restored. The remaining operations described therefore operate on the
-- restored payload.
--
-- When 'resetFences' is executed on the host, it defines a /fence unsignal
-- operation/ for each fence, which resets the fence to the unsignaled
-- state.
--
-- If any member of @pFences@ is already in the unsignaled state when
-- 'resetFences' is executed, then 'resetFences' has no effect on that
-- fence.
--
-- == Valid Usage
--
-- - #VUID-vkResetFences-pFences-01123# Each element of @pFences@ /must/
-- not be currently associated with any queue command that has not yet
-- completed execution on that queue
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkResetFences-device-parameter# @device@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkResetFences-pFences-parameter# @pFences@ /must/ be a valid
-- pointer to an array of @fenceCount@ valid
-- 'Vulkan.Core10.Handles.Fence' handles
--
-- - #VUID-vkResetFences-fenceCount-arraylength# @fenceCount@ /must/ be
-- greater than @0@
--
-- - #VUID-vkResetFences-pFences-parent# Each element of @pFences@ /must/
-- have been created, allocated, or retrieved from @device@
--
-- == Host Synchronization
--
-- - Host access to each member of @pFences@ /must/ be externally
-- synchronized
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Fence'
resetFences :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that owns the fences.
Device
-> -- | @pFences@ is a pointer to an array of fence handles to reset.
("fences" ::: Vector Fence)
-> io ()
resetFences device fences = liftIO . evalContT $ do
let vkResetFencesPtr = pVkResetFences (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkResetFencesPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkResetFences is null" Nothing Nothing
let vkResetFences' = mkVkResetFences vkResetFencesPtr
pPFences <- ContT $ allocaBytes @Fence ((Data.Vector.length (fences)) * 8)
lift $ Data.Vector.imapM_ (\i e -> poke (pPFences `plusPtr` (8 * (i)) :: Ptr Fence) (e)) (fences)
r <- lift $ traceAroundEvent "vkResetFences" (vkResetFences' (deviceHandle (device)) ((fromIntegral (Data.Vector.length $ (fences)) :: Word32)) (pPFences))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
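-- Editor's sketch, not part of the original module: the common per-frame
-- pattern of blocking on a set of fences and then returning them to the
-- unsignaled state with 'resetFences'; 'maxBound' effectively disables the
-- timeout.
_exampleWaitThenReset :: MonadIO io => Device -> ("fences" ::: Vector Fence) -> io ()
_exampleWaitThenReset device fences = do
  _ <- waitForFences device fences True maxBound
  resetFences device fences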
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetFenceStatus
:: FunPtr (Ptr Device_T -> Fence -> IO Result) -> Ptr Device_T -> Fence -> IO Result
-- | vkGetFenceStatus - Return the status of a fence
--
-- = Description
--
-- Upon success, 'getFenceStatus' returns the status of the fence object,
-- with the following return codes:
--
-- +------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
-- | Status | Meaning |
-- +================================================+========================================================================================================================+
-- | 'Vulkan.Core10.Enums.Result.SUCCESS' | The fence specified by @fence@ is signaled. |
-- +------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
-- | 'Vulkan.Core10.Enums.Result.NOT_READY' | The fence specified by @fence@ is unsignaled. |
-- +------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
-- | 'Vulkan.Core10.Enums.Result.ERROR_DEVICE_LOST' | The device has been lost. See |
-- | | <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device Lost Device>. |
-- +------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
--
-- Fence Object Status Codes
--
-- If a
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-submission queue submission>
-- command is pending execution, then the value returned by this command
-- /may/ immediately be out of date.
--
-- If the device has been lost (see
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device Lost Device>),
-- 'getFenceStatus' /may/ return any of the above status codes. If the
-- device has been lost and 'getFenceStatus' is called repeatedly, it will
-- eventually return either 'Vulkan.Core10.Enums.Result.SUCCESS' or
-- 'Vulkan.Core10.Enums.Result.ERROR_DEVICE_LOST'.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.NOT_READY'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_DEVICE_LOST'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Fence'
getFenceStatus :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that owns the fence.
--
-- #VUID-vkGetFenceStatus-device-parameter# @device@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Device' handle
Device
-> -- | @fence@ is the handle of the fence to query.
--
-- #VUID-vkGetFenceStatus-fence-parameter# @fence@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Fence' handle
--
-- #VUID-vkGetFenceStatus-fence-parent# @fence@ /must/ have been created,
-- allocated, or retrieved from @device@
Fence
-> io (Result)
getFenceStatus device fence = liftIO $ do
let vkGetFenceStatusPtr = pVkGetFenceStatus (case device of Device{deviceCmds} -> deviceCmds)
unless (vkGetFenceStatusPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetFenceStatus is null" Nothing Nothing
let vkGetFenceStatus' = mkVkGetFenceStatus vkGetFenceStatusPtr
r <- traceAroundEvent "vkGetFenceStatus" (vkGetFenceStatus' (deviceHandle (device)) (fence))
when (r < SUCCESS) (throwIO (VulkanException r))
pure $ (r)
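-- Editor's sketch, not part of the original module: 'getFenceStatus' never
-- blocks, so it can be used to poll a fence; any non-exceptional result other
-- than SUCCESS is NOT_READY, meaning the fence is still unsignaled.
_examplePollFence :: MonadIO io => Device -> Fence -> io Bool
_examplePollFence device fence = do
  r <- getFenceStatus device fence
  pure (r == SUCCESS)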
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkWaitForFencesUnsafe
:: FunPtr (Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result) -> Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result
foreign import ccall
"dynamic" mkVkWaitForFencesSafe
:: FunPtr (Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result) -> Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result
-- | waitForFences with selectable safeness
waitForFencesSafeOrUnsafe :: forall io
. (MonadIO io)
=> (FunPtr (Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result) -> Ptr Device_T -> Word32 -> Ptr Fence -> Bool32 -> Word64 -> IO Result)
-> -- | @device@ is the logical device that owns the fences.
Device
-> -- | @pFences@ is a pointer to an array of @fenceCount@ fence handles.
("fences" ::: Vector Fence)
-> -- | @waitAll@ is the condition that /must/ be satisfied to successfully
-- unblock the wait. If @waitAll@ is 'Vulkan.Core10.FundamentalTypes.TRUE',
-- then the condition is that all fences in @pFences@ are signaled.
-- Otherwise, the condition is that at least one fence in @pFences@ is
-- signaled.
("waitAll" ::: Bool)
-> -- | @timeout@ is the timeout period in units of nanoseconds. @timeout@ is
-- adjusted to the closest value allowed by the implementation-dependent
-- timeout accuracy, which /may/ be substantially longer than one
-- nanosecond, and /may/ be longer than the requested period.
("timeout" ::: Word64)
-> io (Result)
waitForFencesSafeOrUnsafe mkVkWaitForFences device fences waitAll timeout = liftIO . evalContT $ do
let vkWaitForFencesPtr = pVkWaitForFences (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkWaitForFencesPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkWaitForFences is null" Nothing Nothing
let vkWaitForFences' = mkVkWaitForFences vkWaitForFencesPtr
pPFences <- ContT $ allocaBytes @Fence ((Data.Vector.length (fences)) * 8)
lift $ Data.Vector.imapM_ (\i e -> poke (pPFences `plusPtr` (8 * (i)) :: Ptr Fence) (e)) (fences)
r <- lift $ traceAroundEvent "vkWaitForFences" (vkWaitForFences' (deviceHandle (device)) ((fromIntegral (Data.Vector.length $ (fences)) :: Word32)) (pPFences) (boolToBool32 (waitAll)) (timeout))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pure $ (r)
-- | vkWaitForFences - Wait for one or more fences to become signaled
--
-- = Description
--
-- If the condition is satisfied when 'waitForFences' is called, then
-- 'waitForFences' returns immediately. If the condition is not satisfied
-- at the time 'waitForFences' is called, then 'waitForFences' will block
-- and wait until the condition is satisfied or the @timeout@ has expired,
-- whichever is sooner.
--
-- If @timeout@ is zero, then 'waitForFences' does not wait, but simply
-- returns the current state of the fences.
-- 'Vulkan.Core10.Enums.Result.TIMEOUT' will be returned in this case if
-- the condition is not satisfied, even though no actual wait was
-- performed.
--
-- If the condition is satisfied before the @timeout@ has expired,
-- 'waitForFences' returns 'Vulkan.Core10.Enums.Result.SUCCESS'. Otherwise,
-- 'waitForFences' returns 'Vulkan.Core10.Enums.Result.TIMEOUT' after the
-- @timeout@ has expired.
--
-- If device loss occurs (see
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device Lost Device>)
-- before the timeout has expired, 'waitForFences' /must/ return in finite
-- time with either 'Vulkan.Core10.Enums.Result.SUCCESS' or
-- 'Vulkan.Core10.Enums.Result.ERROR_DEVICE_LOST'.
--
-- Note
--
-- While we guarantee that 'waitForFences' /must/ return in finite time, no
-- guarantees are made that it returns immediately upon device loss.
-- However, the client can reasonably expect that the delay will be on the
-- order of seconds and that calling 'waitForFences' will not result in a
-- permanently (or seemingly permanently) dead process.
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkWaitForFences-device-parameter# @device@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkWaitForFences-pFences-parameter# @pFences@ /must/ be a valid
-- pointer to an array of @fenceCount@ valid
-- 'Vulkan.Core10.Handles.Fence' handles
--
-- - #VUID-vkWaitForFences-fenceCount-arraylength# @fenceCount@ /must/ be
-- greater than @0@
--
-- - #VUID-vkWaitForFences-pFences-parent# Each element of @pFences@
-- /must/ have been created, allocated, or retrieved from @device@
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.TIMEOUT'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_DEVICE_LOST'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32', 'Vulkan.Core10.Handles.Device',
-- 'Vulkan.Core10.Handles.Fence'
waitForFences :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that owns the fences.
Device
-> -- | @pFences@ is a pointer to an array of @fenceCount@ fence handles.
("fences" ::: Vector Fence)
-> -- | @waitAll@ is the condition that /must/ be satisfied to successfully
-- unblock the wait. If @waitAll@ is 'Vulkan.Core10.FundamentalTypes.TRUE',
-- then the condition is that all fences in @pFences@ are signaled.
-- Otherwise, the condition is that at least one fence in @pFences@ is
-- signaled.
("waitAll" ::: Bool)
-> -- | @timeout@ is the timeout period in units of nanoseconds. @timeout@ is
-- adjusted to the closest value allowed by the implementation-dependent
-- timeout accuracy, which /may/ be substantially longer than one
-- nanosecond, and /may/ be longer than the requested period.
("timeout" ::: Word64)
-> io (Result)
waitForFences = waitForFencesSafeOrUnsafe mkVkWaitForFencesUnsafe
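-- Editor's sketch, not part of the original module: waiting with a finite
-- timeout (roughly 16 ms, given in nanoseconds) and reporting whether every
-- fence signaled in time; a TIMEOUT result is returned rather than thrown.
_exampleWaitBriefly :: MonadIO io => Device -> ("fences" ::: Vector Fence) -> io Bool
_exampleWaitBriefly device fences = do
  r <- waitForFences device fences True 16000000
  pure (r == SUCCESS)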
-- | A variant of 'waitForFences' which makes a *safe* FFI call
waitForFencesSafe :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that owns the fences.
Device
-> -- | @pFences@ is a pointer to an array of @fenceCount@ fence handles.
("fences" ::: Vector Fence)
-> -- | @waitAll@ is the condition that /must/ be satisfied to successfully
-- unblock the wait. If @waitAll@ is 'Vulkan.Core10.FundamentalTypes.TRUE',
-- then the condition is that all fences in @pFences@ are signaled.
-- Otherwise, the condition is that at least one fence in @pFences@ is
-- signaled.
("waitAll" ::: Bool)
-> -- | @timeout@ is the timeout period in units of nanoseconds. @timeout@ is
-- adjusted to the closest value allowed by the implementation-dependent
-- timeout accuracy, which /may/ be substantially longer than one
-- nanosecond, and /may/ be longer than the requested period.
("timeout" ::: Word64)
-> io (Result)
waitForFencesSafe = waitForFencesSafeOrUnsafe mkVkWaitForFencesSafe
-- | VkFenceCreateInfo - Structure specifying parameters of a newly created
-- fence
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkFenceCreateInfo-sType-sType# @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_FENCE_CREATE_INFO'
--
-- - #VUID-VkFenceCreateInfo-pNext-pNext# Each @pNext@ member of any
-- structure (including this one) in the @pNext@ chain /must/ be either
-- @NULL@ or a pointer to a valid instance of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_fence.ExportFenceCreateInfo'
-- or
-- 'Vulkan.Extensions.VK_KHR_external_fence_win32.ExportFenceWin32HandleInfoKHR'
--
-- - #VUID-VkFenceCreateInfo-sType-unique# The @sType@ value of each
-- struct in the @pNext@ chain /must/ be unique
--
-- - #VUID-VkFenceCreateInfo-flags-parameter# @flags@ /must/ be a valid
-- combination of
-- 'Vulkan.Core10.Enums.FenceCreateFlagBits.FenceCreateFlagBits' values
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.Enums.FenceCreateFlagBits.FenceCreateFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType', 'createFence'
data FenceCreateInfo (es :: [Type]) = FenceCreateInfo
{ -- | @pNext@ is @NULL@ or a pointer to a structure extending this structure.
next :: Chain es
, -- | @flags@ is a bitmask of
-- 'Vulkan.Core10.Enums.FenceCreateFlagBits.FenceCreateFlagBits' specifying
-- the initial state and behavior of the fence.
flags :: FenceCreateFlags
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (FenceCreateInfo (es :: [Type]))
#endif
deriving instance Show (Chain es) => Show (FenceCreateInfo es)
instance Extensible FenceCreateInfo where
extensibleTypeName = "FenceCreateInfo"
setNext FenceCreateInfo{..} next' = FenceCreateInfo{next = next', ..}
getNext FenceCreateInfo{..} = next
extends :: forall e b proxy. Typeable e => proxy e -> (Extends FenceCreateInfo e => b) -> Maybe b
extends _ f
| Just Refl <- eqT @e @ExportFenceWin32HandleInfoKHR = Just f
| Just Refl <- eqT @e @ExportFenceCreateInfo = Just f
| otherwise = Nothing
instance (Extendss FenceCreateInfo es, PokeChain es) => ToCStruct (FenceCreateInfo es) where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p FenceCreateInfo{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FENCE_CREATE_INFO)
pNext'' <- fmap castPtr . ContT $ withChain (next)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext''
lift $ poke ((p `plusPtr` 16 :: Ptr FenceCreateFlags)) (flags)
lift $ f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FENCE_CREATE_INFO)
pNext' <- fmap castPtr . ContT $ withZeroChain @es
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext'
lift $ f
instance (Extendss FenceCreateInfo es, PeekChain es) => FromCStruct (FenceCreateInfo es) where
peekCStruct p = do
pNext <- peek @(Ptr ()) ((p `plusPtr` 8 :: Ptr (Ptr ())))
next <- peekChain (castPtr pNext)
flags <- peek @FenceCreateFlags ((p `plusPtr` 16 :: Ptr FenceCreateFlags))
pure $ FenceCreateInfo
next flags
instance es ~ '[] => Zero (FenceCreateInfo es) where
zero = FenceCreateInfo
()
zero
| expipiplus1/vulkan | src/Vulkan/Core10/Fence.hs | bsd-3-clause | 31,350 | 0 | 18 | 6,449 | 4,457 | 2,560 | 1,897 | -1 | -1 |
module Cpp where
import FixPrime
-- Cartesian product of two lists, via a list comprehension.
cpp (x, y) = [(a, b) | a <- x, b <- y]
-- Pair a fixed left element with every element of a list.
cpr (a, y) = [(a, b) | b <- y]
-- Pair every element of a list with a fixed right element.
cpl (x, b) = [(a, b) | a <- x]
-- The same three products written with do-notation in the list monad.
cpp' (x, y) = do
  a <- x
  b <- y
  return (a, b)
cpr' (a, y) = do
  b <- y
  return (a, b)
cpl' (x, b) = do
  a <- x
  return (a, b)
| cutsea110/aop | src/Cpp.hs | bsd-3-clause | 274 | 0 | 8 | 93 | 214 | 117 | 97 | 15 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.TypedBLOB.K614edd84c8bd (TypedBLOB(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.Type.K7028aa556ebc
import qualified Test.ZM.ADT.AbsRef.K4bbd38587b9e
import qualified Test.ZM.ADT.BLOB.Kf139d4751fda
import qualified Test.ZM.ADT.FlatEncoding.K982148c09ddb
data TypedBLOB = TypedBLOB (Test.ZM.ADT.Type.K7028aa556ebc.Type Test.ZM.ADT.AbsRef.K4bbd38587b9e.AbsRef)
(Test.ZM.ADT.BLOB.Kf139d4751fda.BLOB Test.ZM.ADT.FlatEncoding.K982148c09ddb.FlatEncoding)
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance Data.Model.Model TypedBLOB
| tittoassini/typed | test/Test/ZM/ADT/TypedBLOB/K614edd84c8bd.hs | bsd-3-clause | 796 | 0 | 9 | 88 | 178 | 119 | 59 | 15 | 0 |
{- In the game of darts a player throws three darts at a target board which is split into twenty equal sized sections numbered one to twenty.
The score of a dart is determined by the number of the region that the dart lands in. A dart landing outside the red/green outer ring scores zero. The black and cream regions inside this ring represent single scores. However, the red/green outer ring and middle ring score double and treble scores respectively.
At the centre of the board are two concentric circles called the bull region, or bulls-eye. The outer bull is worth 25 points and the inner bull is a double, worth 50 points.
There are many variations of rules but in the most popular game the players will begin with a score of 301 or 501 and the first player to reduce their running total to zero is a winner. However, it is normal to play a "doubles out" system, which means that the player must land a double (including the double bulls-eye at the centre of the board) on their final dart to win; any other dart that would reduce their running total to one or lower means the score for that set of three darts is "bust".
When a player is able to finish on their current score it is called a "checkout" and the highest checkout is 170: T20 T20 D25 (two treble 20s and double bull).
There are exactly eleven distinct ways to checkout on a score of 6:
D3
D1 D2
S2 D2
D2 D1
S4 D1
S1 S1 D2
S1 T1 D1
S1 S3 D1
D1 D1 D1
D1 S2 D1
S2 S2 D1
Note that D1 D2 is considered different to D2 D1 as they finish on different doubles. However, the combination S1 T1 D1 is considered the same as T1 S1 D1.
In addition we shall not include misses in considering combinations; for example, D3 is the same as 0 D3 and 0 0 D3.
Incredibly there are 42336 distinct ways of checking out in total.
How many distinct ways can a player checkout with a score less than 100? -}
{-# LANGUAGE ScopedTypeVariables #-}
import qualified Zora.List as ZList
import qualified Zora.Math as ZMath
import qualified Data.Ord as Ord
import qualified Data.List as List
import Data.Maybe
import Control.Applicative
-- (multiplier, score)
type Score = (Int, Int)
checkouts :: [[Score]]
checkouts
=
[ [c]
| c <- last_darts ]
++
[ [b, c]
| b <- darts
, c <- last_darts ]
++
[ [a, b, c]
| a <- darts
, b <- darts
, a <= b
, c <- last_darts ]
where
last_darts :: [Score]
last_darts = filter ((==) 2 . fst) darts
darts :: [Score]
darts
= init -- (3, 25)
$ (\a b -> (a, b))
<$> [1,2,3]
<*> [1..20] ++ [25]
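-- Editor's sanity check, not part of the original solution: the number of
-- checkouts totalling exactly 6 should equal the eleven ways listed above.
checkouts_on_six :: Int
checkouts_on_six = length (filter ((== 6) . sum . map (ZList.pair_op (*))) checkouts)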
sought :: Int
sought
= length
. filter ((< 100) . eval_turn)
$ checkouts
where
eval_turn :: [Score] -> Int
eval_turn = sum . map (ZList.pair_op (*))
main :: IO ()
main = do
putStrLn . show $ sought
| bgwines/project-euler | src/solved/problem109.hs | bsd-3-clause | 2,741 | 12 | 13 | 603 | 366 | 214 | 152 | 41 | 1 |
module Language.Haskell.GhcMod.Lint where
import Control.Applicative ((<$>))
import Control.Exception (handle, SomeException(..))
import Language.Haskell.GhcMod.Logger (checkErrorPrefix)
import Language.Haskell.GhcMod.Types
import Language.Haskell.HLint (hlint)
-- | Checking syntax of a target file using hlint.
-- Warnings and errors are returned.
lintSyntax :: Options
-> FilePath -- ^ A target file.
-> IO String
lintSyntax opt file = handle handler $ pack <$> hlint (file : "--quiet" : hopts)
where
pack = convert opt . map (init . show) -- init drops the last \n.
hopts = hlintOpts opt
handler (SomeException e) = return $ checkErrorPrefix ++ show e ++ "\n"
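-- | Editor's sketch, not part of the original module: a minimal caller,
-- assuming the package's 'defaultOptions' value and a hypothetical target
-- file path.
lintExample :: IO ()
lintExample = putStr =<< lintSyntax defaultOptions "src/Main.hs"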
| carlohamalainen/ghc-mod | Language/Haskell/GhcMod/Lint.hs | bsd-3-clause | 705 | 0 | 10 | 135 | 183 | 104 | 79 | 13 | 1 |
module DobadoBots.Interpreter.Data
( ActionToken ( ..)
, SensorToken ( ..)
, Cond ( ..)
, LogicExpr ( ..)
, CmpInteger ( ..)
, Collider ( ..)
)
where
import Data.HashMap.Strict (HashMap)
data ActionToken = MoveForward
| TurnLeft
| TurnRight
| FaceObjective
| ChangeObjective
deriving (Show, Eq)
data SensorToken = LaserDistance
| LaserScan
| ObjectiveDistance
deriving (Show, Eq)
data LogicExpr = CmpCollider SensorToken Collider | CmpLogicInt (CmpInteger SensorToken) deriving (Show, Eq)
data CmpInteger a = Sup a Integer
| Inf a Integer
| Eq a Integer
deriving (Show, Eq)
data Cond = Token ActionToken | Cond { sensor :: LogicExpr
, ifValid :: Cond
, ifInvalid :: Cond
} deriving (Show, Eq)
data Collider = Obstacle | Objective | Wall | Robot deriving (Show, Eq)
| NinjaTrappeur/DobadoBots | src/DobadoBots/Interpreter/Data.hs | bsd-3-clause | 1,113 | 0 | 8 | 472 | 256 | 156 | 100 | 28 | 0 |
import Test.QuickCheck
-- Our QC instances and properties:
import Instances
import Properties.Delete
import Properties.Failure
import Properties.Floating
import Properties.Focus
import Properties.GreedyView
import Properties.Insert
import Properties.Screen
import Properties.Shift
import Properties.Stack
import Properties.StackSet
import Properties.Swap
import Properties.View
import Properties.Workspace
import Properties.Layout.Full
import Properties.Layout.Tall
import System.Environment
import Text.Printf
import Control.Monad
import Control.Applicative
main :: IO ()
main = do
arg <- fmap (drop 1) getArgs
let n = if null arg then 100 else read $ head arg
args = stdArgs { maxSuccess = n, maxSize = 100 }
qc t = do
c <- quickCheckWithResult args t
case c of
Success {} -> return True
_ -> return False
perform (s, t) = printf "%-35s: " s >> qc t
n <- length . filter not <$> mapM perform tests
unless (n == 0) (error (show n ++ " test(s) failed"))
tests =
[("StackSet invariants", property prop_invariant)
,("empty: invariant", property prop_empty_I)
,("empty is empty", property prop_empty)
,("empty / current", property prop_empty_current)
,("empty / member", property prop_member_empty)
,("view : invariant", property prop_view_I)
,("view sets current", property prop_view_current)
,("view idempotent", property prop_view_idem)
,("view reversible", property prop_view_reversible)
,("view is local", property prop_view_local)
,("greedyView : invariant", property prop_greedyView_I)
,("greedyView sets current", property prop_greedyView_current)
,("greedyView is safe", property prop_greedyView_current_id)
,("greedyView idempotent", property prop_greedyView_idem)
,("greedyView reversible", property prop_greedyView_reversible)
,("greedyView is local", property prop_greedyView_local)
,("peek/member", property prop_member_peek)
,("index/length", property prop_index_length)
,("focus left : invariant", property prop_focusUp_I)
,("focus master : invariant", property prop_focusMaster_I)
,("focus right: invariant", property prop_focusDown_I)
,("focusWindow: invariant", property prop_focus_I)
,("focus left/master", property prop_focus_left_master)
,("focus right/master", property prop_focus_right_master)
,("focus master/master", property prop_focus_master_master)
,("focusWindow master", property prop_focusWindow_master)
,("focus left/right", property prop_focus_left)
,("focus right/left", property prop_focus_right)
,("focus all left", property prop_focus_all_l)
,("focus all right", property prop_focus_all_r)
,("focus down is local", property prop_focus_down_local)
,("focus up is local", property prop_focus_up_local)
,("focus master is local", property prop_focus_master_local)
,("focus master idemp", property prop_focusMaster_idem)
,("focusWindow is local", property prop_focusWindow_local)
,("focusWindow works" , property prop_focusWindow_works)
,("focusWindow identity", property prop_focusWindow_identity)
,("findTag", property prop_findIndex)
,("allWindows/member", property prop_allWindowsMember)
,("currentTag", property prop_currentTag)
,("insert: invariant", property prop_insertUp_I)
,("insert/new", property prop_insert_empty)
,("insert is idempotent", property prop_insert_idem)
,("insert is reversible", property prop_insert_delete)
,("insert is local", property prop_insert_local)
,("insert duplicates", property prop_insert_duplicate)
,("insert/peek", property prop_insert_peek)
,("insert/size", property prop_size_insert)
,("delete: invariant", property prop_delete_I)
,("delete/empty", property prop_empty)
,("delete/member", property prop_delete)
,("delete is reversible", property prop_delete_insert)
,("delete is local", property prop_delete_local)
,("delete/focus", property prop_delete_focus)
,("delete last/focus up", property prop_delete_focus_end)
,("delete ~last/focus down", property prop_delete_focus_not_end)
,("filter preserves order", property prop_filter_order)
,("swapLeft", property prop_swap_left)
,("swapRight", property prop_swap_right)
,("swapMaster: invariant", property prop_swap_master_I)
,("swapUp: invariant" , property prop_swap_left_I)
,("swapDown: invariant", property prop_swap_right_I)
,("swapMaster id on focus", property prop_swap_master_focus)
,("swapUp id on focus", property prop_swap_left_focus)
,("swapDown id on focus", property prop_swap_right_focus)
,("swapMaster is idempotent", property prop_swap_master_idempotent)
,("swap all left", property prop_swap_all_l)
,("swap all right", property prop_swap_all_r)
,("swapMaster is local", property prop_swap_master_local)
,("swapUp is local", property prop_swap_left_local)
,("swapDown is local", property prop_swap_right_local)
,("shiftMaster id on focus", property prop_shift_master_focus)
,("shiftMaster is local", property prop_shift_master_local)
,("shiftMaster is idempotent", property prop_shift_master_idempotent)
,("shiftMaster preserves ordering", property prop_shift_master_ordering)
,("shift: invariant" , property prop_shift_I)
,("shift is reversible" , property prop_shift_reversible)
,("shiftWin: invariant" , property prop_shift_win_I)
,("shiftWin is shift on focus", property prop_shift_win_focus)
,("shiftWin fix current" , property prop_shift_win_fix_current)
,("shiftWin identity", property prop_shift_win_indentity)
,("floating is reversible" , property prop_float_reversible)
,("floating sets geometry" , property prop_float_geometry)
,("floats can be deleted", property prop_float_delete)
,("screens includes current", property prop_screens)
,("differentiate works", property prop_differentiate)
,("lookupTagOnScreen", property prop_lookup_current)
,("lookupTagOnVisbleScreen", property prop_lookup_visible)
,("screens works", property prop_screens_works)
,("renaming works", property prop_rename1)
,("ensure works", property prop_ensure)
,("ensure hidden semantics", property prop_ensure_append)
,("mapWorkspace id", property prop_mapWorkspaceId)
,("mapWorkspace inverse", property prop_mapWorkspaceInverse)
,("mapLayout id", property prop_mapLayoutId)
,("mapLayout inverse", property prop_mapLayoutInverse)
,("abort fails", property prop_abort)
,("new fails with abort", property prop_new_abort)
,("point within", property prop_point_within)
-- tall layout
,("tile 1 window fullsize", property prop_tile_fullscreen)
,("tiles never overlap", property prop_tile_non_overlap)
,("split horizontal", property prop_split_horizontal)
,("split vertical", property prop_split_vertical)
,("pure layout tall", property prop_purelayout_tall)
,("send shrink tall", property prop_shrink_tall)
,("send expand tall", property prop_expand_tall)
,("send incmaster tall", property prop_incmaster_tall)
-- full layout
,("pure layout full", property prop_purelayout_full)
,("send message full", property prop_sendmsg_full)
,("describe full", property prop_desc_full)
,("describe mirror", property prop_desc_mirror)
-- resize hints
,("window resize hints: inc", property prop_resize_inc)
,("window resize hints: inc all", property prop_resize_inc_extra)
,("window resize hints: max", property prop_resize_max)
,("window resize hints: max all ", property prop_resize_max_extra)
,("window aspect hints: fits", property prop_aspect_fits)
,("window aspect hints: shrinks ", property prop_aspect_hint_shrink)
,("pointWithin", property prop_point_within)
,("pointWithin mirror", property prop_point_within_mirror)
] <>
prop_laws_Stack
| xmonad/xmonad | tests/Properties.hs | bsd-3-clause | 8,250 | 0 | 15 | 1,603 | 1,755 | 1,007 | 748 | 156 | 3 |
module Main where
----------------------------------------------------------------------------------------
-- Specification: --
-- 1. Input format is displayed UTF-8 encoded text. --
-- 2. All punctuation that is not part of a word should be disregarded                 --
-- 3. Frequency bar for each word should start on the same column                      --
-- 4. A line should not be longer than 80 characters (size your bars appropriately) --
-- 5. A linear scale should be used --
-- 6. A word with a bar length of 0 should not be printed --
----------------------------------------------------------------------------------------
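-- Illustrative run (editor's sketch; the executable name, input file and bar
-- lengths are made up, shown only to picture the required output format):
--
-- > $ wordfreq sample.txt
-- > the ##############################
-- > and ################
-- > of  ##########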
import Data.Char
import Data.Map as M hiding (map, filter)
import Data.List as L
import Control.Exception as E (catch)
import Control.Monad
import System.IO
import System.IO.Error
import System.Directory
import Control.Applicative
import System.Environment
-- |Checks the first and last characters of a string for punctuation and removes them
clean :: [String] -> [String]
clean [] = []
clean ([]:xs) = clean xs
clean ([x]:xs) = if x `elem` alphabet then [x] : clean xs else clean xs
clean (x:xs)
| badhead && badtail = (tail . init) x : clean xs
| badhead = tail x : clean xs
| badtail = init x : clean xs
| otherwise = x : clean xs
where badhead = head x `notElem` alphabet
badtail = last x `notElem` alphabet
alphabet = ['a'..'z'] ++ ['A'..'Z'] ++ "'"
-- |Maps to lower across a list of strings (frequencies are case-insensitive)
makeLower :: [String] -> [String]
makeLower = map (map toLower)
-- |Passes in an empty map to be populated
toMap :: [String] -> Map String Int
toMap xs = trans xs M.empty
-- |Wrapper function for addCount
trans :: [String] -> Map String Int -> Map String Int
trans [] list = list
trans xs list = L.foldl (\ list x -> addCount ((clean . makeLower . words) x) list) list xs
-- |Adds occurrences of words to an immutable map
addCount :: [String] -> Map String Int -> Map String Int
addCount [] list = list
addCount (x:xs) list = case M.lookup x list of
Nothing -> addCount xs $ M.insert x 1 list
Just val -> addCount xs $ M.update inc x list where
inc num = Just num >>= \x -> Just (x+1)
-- |Builds string output from map, calculates proper spacing
printMap :: Map String Int -> String
printMap m = M.foldWithKey f id m "" where
longestkey key = getMaxKey m - length key
longestval = maximum (map snd $ M.toList m) + getMaxKey m
linearscale = (longestval `div` 80) + 1
f :: String -> Int -> (String -> String) -> String -> String
f key val r = r . ((key ++ concat (replicate (longestkey key) " ") ++ " " ++
replicate (val `div` linearscale) '#' ++ "\n") ++)
-- |Helper function to retrieve the maximum key (largest word) in the map
getMaxKey:: Map String Int -> Int
getMaxKey m = maximum $ map (length . fst) (M.toList m)
-- |Output should be in descending order by highest frequency
sorter :: String -> String -> Ordering
sorter a b = compare (len b) (len a) where
len = length . filter (=='#')
-- |Main method wrapper
main :: IO()
main = E.catch toTry handler
-- |Main method body
toTry :: IO()
toTry = do
(file:xs) <- getArgs
contents <- readFile file
mapM_ putStrLn $ (sortBy sorter . filter ('#' `elem`) . lines . printMap . toMap) [contents]
-- |Error handler
handler :: IOError -> IO ()
handler e
  | isDoesNotExistError e = putStrLn "That file does not exist!"
  | otherwise             = ioError e
| dmjio/wordfreq | Main.hs | bsd-3-clause | 3,868 | 0 | 18 | 1,174 | 1,067 | 560 | 507 | 58 | 2 |
{-# LANGUAGE TypeApplications #-}
module Streaming.BinarySpec where
import Control.Monad (replicateM_, void)
import Data.Binary (put)
import Data.Binary.Put (runPut)
import Data.Function ((&))
import qualified Data.ByteString.Streaming as Q
import Streaming.Binary
import qualified Streaming.Prelude as S
import Test.Hspec
spec :: Spec
spec = do
let input n = Q.fromLazy $ runPut $ replicateM_ n $ put (42 :: Int)
describe "decode" $ do
it "fails on empty inputs" $ do
(_, _, output) <- decode @Int (input 0)
output `shouldBe` Left "not enough bytes"
it "decodes single integers" $ do
(_, _, output) <- decode @Int (input 1)
output `shouldBe` Right 42
describe "decoded" $ do
it "succeeds on empty inputs" $ do
output <- void (decoded @Int (input 0)) & S.toList_
output `shouldBe` []
it "decodes single integers" $ do
output <- void (decoded @Int (input 1)) & S.toList_
output `shouldBe` [42]
it "decodes multiple integers" $ do
output <- void (decoded @Int (input 10)) & S.toList_
output `shouldBe` (replicate 10 42)
it "decodes multiple integers even when the input is incomplete" $ do
n <- fromIntegral <$> Q.length_ (input 10)
let input' = Q.take (n - 1) (input 10)
output <- void (decoded @Int input') & S.toList_
output `shouldBe` (replicate 9 42)
it "leaves the right amount of leftover on incomplete input" $ do
n <- Q.length_ (input 10)
let input' = Q.take (fromIntegral (n - 1)) (input 10)
(leftover, _, _) <- S.effects (decoded @Int input')
Q.length_ leftover `shouldReturn` (n `div` 10) - 1
describe "laws" $ do
it "decode . encode = id for booleans" $ do
(_, _, output) <- decode (encode True)
output `shouldBe` Right True
it "decoded . encoded = id for booleans" $ do
xs <- S.replicate 10 True & encoded & decoded & void & S.toList_
xs `shouldBe` (replicate 10 True)
| mboes/streaming-binary | test/Streaming/BinarySpec.hs | bsd-3-clause | 2,030 | 0 | 20 | 545 | 757 | 378 | 379 | 47 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
module Main(main) where
import Control.DeepSeq
import Criterion.Main
import Data.Array.Repa as R hiding((++))
import Diagrams.Backend.Cairo.Raster
import Diagrams.Backend.Cairo.Raster.Repa
import Numeric.Noise
import Numeric.Noise.Perlin
main :: IO ()
main = defaultMain benchmarks
benchmarks :: [Benchmark]
benchmarks =
[ bench "crgb" $ nf3 crgb (cn,cn,cn)
, bench "crgbap" $ nf4 crgbap (cn,cn,cn,cn)
, bench "crgba" $ nf4 crgba (cn,cn,cn,cn)
, bench "PlainRaster" $ computePlainF cplus
, repaF "RepaRaster" rplus
, bench "PlainNoise" $ computePlainF $ noisePlain 1
, repaF "RepaNoise" $ noiseRepa 1
]
w, h, cn :: Int
w = 947
h = 547
cn= 99
computePlainF :: PlainF -> IO ()
computePlainF f = whnfIO $ cairoRaster f w h
repaF :: String -> RepaF -> Benchmark
repaF name fun = bench name $ whnfIO $ cairoRepa (\d -> fromFunction d fun) w h
type PlainF= Int -> Int -> CairoColor
type RepaF = DIM2 -> CairoColor
cplus :: PlainF
cplus x y = crgb x y (x*y)
rplus :: RepaF
rplus !(Z :. y :. x) = crgb x y (x*y)
nf3 :: NFData b => (t1 -> t2 -> t3 -> b) -> (t1, t2, t3) -> Pure
nf3 = nf . uncurry3
nf4 :: NFData b => (t1 -> t2 -> t3 -> t4 -> b) -> (t1, t2, t3, t4) -> Pure
nf4 = nf . uncurry4
uncurry3 :: (t1 -> t2 -> t3 -> t) -> (t1, t2, t3) -> t
uncurry3 f (a,b,c) = f a b c
uncurry4 :: (t1 -> t2 -> t3 -> t4 -> t) -> (t1, t2, t3, t4) -> t
uncurry4 f (a,b,c,d) = f a b c d
noisePlain :: Int -> PlainF
noisePlain !seed =
let noise = noiseValue (perlin seed 6 4 0.5)
fn c = fromIntegral (c+1) / fromIntegral (div (w + h) 2)
f !x !y = let !c = round ((1 + noise (fn x, fn y, 0)) * 127) in crgb 255 c (c :: Int)
in f
noiseRepa :: Int -> RepaF
noiseRepa !seed =
let noise = noiseValue (perlin seed 6 4 0.5)
fn c = fromIntegral (c+1) / fromIntegral (div (w + h) 2)
f !(Z :. y :. x) = let !c = round ((1 + noise (fn x, fn y, 0)) * 127) in crgb 255 c (c :: Int)
in f
| taruti/diagrams-cairo-raster | example/Benchmarks.hs | bsd-3-clause | 2,332 | 0 | 20 | 528 | 1,008 | 541 | 467 | 64 | 1 |
{-# LANGUAGE
BangPatterns
#-}
module Tactics.Util
( allLockablePlaces
, randomWalkTillLocked
, randomWalkTillLockedWithPPs
) where
import Control.Monad
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified System.Random as Rand
import Command
import qualified Game
import Unit (Unit)
import qualified Unit
import Util (readProblem)
-- Takes a game state and enumerates every position where the current unit can be locked.
-- Each Map value is the command sequence leading to the lock and one post-lock state.
-- Several states can reach the same position; such duplicates are pruned.
allLockablePlaces :: Game.GameState -> Map Unit ([Command], Game.GameState)
allLockablePlaces g | Game.gsStatus g /= Game.Running = Map.empty
allLockablePlaces g = loop (Map.singleton (Game.gsCurUnit g) ([], g)) Set.empty Map.empty
where
loop :: Map Unit ([Command], Game.GameState) -> Set Unit -> Map Unit ([Command], Game.GameState) -> Map Unit ([Command], Game.GameState)
loop !q !visited !locked
| Map.null q = Map.map (\(cmds, g) -> (reverse cmds, g)) locked
| otherwise = loop q' visited' locked'
where
visited' = visited `Set.union` Map.keysSet q
locked' = locked `Map.union` Map.fromList [x | x@(u2, (cmds,g2)) <- xs, Game.gsLocked g2]
q' = Map.fromList [x | x@(u2, (cmds,g2)) <- xs, not (Game.gsLocked g2), u2 `Set.notMember` visited]
xs = [ (u2, (c : cmds, g2))
| (u, (cmds,g)) <- Map.toList q
, Game.gsStatus g == Game.Running
, c <- allCommands
, let g2 = Game.gameStep c g
, Game.gsStatus g2 /= Game.Error
               , let u2 = applyCommand c u -- Game.gsCurUnit g2 may already be the next piece, so compute u2 here separately
, u2 `Set.notMember` visited
]
randomWalkTillLocked :: Rand.RandomGen g => g -> Game.GameState -> ([Command], Game.GameState, g)
randomWalkTillLocked = randomWalkTillLockedWithPPs []
randomWalkTillLockedWithPPs :: Rand.RandomGen g => [[Command]] -> g -> Game.GameState -> ([Command], Game.GameState, g)
randomWalkTillLockedWithPPs pp g gs = loop g [] gs
where
loop g acts gs
| Game.gsLocked gs2 = (concat (reverse acts2), gs2, g')
| otherwise = loop g' acts2 gs2
where
(acts2, gs2) =
        if null gss2 then error "randomWalkTillLockedWithPPs: should not happen"
else gss2 !! i
--gss2 = [(act : acts, gs2) | act <- allActions, let gs2 = Game.gameStepN act gs, Game.gsStatus gs2 /= Game.Error]
--gss2 = [(act2 : acts, gs2) | act <- allActions, let (act2,gs2) = stepN act gs, Game.gsStatus gs2 /= Game.Error]
gss2 = [(act2 : acts, gs2) | act <- allActions, (act2,gs2) <- stepN2 act gs, Game.gsStatus gs2 /= Game.Error]
(i, g') = Rand.randomR (0, length gss2 - 1) g
allActions = [[c] | c <- allCommands] ++ pp
stepN :: [Command] -> Game.GameState -> ([Command], Game.GameState)
stepN (c:cs) = loop cs [c] . Game.gameStep c
where
loop [] hist g = (reverse hist, g)
loop _ hist g | Game.gsLocked g = (reverse hist, g)
loop (c:cs) hist g = loop cs (c : hist) (Game.gameStep c g)
stepN2 :: [Command] -> Game.GameState -> [([Command], Game.GameState)]
stepN2 (c:cs) = loop cs [c] . Game.gameStep c
where
loop [] hist g = [(reverse hist, g)]
loop _ hist g | Game.gsLocked g = [] -- (reverse hist, g)
loop (c:cs) hist g = loop cs (c : hist) (Game.gameStep c g)
applyCommand :: Command -> Unit -> Unit
applyCommand (Move dir) = Unit.move dir
applyCommand (Turn dir) = Unit.turn dir
test_allLockablePlaces = do
Just input <- readProblem "problems/problem_1.json"
let gs0 = head $ Game.initGameStates input []
m = allLockablePlaces gs0
Game.gameDisplay gs0
forM_ (Map.toList m) $ \(u, (cmds, g)) -> do
putStrLn "----"
Game.gameDisplay g
test_randomWalkTillLocked = do
Just input <- readProblem "problems/problem_1.json"
let gs0 = head $ Game.initGameStates input []
Game.gameDisplay gs0
g <- Rand.newStdGen
putStrLn "----"
let (cmds, gs, g') = randomWalkTillLocked g gs0
-- Game.gameDisplay gs
  Game.gameDisplay $ Game.gameStepN (init cmds) gs0 -- print the state after locking, before the next piece appears
print cmds
| msakai/icfpc2015 | src/Tactics/Util.hs | bsd-3-clause | 4,435 | 0 | 16 | 1,023 | 1,526 | 800 | 726 | 78 | 3 |
-----------------------------------------------------------------------------
{- |
This module defines the monad of sampling functions. See Park, Pfenning and Thrun:
A probabilistic language based upon sampling functions, Principles of programming languages 2005
Sampling functions allow the composition of both discrete and continuous
probability distributions.
The implementation and interface are similar to those in the random-fu, monte-carlo
and monad-mersenne-random packages.
Example -- a biased coin:
@
data Throw = Head | Tail
throw bias = do
b <- bernoulli bias
return $ if b then Head else Tail
tenThrowsCrooked = replicateM 10 $ throw 0.3
countHeads = do
throws <- tenThrowsCrooked
return $ length [ () | Head <- throws]
main = do
print =<< sampleIO tenThrowsCrooked
print =<< eval ((\<4) \`fmap\` countHeads)
@
-}
{-# LANGUAGE BangPatterns, ScopedTypeVariables #-}
module Math.Probably.Sampler where
import Control.Monad
import Control.Applicative
import qualified Math.Probably.PDF as PDF
import Numeric.LinearAlgebra hiding (find)
import System.Random.Mersenne.Pure64
import System.Environment
import Data.List
import Data.Maybe
import Data.Ord
import Control.Spoon
--import Debug.Trace
type Seed = PureMT
data Sampler a = Sam {unSam :: Seed -> (a, Seed) }
| Samples [a]
instance Functor Sampler where
fmap f (Sam sf) = Sam $ \rs -> let (x,rs') = sf rs in
(f x, rs')
fmap f (Samples xs) = Samples $ map f xs
instance Applicative Sampler where
pure x = Sam (\rs-> (x, rs))
(Sam sff) <*> (Sam sfx) = Sam $ \rs-> let (f ,rs') = sff rs
(x, rs'') = sfx rs' in
(f x, rs'')
instance Monad Sampler where
return = pure
(Sam sf) >>= f = Sam $ \rs-> let (x, rs'::Seed) = sf rs
nextProb = f x
in case nextProb of
Sam g -> g rs'
Samples xs -> primOneOf xs rs'
(Samples xs) >>= f = Sam $ \rs-> let (x, rs'::Seed) = primOneOf xs rs
nextProb = f x
in case nextProb of
Sam g -> g rs'
Samples ys -> primOneOf ys rs'
-- | given a seed, return an infinite list of draws from sampling function
runSampler :: Seed -> Sampler a -> [a]
runSampler pmt s@(Sam sf)
= let (x, pmt') = sf pmt
in x : runSampler pmt' s
runSampler _ (Samples xs) = xs
-- | Get a seed
getSeedIO :: IO Seed
getSeedIO = do
args <- getArgs
case mapMaybe (stripPrefix "--seed=") args of
[] -> newPureMT
sdStr:_ -> return $ pureMT $ read sdStr
-- | Return an infinite list of draws from sampling function in the IO monad
runSamplerIO :: Sampler a -> IO [a]
runSamplerIO s =
fmap (`runSampler` s) $ getSeedIO
-- | Return a singe draw from sampling function
sampleIO :: Sampler a -> IO a
sampleIO s = head `fmap` runSamplerIO s
-- | Return a list of n draws from sampling function
sampleNIO :: Int -> Sampler a -> IO [a]
sampleNIO n s = take n `fmap` runSamplerIO s
-- | Estimate the probability that a hypothesis is true (in the IO monad)
eval :: Sampler Bool -> Sampler Double
eval s = do
bs <- replicateM 1000 s
return $ realToFrac (length (filter id bs)) / 1000
{-mu :: Vector Double
sigma :: Matrix Double
mystery = -1
mu = fromList [0,0,0]
sigma = (3><3) [ 1, 1, 0,
1, 1, mystery,
0, mystery, 1]
samIt = sampleNIO 2 $ multiNormal mu sigma
-}
-- | The joint distribution of two independent distributions
joint :: Sampler a -> Sampler b -> Sampler (a,b)
joint sf1 sf2 = liftM2 (,) sf1 sf2
-- | The joint distribution of two distributions where one depends on the other
jointConditional :: Sampler a -> (a-> Sampler b) -> Sampler (a,b)
jointConditional sf1 condsf
= do x <- sf1
y <- condsf x
return (x,y)
--replicateM :: Monad m => Int -> m a -> m [a]
--replicateM n ma = forM [1..n] $ const ma
-- * Uniform distributions
-- | The unit interval U(0,1)
unitSample :: Sampler Double
unitSample = Sam randomDouble
-- | for x and y, the uniform distribution between x and y
uniform :: (Fractional a) => a -> a -> Sampler a
uniform a b = (\x->(realToFrac x)*(b-a)+a) `fmap` unitSample
-- * Normally distributed sampling function
--http://en.wikipedia.org/wiki/Box-Muller_transform
-- | The univariate gaussian (normal) distribution defined by mean and standard deviation
gauss :: (Floating b) => b -> b -> Sampler b
gauss m sd =
do (u1,u2) <- (mapPair realToFrac) `fmap` joint unitSample unitSample
return $ sqrt(-2*log(u1))*cos(2*pi*u2)*sd+m
where mapPair f (x,y) = (f x, f y)
-- | Gaussians specialised for doubles
gaussD :: Double -> Double -> Sampler Double
gaussD m sd =
do (u1,u2) <- joint unitSample unitSample
return $ sqrt(-2*log(u1))*cos(2*pi*u2)*sd+m
gaussMany :: Floating b => [(b,b)] -> Sampler [b]
gaussMany means_sds = do gus <- gaussManyUnit (length means_sds)
return $ map f $ zip gus means_sds
where f (gu, (mean, sd)) = gu*sd+mean
gaussManyD :: [(Double,Double)] -> Sampler [Double]
gaussManyD means_sds = do gus <- gaussManyUnitD (length means_sds)
return $ zipWith f gus means_sds
where f gu (mean, sd) = gu*sd+mean
gaussManyUnit :: Floating b => Int -> Sampler [b]
gaussManyUnit 0 = return []
gaussManyUnit n | odd n = liftM2 (:) (gauss 0 1) (gaussManyUnit (n-1))
| otherwise = do us <- forM [1..n] $ const $ unitSample
return $ gaussTwoAtATime $ map realToFrac us
where
gaussTwoAtATime :: Floating a => [a] -> [a]
gaussTwoAtATime (u1:u2:rest) = sqrt(-2*log(u1))*cos(2*pi*u2) : sqrt(-2*log(u1))*sin(2*pi*u2) : gaussTwoAtATime rest
gaussTwoAtATime _ = []
gaussManyUnitD :: Int -> Sampler [Double]
gaussManyUnitD 0 = return []
gaussManyUnitD n | odd n = liftM2 (:) (gauss 0 1) (gaussManyUnit (n-1))
| otherwise = do us <- forM [1..n] $ const $ unitSample
return $ gaussTwoAtATimeD us
where
gaussTwoAtATimeD :: [Double] -> [Double]
gaussTwoAtATimeD (u1:u2:rest) = sqrt(-2*log(u1))*cos(2*pi*u2) : sqrt(-2*log(u1))*sin(2*pi*u2) : gaussTwoAtATimeD rest
gaussTwoAtATimeD _ = []
-- | Multivariate normal distribution
multiNormal :: Vector Double -> Matrix Double -> Sampler (Vector Double)
multiNormal mu sigma =
let c = cholSH sigma
a = trans c
k = dim mu
in do z <- fromList `fmap` gaussManyUnitD k
-- return $ mu + (head $ toColumns $ a*asRow z)
let c = asColumn z
let r = asRow z
return $ (mu + (head $ toColumns $ a `multiply` asColumn z))
multiNormalByChol :: Vector Double -> Matrix Double -> Sampler (Vector Double)
multiNormalByChol mu cholSigma =
let a = trans $ cholSigma
k = dim mu
in do z <- fromList `fmap` gaussManyUnitD k
-- return $ mu + (head $ toColumns $ a*asRow z)
let c = asColumn z
let r = asRow z
return $ (mu + (head $ toColumns $ a `multiply` asColumn z))
multiNormalIndep :: Vector Double -> Vector Double -> Sampler (Vector Double)
multiNormalIndep vars mus = do
let k = dim mus
gs <- gaussManyUnitD k
return $ fromList $ zipWith3 (\var mu g -> g*sqrt(var) + mu) (toList vars) (toList mus) gs
--http://en.wikipedia.org/wiki/Log-normal_distribution#Generating_log-normally-distributed_random_variates
-- | log-normal distribution <http://en.wikipedia.org/wiki/Log-normal_distribution>
logNormal :: (Floating b) => b -> b -> Sampler b
logNormal m sd =
do n <- gauss 0 1
return $ exp $ m + sd * n
-- * Other distribution
-- | Bernoulli distribution. Returns a Boolean that is 'True' with probability 'p'
bernoulli :: Double -> Sampler Bool
bernoulli p = (<p) `fmap` unitSample
discrete :: [(Double,a)] -> Sampler a
discrete weightedSamples =
let sumWeights = sum $ map fst weightedSamples
cummWeightedSamples = scanl (\(csum,_) (w,x) -> (csum+w,x)) (0,undefined) $ sortBy (comparing fst) weightedSamples
in do u <- unitSample
return . snd . fromJust $ find ((>=u*sumWeights) . fst) cummWeightedSamples
primOneOf :: [a] -> Seed -> (a, Seed)
primOneOf xs seed
= let (u, nextSeed) = randomDouble seed
idx = floor $ (realToFrac u)*(realToFrac $ length xs )
in (xs !! idx, nextSeed)
oneOf :: [a] -> Sampler a
oneOf xs = do idx <- floor `fmap` uniform (0::Double) (realToFrac $ length xs )
return $ xs !! idx
nOf :: Int -> [a] -> Sampler [a]
nOf n xs = sequence $ replicate n $ oneOf xs
{-main = do
rnds <- take 1000 `fmap` runSamplerSysRan (oneOf [1,2,3])
let diff = sort $ nub rnds
print $ map (\x->(x, length $ filter (==x) rnds )) $ diff -}
-- | Bayesian inference from likelihood and prior using rejection sampling.
bayesRejection :: (PDF.PDF a) -> Double -> Sampler a -> Sampler a
bayesRejection p c q = bayes
where bayes = do x <- q
u <- unitSample
if u < p x / c
then return x
else bayes
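-- Editor's sketch, not part of the original module: a rejection-sampling
-- posterior for a coin's bias after seeing 7 heads in 10 throws, using a
-- hand-written unnormalised likelihood, a uniform prior, and 0.3 as a crude
-- upper bound on that likelihood.
coinPosterior :: Sampler Double
coinPosterior = bayesRejection (\p -> p^7 * (1-p)^3) 0.3 (uniform 0 1)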
{-
expectation :: Fractional a => Int -> Sampler a -> IO a
expectation n sf =
(mean . take n) `fmap` runSamplerIO sf
expectSD :: Floating a => Int -> Sampler a -> IO (a,a)
expectSD n sf =
(meanSD . take n) `fmap` runSamplerIO sf
-}
--mapPair :: (a->b) -> (a,a) -> (b,b)
{-
--http://cgi.cse.unsw.edu.au/~dons/blog/2008/05/16#fast
mean :: Fractional a => [a] -> a
mean = go 0 0
where
-- go :: -> Int -> [Double] -> Double
go s n [] = s / fromIntegral n
go !s !n (x:xs) = go (s+x) (n+1) xs
meanSD :: Floating a => [a] -> (a,a)
meanSD = go 0 0 0
where go sq s n [] = let len = fromIntegral n in
(s/len, (recip len)*sqrt (len*sq-s*s))
go !sq !s !n (x:xs) = go (sq+x*x) (s+x) (n+1) xs
test_mean = mean [0..1e8]
test_meanVar = meanSD [0..1e8]
main = do u <- expectSD 1000000 $ gauss 0 1
print u
-}
--poisson :: :: Double -> [Double] -> IO Double
-- | Exponential distribution
expDist rate = (\u-> negate $ (log(1-u))/rate) `fmap` unitSample
-- | Homogeneous poisson process defined by rate and duration
poissonMany :: Double -> Double -> Sampler [Double]
poissonMany rate tmax = aux 0
where aux last = do
next <- (+last) `fmap` expDist rate
if next > tmax
then return []
else liftM2 (:) (return next) $ aux next
-- | binomial distribution
binomial :: Int -> Double -> Sampler Int
binomial n p = do
bools <- forM [1..n] $ const $ fmap (<p) unitSample
return $ length $ [t | t@True <- bools]
-- from random-fu
-- | Gamma distribution
gamma :: Double -> Double -> Sampler Double
gamma a b
| a < 1
= do
u <- unitSample
x <- gamma (1 + a) b
return (x * u ** recip a)
| otherwise
= go
where
d = a - (1 / 3)
c = recip (3 * sqrt d) -- (1 / 3) / sqrt d
go = do
x <- gaussD 0 1
let cx = c * x
v = (1 + cx) ^ 3
x_2 = x * x
x_4 = x_2 * x_2
if cx <= (-1)
then go
else do
u <- unitSample
if u < 1 - 0.0331 * x_4
|| log u < 0.5 * x_2 + d * (1 - v + log v)
then return (b * d * v)
else go
-- | inverse gamma distribution
invGamma :: Double -> Double -> Sampler Double
invGamma a b = recip `fmap` gamma a b
--http://en.wikipedia.org/wiki/Multivariate_normal_distribution#Drawing_values_from_the_distribution
--multiNormal :: Vector Double -> Matrix Double -> Sampler (Vector Double)
--http://www.xycoon.com/beta_randomnumbers.htm
-- | beta distribution
beta :: Int -> Int -> Sampler Double
beta a b =
let gam n = do us <- forM [1..n] $ const unitSample
return $ log $ product us
in do gama1 <- gamma (realToFrac a) 1
-- gama2 <- gamma (realToFrac a) 1
gamb <- gamma (realToFrac b) 1
return $ gama1/(gama1+gamb)
tbeta = sampleNIO 100 $ beta 1 1
| glutamate/probably | Math/Probably/Sampler.hs | bsd-3-clause | 12708 | 0 | 20 | 3927 | 3673 | 1871 | 1802 | 200 | 3
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.ES.Rules
( rules ) where
import Control.Monad (liftM2)
import qualified Data.Text as Text
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Types (OrdinalData (..))
import qualified Duckling.Ordinal.Types as TOrdinal
import Duckling.Regex.Types
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
import Duckling.Types
ruleNamedday :: Rule
ruleNamedday = Rule
{ name = "named-day"
, pattern =
[ regex "lunes|lun?\\.?"
]
, prod = \_ -> tt $ dayOfWeek 1
}
ruleTheDayAfterTomorrow :: Rule
ruleTheDayAfterTomorrow = Rule
{ name = "the day after tomorrow"
, pattern =
[ regex "pasado\\s?ma(n|\x00f1)ana"
]
, prod = \_ -> tt $ cycleNth TG.Day 2
}
ruleHaceDuration :: Rule
ruleHaceDuration = Rule
{ name = "hace <duration>"
, pattern =
[ regex "hace"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ durationAgo dd
_ -> Nothing
}
ruleNamedmonth12 :: Rule
ruleNamedmonth12 = Rule
{ name = "named-month"
, pattern =
[ regex "diciembre|dic\\.?"
]
, prod = \_ -> tt $ month 12
}
ruleCeTime :: Rule
ruleCeTime = Rule
{ name = "ce <time>"
, pattern =
[ regex "este"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 False td
_ -> Nothing
}
ruleNamedday2 :: Rule
ruleNamedday2 = Rule
{ name = "named-day"
, pattern =
[ regex "martes|mar?\\.?"
]
, prod = \_ -> tt $ dayOfWeek 2
}
ruleThisDayofweek :: Rule
ruleThisDayofweek = Rule
{ name = "this <day-of-week>"
, pattern =
[ regex "este"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleNamedday6 :: Rule
ruleNamedday6 = Rule
{ name = "named-day"
, pattern =
[ regex "s(\x00e1|a)bado|s(\x00e1|a)b\\.?"
]
, prod = \_ -> tt $ dayOfWeek 6
}
ruleDatetimeDatetimeInterval :: Rule
ruleDatetimeDatetimeInterval = Rule
{ name = "<datetime> - <datetime> (interval)"
, pattern =
[ Predicate isNotLatent
, regex "\\-|al?"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Open td1 td2
_ -> Nothing
}
ruleNamedmonth7 :: Rule
ruleNamedmonth7 = Rule
{ name = "named-month"
, pattern =
[ regex "julio|jul\\.?"
]
, prod = \_ -> tt $ month 7
}
ruleEvening :: Rule
ruleEvening = Rule
{ name = "evening"
, pattern =
[ regex "noche"
]
, prod = \_ ->
let from = hour False 18
to = hour False 0
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleDayOfMonthSt :: Rule
ruleDayOfMonthSt = Rule
{ name = "day of month (1st)"
, pattern =
[ regex "primero|uno|prem\\.?|1o"
]
, prod = \_ -> tt $ dayOfMonth 1
}
ruleEnDuration :: Rule
ruleEnDuration = Rule
{ name = "en <duration>"
, pattern =
[ regex "en"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleNow :: Rule
ruleNow = Rule
{ name = "now"
, pattern =
[ regex "(hoy)|(en este momento)"
]
, prod = \_ -> tt $ cycleNth TG.Day 0
}
ruleUltimoDayofweekDeTime :: Rule
ruleUltimoDayofweekDeTime = Rule
{ name = "ultimo <day-of-week> de <time>"
, pattern =
[ regex "(\x00fa|u)ltimo"
, Predicate isADayOfWeek
, regex "de|en"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
tt $ predLastOf td1 td2
_ -> Nothing
}
ruleEntreDatetimeEtDatetimeInterval :: Rule
ruleEntreDatetimeEtDatetimeInterval = Rule
{ name = "entre <datetime> et <datetime> (interval)"
, pattern =
[ regex "entre"
, dimension Time
, regex "y"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Open td1 td2
_ -> Nothing
}
ruleHhhmmTimeofday :: Rule
ruleHhhmmTimeofday = Rule
{ name = "hh(:|.|h)mm (time-of-day)"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))[:h\\.]([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
h <- parseInt m1
m <- parseInt m2
tt $ hourMinute True h m
_ -> Nothing
}
ruleNamedday4 :: Rule
ruleNamedday4 = Rule
{ name = "named-day"
, pattern =
[ regex "jueves|jue|jue\\."
]
, prod = \_ -> tt $ dayOfWeek 4
}
ruleElDayofmonthDeNamedmonth :: Rule
ruleElDayofmonthDeNamedmonth = Rule
{ name = "el <day-of-month> de <named-month>"
, pattern =
[ regex "el"
, Predicate isDOMInteger
, regex "de"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(_:token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleNPasadosCycle :: Rule
ruleNPasadosCycle = Rule
{ name = "n pasados <cycle>"
, pattern =
[ Predicate $ isIntegerBetween 2 9999
, regex "pasad(a|o)s?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(token:_:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain (- v)
_ -> Nothing
}
ruleElProximoCycle :: Rule
ruleElProximoCycle = Rule
{ name = "el proximo <cycle> "
, pattern =
[ regex "(el|los|la|las) ?"
, regex "pr(\x00f3|o)xim(o|a)s?|siguientes?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 1
_ -> Nothing
}
rulePasadosNCycle :: Rule
rulePasadosNCycle = Rule
{ name = "pasados n <cycle>"
, pattern =
[ regex "pasad(a|o)s?"
, Predicate $ isIntegerBetween 2 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain (- v)
_ -> Nothing
}
ruleElDayofmonthNonOrdinal :: Rule
ruleElDayofmonthNonOrdinal = Rule
{ name = "el <day-of-month> (non ordinal)"
, pattern =
[ regex "el"
, Predicate $ isIntegerBetween 1 31
]
, prod = \tokens -> case tokens of
(_:token:_) -> do
v <- getIntValue token
tt $ dayOfMonth v
_ -> Nothing
}
ruleSeason4 :: Rule
ruleSeason4 = Rule
{ name = "season"
, pattern =
[ regex "primavera"
]
, prod = \_ ->
let from = monthDay 3 20
to = monthDay 6 21
in Token Time <$> interval TTime.Open from to
}
ruleYearLatent2 :: Rule
ruleYearLatent2 = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween 2101 10000
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ year v
_ -> Nothing
}
ruleNoon :: Rule
ruleNoon = Rule
{ name = "noon"
, pattern =
[ regex "mediod(\x00ed|i)a"
]
, prod = \_ -> tt $ hour False 12
}
ruleProximasNCycle :: Rule
ruleProximasNCycle = Rule
{ name = "proximas n <cycle>"
, pattern =
[ regex "pr(\x00f3|o)xim(o|a)s?"
, Predicate $ isIntegerBetween 2 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleNochevieja :: Rule
ruleNochevieja = Rule
{ name = "Nochevieja"
, pattern =
[ regex "nochevieja"
]
, prod = \_ -> tt $ monthDay 12 31
}
ruleTheDayBeforeYesterday :: Rule
ruleTheDayBeforeYesterday = Rule
{ name = "the day before yesterday"
, pattern =
[ regex "anteayer|antes de (ayer|anoche)|antier"
]
, prod = \_ -> tt . cycleNth TG.Day $ - 2
}
ruleHourofdayMinusIntegerAsRelativeMinutes :: Rule
ruleHourofdayMinusIntegerAsRelativeMinutes = Rule
{ name = "<hour-of-day> minus <integer> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "menos\\s?"
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time td:_:token:_) -> do
n <- getIntValue token
t <- minutesBefore n td
Just $ Token Time t
_ -> Nothing
}
ruleHourofdayMinusIntegerAsRelativeMinutes2 :: Rule
ruleHourofdayMinusIntegerAsRelativeMinutes2 = Rule
{ name = "<hour-of-day> minus <integer> (as relative minutes) minutes"
, pattern =
[ Predicate isAnHourOfDay
, regex "menos\\s?"
, Predicate $ isIntegerBetween 1 59
, regex "min\\.?(uto)?s?"
]
, prod = \tokens -> case tokens of
(Token Time td:_:token:_) -> do
n <- getIntValue token
t <- minutesBefore n td
Just $ Token Time t
_ -> Nothing
}
ruleHourofdayMinusQuarter :: Rule
ruleHourofdayMinusQuarter = Rule
{ name = "<hour-of-day> minus quarter (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "menos\\s? cuarto"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> do
t <- minutesBefore 15 td
Just $ Token Time t
_ -> Nothing
}
ruleHourofdayMinusHalf :: Rule
ruleHourofdayMinusHalf = Rule
{ name = "<hour-of-day> minus half (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "menos\\s? media"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> do
t <- minutesBefore 30 td
Just $ Token Time t
_ -> Nothing
}
ruleHourofdayMinusThreeQuarter :: Rule
ruleHourofdayMinusThreeQuarter = Rule
{ name = "<hour-of-day> minus three quarter (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "menos\\s? (3|tres) cuartos?"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> do
t <- minutesBefore 45 td
Just $ Token Time t
_ -> Nothing
}
ruleHourofdayIntegerAsRelativeMinutes :: Rule
ruleHourofdayIntegerAsRelativeMinutes = Rule
{ name = "<hour-of-day> <integer> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayIntegerAsRelativeMinutes2 :: Rule
ruleHourofdayIntegerAsRelativeMinutes2 = Rule
{ name = "<hour-of-day> <integer> (as relative minutes) minutes"
, pattern =
[ Predicate isAnHourOfDay
, Predicate $ isIntegerBetween 1 59
, regex "min\\.?(uto)?s?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayQuarter :: Rule
ruleHourofdayQuarter = Rule
{ name = "<hour-of-day> quarter (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "cuarto"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHourofdayHalf :: Rule
ruleHourofdayHalf = Rule
{ name = "<hour-of-day> half (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "media"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleHourofdayThreeQuarter :: Rule
ruleHourofdayThreeQuarter = Rule
{ name = "<hour-of-day> three quarters (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "(3|tres) cuartos?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 45
_ -> Nothing
}
ruleHourofdayAndRelativeMinutes :: Rule
ruleHourofdayAndRelativeMinutes = Rule
{ name = "<hour-of-day> and <relative minutes>"
, pattern =
[ Predicate isAnHourOfDay
, regex "y"
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayAndRelativeMinutes2 :: Rule
ruleHourofdayAndRelativeMinutes2 = Rule
{ name = "<hour-of-day> and <relative minutes> minutes"
, pattern =
[ Predicate isAnHourOfDay
, regex "y"
, Predicate $ isIntegerBetween 1 59
, regex "min\\.?(uto)?s?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayAndQuarter :: Rule
ruleHourofdayAndQuarter = Rule
{ name = "<hour-of-day> and quarter"
, pattern =
[ Predicate isAnHourOfDay
, regex "y cuarto"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHourofdayAndHalf :: Rule
ruleHourofdayAndHalf = Rule
{ name = "<hour-of-day> and half"
, pattern =
[ Predicate isAnHourOfDay
, regex "y media"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleHourofdayAndThreeQuarter :: Rule
ruleHourofdayAndThreeQuarter = Rule
{ name = "<hour-of-day> and 3 quarters"
, pattern =
[ Predicate isAnHourOfDay
, regex "y (3|tres) cuartos?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 45
_ -> Nothing
}
ruleNamedmonth :: Rule
ruleNamedmonth = Rule
{ name = "named-month"
, pattern =
[ regex "enero|ene\\.?"
]
, prod = \_ -> tt $ month 1
}
ruleInThePartofday :: Rule
ruleInThePartofday = Rule
{ name = "in the <part-of-day>"
, pattern =
[ regex "(a|en|de|por) la"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleDelYear :: Rule
ruleDelYear = Rule
{ name = "del <year>"
, pattern =
[ regex "del( a(\x00f1|n)o)?"
, Predicate $ isIntegerBetween 1000 2100
]
, prod = \tokens -> case tokens of
(_:token:_) -> do
v <- getIntValue token
tt $ year v
_ -> Nothing
}
ruleNamedmonth3 :: Rule
ruleNamedmonth3 = Rule
{ name = "named-month"
, pattern =
[ regex "marzo|mar\\.?"
]
, prod = \_ -> tt $ month 3
}
ruleDdmm :: Rule
ruleDdmm = Rule
{ name = "dd[/-]mm"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])[/-](0?[1-9]|1[0-2])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
d <- parseInt m1
m <- parseInt m2
tt $ monthDay m d
_ -> Nothing
}
ruleAfternoon :: Rule
ruleAfternoon = Rule
{ name = "afternoon"
, pattern =
[ regex "tarde"
]
, prod = \_ ->
let from = hour False 12
to = hour False 19
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleNamedmonth4 :: Rule
ruleNamedmonth4 = Rule
{ name = "named-month"
, pattern =
[ regex "abril|abr\\.?"
]
, prod = \_ -> tt $ month 4
}
ruleMidnight :: Rule
ruleMidnight = Rule
{ name = "midnight"
, pattern =
[ regex "medianoche"
]
, prod = \_ -> tt $ hour False 0
}
ruleAnoNuevo :: Rule
ruleAnoNuevo = Rule
{ name = "ano nuevo"
, pattern =
[ regex "a(n|\x00f1)o nuevo"
]
, prod = \_ -> tt $ monthDay 1 1
}
ruleNamedday5 :: Rule
ruleNamedday5 = Rule
{ name = "named-day"
, pattern =
[ regex "viernes|vie|vie\\."
]
, prod = \_ -> tt $ dayOfWeek 5
}
ruleDdddMonthinterval :: Rule
ruleDdddMonthinterval = Rule
{ name = "dd-dd <month>(interval)"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])"
, regex "\\-|al?"
, regex "(3[01]|[12]\\d|0?[1-9])"
, regex "de"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:_)):
_:
Token RegexMatch (GroupMatch (m2:_)):
_:
Token Time td:
_) -> do
d1 <- parseInt m1
d2 <- parseInt m2
from <- intersect (dayOfMonth d1) td
to <- intersect (dayOfMonth d2) td
Token Time <$> interval TTime.Closed from to
_ -> Nothing
}
ruleTimeofdayLatent :: Rule
ruleTimeofdayLatent = Rule
{ name = "time-of-day (latent)"
, pattern =
[ Predicate $ isIntegerBetween 0 23
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ hour True v
_ -> Nothing
}
ruleNamedmonth2 :: Rule
ruleNamedmonth2 = Rule
{ name = "named-month"
, pattern =
[ regex "febrero|feb\\.?"
]
, prod = \_ -> tt $ month 2
}
ruleNamedmonthnameddayPast :: Rule
ruleNamedmonthnameddayPast = Rule
{ name = "<named-month|named-day> past"
, pattern =
[ dimension Time
, regex "pasad(o|a)"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ predNth (-1) False td
_ -> Nothing
}
ruleSeason3 :: Rule
ruleSeason3 = Rule
{ name = "season"
, pattern =
[ regex "invierno"
]
, prod = \_ ->
let from = monthDay 12 21
to = monthDay 3 20
in Token Time <$> interval TTime.Open from to
}
ruleSeason :: Rule
ruleSeason = Rule
{ name = "season"
, pattern =
[ regex "verano"
]
, prod = \_ ->
let from = monthDay 6 21
to = monthDay 9 23
in Token Time <$> interval TTime.Open from to
}
ruleRightNow :: Rule
ruleRightNow = Rule
{ name = "right now"
, pattern =
[ regex "ahor(it)?a|ya|en\\s?seguida|cuanto antes"
]
, prod = \_ -> tt $ cycleNth TG.Second 0
}
ruleDimTimeDeLaTarde :: Rule
ruleDimTimeDeLaTarde = Rule
{ name = "<dim time> de la tarde"
, pattern =
[ Predicate isATimeOfDay
, regex "(a|en|de) la tarde"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> do
tarde <- interval TTime.Open (hour False 12) (hour False 21)
Token Time <$> intersect td (mkLatent $ partOfDay tarde)
_ -> Nothing
}
ruleIntegerInThePartofday :: Rule
ruleIntegerInThePartofday = Rule
{ name = "<integer> in the <part-of-day>"
, pattern =
[ Predicate isAPartOfDay
, regex "(a|en|de|por) la"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNCycleProximoqueViene :: Rule
ruleNCycleProximoqueViene = Rule
{ name = "n <cycle> (proximo|que viene)"
, pattern =
[ Predicate $ isIntegerBetween 2 9999
, dimension TimeGrain
, regex "(pr(\x00f3|o)xim(o|a)s?|que vienen?|siguientes?)"
]
, prod = \tokens -> case tokens of
(token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleNamedmonthnameddayNext :: Rule
ruleNamedmonthnameddayNext = Rule
{ name = "<named-month|named-day> next"
, pattern =
[ dimension Time
, regex "que vienen?"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ predNth 1 False td
_ -> Nothing
}
ruleIntersect :: Rule
ruleIntersect = Rule
{ name = "intersect"
, pattern =
[ Predicate isNotLatent
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleTimeofdayPartofday :: Rule
ruleTimeofdayPartofday = Rule
{ name = "<time-of-day> <part-of-day>"
, pattern =
[ dimension Time
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleDimTimeDeLaManana :: Rule
ruleDimTimeDeLaManana = Rule
{ name = "<dim time> de la manana"
, pattern =
[ Predicate isATimeOfDay
, regex "(a|en|de) la ma(\x00f1|n)ana"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> do
manana <- interval TTime.Open (hour False 0) (hour False 12)
Token Time <$> intersect td (mkLatent $ partOfDay manana)
_ -> Nothing
}
ruleDeDatetimeDatetimeInterval :: Rule
ruleDeDatetimeDatetimeInterval = Rule
{ name = "de <datetime> - <datetime> (interval)"
, pattern =
[ regex "del?"
, dimension Time
, regex "\\-|al?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Open td1 td2
_ -> Nothing
}
ruleNthTimeDeTime2 :: Rule
ruleNthTimeDeTime2 = Rule
{ name = "nth <time> de <time>"
, pattern =
[ regex "the"
, dimension Ordinal
, dimension Time
, regex "de|en"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:
Token Ordinal (OrdinalData {TOrdinal.value = v}):
Token Time td1:
_:
Token Time td2:
_) -> Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleNamedmonth6 :: Rule
ruleNamedmonth6 = Rule
{ name = "named-month"
, pattern =
[ regex "junio|jun\\.?"
]
, prod = \_ -> tt $ month 6
}
ruleDentroDeDuration :: Rule
ruleDentroDeDuration = Rule
{ name = "dentro de <duration>"
, pattern =
[ regex "dentro de"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
Token Time <$>
interval TTime.Open (cycleNth TG.Second 0) (inDuration dd)
_ -> Nothing
}
ruleNamedmonth8 :: Rule
ruleNamedmonth8 = Rule
{ name = "named-month"
, pattern =
[ regex "agosto|ago\\.?"
]
, prod = \_ -> tt $ month 8
}
ruleWeekend :: Rule
ruleWeekend = Rule
{ name = "week-end"
, pattern =
[ regex "week[ -]?end|fin de semana"
]
, prod = \_ -> do
from <- intersect (dayOfWeek 5) (hour False 18)
to <- intersect (dayOfWeek 1) (hour False 0)
Token Time <$> interval TTime.Open from to
}
ruleOrdinalQuarterYear :: Rule
ruleOrdinalQuarterYear = Rule
{ name = "<ordinal> quarter <year>"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
, regex "del? ?"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal (OrdinalData {TOrdinal.value = v}):_:Token Time td:_) ->
tt $ cycleNthAfter False TG.Quarter (v - 1) td
_ -> Nothing
}
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
{ name = "yyyy-mm-dd"
, pattern =
[ regex "(\\d{2,4})-(0?[1-9]|1[0-2])-(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:m3:_)):_) -> do
y <- parseInt m1
m <- parseInt m2
d <- parseInt m3
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleTimeofdayHoras :: Rule
ruleTimeofdayHoras = Rule
{ name = "<time-of-day> horas"
, pattern =
[ Predicate isATimeOfDay
, regex "h\\.?(ora)?s?"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleNavidad :: Rule
ruleNavidad = Rule
{ name = "Navidad"
, pattern =
[ regex "(la )?navidad"
]
, prod = \_ -> tt $ monthDay 12 25
}
ruleElCycleAntesTime :: Rule
ruleElCycleAntesTime = Rule
{ name = "el <cycle> antes <time>"
, pattern =
[ regex "l[ea']? ?"
, dimension TimeGrain
, regex "antes de"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain (-1) td
_ -> Nothing
}
ruleTwoTimeTokensSeparatedBy :: Rule
ruleTwoTimeTokensSeparatedBy = Rule
{ name = "two time tokens separated by \",\""
, pattern =
[ Predicate isNotLatent
, regex ","
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleMorning :: Rule
ruleMorning = Rule
{ name = "morning"
, pattern =
[ regex "ma(\x00f1|n)ana"
]
, prod = \_ ->
let from = hour False 4
to = hour False 12
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleALasHourmintimeofday :: Rule
ruleALasHourmintimeofday = Rule
{ name = "a las <hour-min>(time-of-day)"
, pattern =
[ regex "((al?)( las?)?|las?)"
, Predicate isATimeOfDay
, regex "horas?"
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleThisPartofday :: Rule
ruleThisPartofday = Rule
{ name = "this <part-of-day>"
, pattern =
[ regex "est(e|a)"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time . partOfDay <$>
intersect (cycleNth TG.Day 0) td
_ -> Nothing
}
ruleLaCyclePasado :: Rule
ruleLaCyclePasado = Rule
{ name = "la <cycle> pasado"
, pattern =
[ regex "(el|los|la|las) ?"
, dimension TimeGrain
, regex "pasad(a|o)s?|(u|\x00fa)ltim[ao]s?"
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleYearLatent :: Rule
ruleYearLatent = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween (- 10000) 999
]
, prod = \tokens -> case tokens of
(token:_) -> do
n <- getIntValue token
tt . mkLatent $ year n
_ -> Nothing
}
ruleYesterday :: Rule
ruleYesterday = Rule
{ name = "yesterday"
, pattern =
[ regex "ayer"
]
, prod = \_ -> tt . cycleNth TG.Day $ - 1
}
ruleSeason2 :: Rule
ruleSeason2 = Rule
{ name = "season"
, pattern =
[ regex "oto(\x00f1|n)o"
]
, prod = \_ ->
let from = monthDay 9 23
to = monthDay 12 21
in Token Time <$> interval TTime.Open from to
}
ruleDayofweekDayofmonth :: Rule
ruleDayofweekDayofmonth = Rule
{ name = "<day-of-week> <day-of-month>"
, pattern =
[ Predicate isADayOfWeek
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleTimeofdayAmpm :: Rule
ruleTimeofdayAmpm = Rule
{ name = "<time-of-day> am|pm"
, pattern =
[ Predicate isATimeOfDay
, regex "([ap])\\.?m?\\.?"
]
, prod = \tokens -> case tokens of
(Token Time td:Token RegexMatch (GroupMatch (ap:_)):_) ->
tt . timeOfDayAMPM td $ Text.toLower ap == "a"
_ -> Nothing
}
ruleDayofmonthDeNamedmonth :: Rule
ruleDayofmonthDeNamedmonth = Rule
{ name = "<day-of-month> de <named-month>"
, pattern =
[ Predicate isDOMInteger
, regex "de"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleEntreDdEtDdMonthinterval :: Rule
ruleEntreDdEtDdMonthinterval = Rule
{ name = "entre dd et dd <month>(interval)"
, pattern =
[ regex "entre( el)?"
, regex "(0?[1-9]|[12]\\d|3[01])"
, regex "y( el)?"
, regex "(0?[1-9]|[12]\\d|3[01])"
, regex "de"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(_:
Token RegexMatch (GroupMatch (m1:_)):
_:
Token RegexMatch (GroupMatch (m2:_)):
_:
Token Time td:
_) -> do
v1 <- parseInt m1
v2 <- parseInt m2
from <- intersect (dayOfMonth v1) td
to <- intersect (dayOfMonth v2) td
Token Time <$> interval TTime.Closed from to
_ -> Nothing
}
ruleNamedmonthDayofmonth :: Rule
ruleNamedmonthDayofmonth = Rule
{ name = "<named-month> <day-of-month>"
, pattern =
[ Predicate isAMonth
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleNamedmonth5 :: Rule
ruleNamedmonth5 = Rule
{ name = "named-month"
, pattern =
[ regex "mayo?\\.?"
]
, prod = \_ -> tt $ month 5
}
ruleNamedday7 :: Rule
ruleNamedday7 = Rule
{ name = "named-day"
, pattern =
[ regex "domingo|dom\\.?"
]
, prod = \_ -> tt $ dayOfWeek 7
}
ruleElTime :: Rule
ruleElTime = Rule
{ name = "el <time>"
, pattern =
[ regex "d?el"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleYear :: Rule
ruleYear = Rule
{ name = "year"
, pattern =
[ Predicate $ isIntegerBetween 1000 2100
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt $ year v
_ -> Nothing
}
ruleNamedmonth10 :: Rule
ruleNamedmonth10 = Rule
{ name = "named-month"
, pattern =
[ regex "octubre|oct\\.?"
]
, prod = \_ -> tt $ month 10
}
ruleEsteenUnCycle :: Rule
ruleEsteenUnCycle = Rule
{ name = "este|en un <cycle>"
, pattern =
[ regex "(est(e|a|os)|en (el|los|la|las) ?)"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 0
_ -> Nothing
}
ruleNProximasCycle :: Rule
ruleNProximasCycle = Rule
{ name = "n proximas <cycle>"
, pattern =
[ Predicate $ isIntegerBetween 2 9999
, regex "pr(\x00f3|o)xim(o|a)s?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(token:_:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleLaPasadoCycle :: Rule
ruleLaPasadoCycle = Rule
{ name = "la pasado <cycle>"
, pattern =
[ regex "(el|los|la|las) ?"
, regex "pasad(a|o)s?|(u|\x00fa)ltim[ao]s?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:_:Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleALasTimeofday :: Rule
ruleALasTimeofday = Rule
{ name = "a las <time-of-day>"
, pattern =
[ regex "(al?)( las?)?|las?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleDdmmyyyy :: Rule
ruleDdmmyyyy = Rule
{ name = "dd[/-.]mm[/-.]yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])[\\./-](0?[1-9]|1[0-2])[\\./-](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:m3:_)):_) -> do
d <- parseInt m1
m <- parseInt m2
y <- parseInt m3
tt $ yearMonthDay y m d
_ -> Nothing
}
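-- Illustrative note (added as a sketch; not part of the original rule set):
-- the regex above accepts day, month and year separated by '/', '-' or '.',
-- so e.g. "31/12/2017" produces `yearMonthDay 2017 12 31`.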
ruleNamedmonth11 :: Rule
ruleNamedmonth11 = Rule
{ name = "named-month"
, pattern =
[ regex "noviembre|nov\\.?"
]
, prod = \_ -> tt $ month 11
}
ruleOrdinalQuarter :: Rule
ruleOrdinalQuarter = Rule
{ name = "<ordinal> quarter"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
]
, prod = \tokens -> case tokens of
(Token Ordinal (OrdinalData {TOrdinal.value = v}):_) ->
tt . cycleNthAfter False TG.Quarter (v - 1)
$ cycleNth TG.Year 0
_ -> Nothing
}
ruleElCycleProximoqueViene :: Rule
ruleElCycleProximoqueViene = Rule
{ name = "el <cycle> (proximo|que viene)"
, pattern =
[ regex "(el|los|la|las) ?"
, dimension TimeGrain
, regex "(pr(\x00f3|o)xim(o|a)s?|que vienen?|siguientes?)"
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 1
_ -> Nothing
}
ruleElCycleProximoqueVieneTime :: Rule
ruleElCycleProximoqueVieneTime = Rule
{ name = "el <cycle> proximo|que viene <time>"
, pattern =
[ regex "(el|los|la|las)"
, dimension TimeGrain
, regex "(pr(\x00f3|o)xim(o|a)s?|que vienen?|siguientes?)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain 1 td
_ -> Nothing
}
ruleDelMedioda :: Rule
ruleDelMedioda = Rule
{ name = "del mediodía"
, pattern =
[ regex "del mediod(i|\x00ed)a"
]
, prod = \_ ->
let from = hour False 12
to = hour False 17
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleNamedday3 :: Rule
ruleNamedday3 = Rule
{ name = "named-day"
, pattern =
[ regex "mi(e|\x00e9)\\.?(rcoles)?|mx|mier?\\."
]
, prod = \_ -> tt $ dayOfWeek 3
}
ruleIntersectByDe :: Rule
ruleIntersectByDe = Rule
{ name = "intersect by `de`"
, pattern =
[ Predicate isNotLatent
, regex "de"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleTomorrow :: Rule
ruleTomorrow = Rule
{ name = "tomorrow"
, pattern =
[ regex "ma(n|\x00f1)ana"
]
, prod = \_ -> tt $ cycleNth TG.Day 1
}
ruleNthTimeDeTime :: Rule
ruleNthTimeDeTime = Rule
{ name = "nth <time> de <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "de|en"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal (OrdinalData {TOrdinal.value = v}):
Token Time td1:
_:
Token Time td2:
_) -> Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleNamedmonth9 :: Rule
ruleNamedmonth9 = Rule
{ name = "named-month"
, pattern =
[ regex "septiembre|sept?\\.?"
]
, prod = \_ -> tt $ month 9
}
ruleTimezone :: Rule
ruleTimezone = Rule
{ name = "<time> timezone"
, pattern =
[ Predicate $ liftM2 (&&) isATimeOfDay isNotLatent
, regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (tz:_)):
_) -> Token Time <$> inTimezone tz td
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleALasHourmintimeofday
, ruleALasTimeofday
, ruleAfternoon
, ruleAnoNuevo
, ruleCeTime
, ruleDatetimeDatetimeInterval
, ruleDayOfMonthSt
, ruleDayofmonthDeNamedmonth
, ruleDayofweekDayofmonth
, ruleDdddMonthinterval
, ruleDdmm
, ruleDdmmyyyy
, ruleDeDatetimeDatetimeInterval
, ruleDelMedioda
, ruleDelYear
, ruleDentroDeDuration
, ruleDimTimeDeLaManana
, ruleDimTimeDeLaTarde
, ruleElCycleAntesTime
, ruleElCycleProximoqueViene
, ruleElCycleProximoqueVieneTime
, ruleElDayofmonthDeNamedmonth
, ruleElDayofmonthNonOrdinal
, ruleElProximoCycle
, ruleElTime
, ruleEnDuration
, ruleEntreDatetimeEtDatetimeInterval
, ruleEntreDdEtDdMonthinterval
, ruleEsteenUnCycle
, ruleEvening
, ruleHaceDuration
, ruleHhhmmTimeofday
, ruleHourofdayAndRelativeMinutes
, ruleHourofdayIntegerAsRelativeMinutes
, ruleHourofdayMinusIntegerAsRelativeMinutes
, ruleInThePartofday
, ruleIntegerInThePartofday
, ruleIntersect
, ruleIntersectByDe
, ruleLaCyclePasado
, ruleLaPasadoCycle
, ruleMidnight
, ruleMorning
, ruleNCycleProximoqueViene
, ruleNPasadosCycle
, ruleNProximasCycle
, ruleNamedday
, ruleNamedday2
, ruleNamedday3
, ruleNamedday4
, ruleNamedday5
, ruleNamedday6
, ruleNamedday7
, ruleNamedmonth
, ruleNamedmonth10
, ruleNamedmonth11
, ruleNamedmonth12
, ruleNamedmonth2
, ruleNamedmonth3
, ruleNamedmonth4
, ruleNamedmonth5
, ruleNamedmonth6
, ruleNamedmonth7
, ruleNamedmonth8
, ruleNamedmonth9
, ruleNamedmonthDayofmonth
, ruleNamedmonthnameddayNext
, ruleNamedmonthnameddayPast
, ruleNavidad
, ruleNochevieja
, ruleNoon
, ruleNow
, ruleNthTimeDeTime
, ruleNthTimeDeTime2
, ruleOrdinalQuarter
, ruleOrdinalQuarterYear
, rulePasadosNCycle
, ruleProximasNCycle
, ruleRightNow
, ruleSeason
, ruleSeason2
, ruleSeason3
, ruleSeason4
, ruleTheDayAfterTomorrow
, ruleTheDayBeforeYesterday
, ruleThisDayofweek
, ruleThisPartofday
, ruleTimeofdayAmpm
, ruleTimeofdayHoras
, ruleTimeofdayLatent
, ruleTimeofdayPartofday
, ruleTomorrow
, ruleTwoTimeTokensSeparatedBy
, ruleUltimoDayofweekDeTime
, ruleWeekend
, ruleYear
, ruleYearLatent
, ruleYearLatent2
, ruleYesterday
, ruleYyyymmdd
, ruleHourofdayAndThreeQuarter
, ruleHourofdayAndHalf
, ruleHourofdayAndQuarter
, ruleHourofdayAndRelativeMinutes2
, ruleHourofdayThreeQuarter
, ruleHourofdayHalf
, ruleHourofdayQuarter
, ruleHourofdayIntegerAsRelativeMinutes2
, ruleHourofdayMinusThreeQuarter
, ruleHourofdayMinusHalf
, ruleHourofdayMinusQuarter
, ruleHourofdayMinusIntegerAsRelativeMinutes2
, ruleTimezone
]
| rfranek/duckling | Duckling/Time/ES/Rules.hs | bsd-3-clause | 38,523 | 0 | 21 | 10,279 | 11,130 | 6,027 | 5,103 | 1,241 | 2 |
{-# LINE 1 "Control.Monad.ST.Lazy.Imp.hs" #-}
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE MagicHash, UnboxedTuples, RankNTypes #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Lazy.Imp
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (requires universal quantification for runST)
--
-- This module presents an identical interface to "Control.Monad.ST",
-- except that the monad delays evaluation of state operations until
-- a value depending on them is required.
--
-----------------------------------------------------------------------------
module Control.Monad.ST.Lazy.Imp (
-- * The 'ST' monad
ST,
runST,
fixST,
-- * Converting between strict and lazy 'ST'
strictToLazyST, lazyToStrictST,
-- * Converting 'ST' To 'IO'
RealWorld,
stToIO,
-- * Unsafe operations
unsafeInterleaveST,
unsafeIOToST
) where
import Control.Monad.Fix
import qualified Control.Monad.ST as ST
import qualified Control.Monad.ST.Unsafe as ST
import qualified GHC.ST as GHC.ST
import GHC.Base
-- | The lazy state-transformer monad.
-- A computation of type @'ST' s a@ transforms an internal state indexed
-- by @s@, and returns a value of type @a@.
-- The @s@ parameter is either
--
-- * an uninstantiated type variable (inside invocations of 'runST'), or
--
-- * 'RealWorld' (inside invocations of 'stToIO').
--
-- It serves to keep the internal states of different invocations of
-- 'runST' separate from each other and from invocations of 'stToIO'.
--
-- The '>>=' and '>>' operations are not strict in the state. For example,
--
-- @'runST' (writeSTRef _|_ v >>= readSTRef _|_ >> return 2) = 2@
newtype ST s a = ST (State s -> (a, State s))
data State s = S# (State# s)
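-- A small illustration of the laziness described above (added as a sketch;
-- not part of the original module). Because '>>=' does not force the state,
-- a bottom action whose state is never demanded is simply skipped:
--
-- > lazyTwo :: Int
-- > lazyTwo = runST (undefined >> return 2) -- evaluates to 2
--
-- The strict 'Control.Monad.ST.ST' monad would diverge on the same program.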
instance Functor (ST s) where
fmap f m = ST $ \ s ->
let
ST m_a = m
(r,new_s) = m_a s
in
(f r,new_s)
instance Applicative (ST s) where
pure a = ST $ \ s -> (a,s)
(<*>) = ap
instance Monad (ST s) where
fail s = errorWithoutStackTrace s
(ST m) >>= k
= ST $ \ s ->
let
(r,new_s) = m s
ST k_a = k r
in
k_a new_s
{-# NOINLINE runST #-}
-- | Return the value computed by a state transformer computation.
-- The @forall@ ensures that the internal state used by the 'ST'
-- computation is inaccessible to the rest of the program.
runST :: (forall s. ST s a) -> a
runST st = case st of ST the_st -> let (r,_) = the_st (S# realWorld#) in r
-- | Allow the result of a state transformer computation to be used (lazily)
-- inside the computation.
-- Note that if @f@ is strict, @'fixST' f = _|_@.
fixST :: (a -> ST s a) -> ST s a
fixST m = ST (\ s ->
let
ST m_r = m r
(r,s') = m_r s
in
(r,s'))
instance MonadFix (ST s) where
mfix = fixST
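-- A small usage sketch for 'fixST' (added for illustration; not part of the
-- original module). Because the result is threaded lazily, the computation
-- may refer to its own eventual value:
--
-- > ones :: [Int]
-- > ones = runST (fixST (\xs -> return (1 : xs))) -- an infinite list of 1s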
-- ---------------------------------------------------------------------------
-- Strict <--> Lazy
{-|
Convert a strict 'ST' computation into a lazy one. The strict state
thread passed to 'strictToLazyST' is not performed until the result of
the lazy state thread it returns is demanded.
-}
strictToLazyST :: ST.ST s a -> ST s a
strictToLazyST m = ST $ \s ->
let
pr = case s of { S# s# -> GHC.ST.liftST m s# }
r = case pr of { GHC.ST.STret _ v -> v }
s' = case pr of { GHC.ST.STret s2# _ -> S# s2# }
in
(r, s')
{-|
Convert a lazy 'ST' computation into a strict one.
-}
lazyToStrictST :: ST s a -> ST.ST s a
lazyToStrictST (ST m) = GHC.ST.ST $ \s ->
case (m (S# s)) of (a, S# s') -> (# s', a #)
-- | A monad transformer embedding lazy state transformers in the 'IO'
-- monad. The 'RealWorld' parameter indicates that the internal state
-- used by the 'ST' computation is a special one supplied by the 'IO'
-- monad, and thus distinct from those used by invocations of 'runST'.
stToIO :: ST RealWorld a -> IO a
stToIO = ST.stToIO . lazyToStrictST
-- ---------------------------------------------------------------------------
-- Strict <--> Lazy
unsafeInterleaveST :: ST s a -> ST s a
unsafeInterleaveST = strictToLazyST . ST.unsafeInterleaveST . lazyToStrictST
unsafeIOToST :: IO a -> ST s a
unsafeIOToST = strictToLazyST . ST.unsafeIOToST
| phischu/fragnix | builtins/base/Control.Monad.ST.Lazy.Imp.hs | bsd-3-clause | 4,607 | 0 | 15 | 1,161 | 858 | 482 | 376 | 63 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
module Language.Why3.PP (ppTh, ppD, ppE, ppT, ppL, ppP, isOpWhy3) where
import Language.Why3.AST
import Text.PrettyPrint
import Data.Text (Text)
import qualified Data.Text as Text
ppTh :: Theory -> Doc
ppTh (Theory x ds) = text "theory" <+> ppText x
$$ vcat (map ppD ds) $$ text "end"
ppD :: Decl -> Doc
ppD decl =
case decl of
Use mb x mbAs -> text "use" <+> opt ppImpExp mb <+> ppText x
<+> opt ppAs mbAs
Goal x e -> text "goal" <+> ppText x <> colon <+> ppE e
Axiom x e -> text "axiom" <+> ppText x <> colon <+> ppE e
Lemma x e -> text "lemma" <+> ppText x <> colon <+> ppE e
Predicate x _ ts -> text "predicate" <+> ppText x
<+> fsep (map (ppPrecT 1) ts)
PredicateDef x _ ps e -> text "predicate" <+> ppText x
<+> fsep (map ppParam ps)
<+> text "=" <+> ppE e
Function x _ [] t -> text "constant" <+> ppText x <> colon <+> ppT t
Function x _ ts t -> text "function" <+> ppText x
<+> fsep (map (ppPrecT 1) ts)
<+> colon <+> ppT t
FunctionDef x _ [] t e -> text "constant" <+> ppText x
<> colon <+> ppT t
<+> text "=" <+> ppE e
FunctionDef x _ ps t e -> text "function" <+> ppText x
<+> fsep (map ppParam ps)
<+> colon <+> ppT t
<+> text "=" <+> ppE e
Type x _ tvs -> text "type" <+> ppText x <+> fsep (map ppTV tvs)
TypeDef x _ tvs ty -> text "type" <+> ppText x <+> fsep (map ppTV tvs) <+>
text "=" <+>
case ty of
Ty t -> ppT t
TyRecord fs ->
braces $ vcat $ punctuate (char ';') $ map ppF fs
TyCase tcs -> vcat $ map ppTyCaseAlt tcs
where
ppF (x,t) = ppText x <> colon <+> ppT t
ppTV (x,_) = text "'" <> ppText x
ppParam (Nothing, t) = ppPrecT 1 t
ppParam (Just x, t) = parens (ppText x <> colon <+> ppT t)
opt _ Nothing = empty
opt f (Just x) = f x
ppImpExp Import = text "import"
ppImpExp Export = text "export"
ppAs x = text "as" <+> ppText x
ppTyCaseAlt (TyCaseAlt x _ as) = text "|" <+> ppText x
<+> fsep (map ppParam as)
ppL :: Literal -> Doc
ppL lit =
case lit of
Integer n -> if n < 0 then parens (integer n) else integer n
Real x -> ppText x
Bool b -> text (if b then "true" else "false")
isOpWhy3 :: Name -> Maybe Int
isOpWhy3 x
| Text.any (`elem` op1) x = Just 1
| Text.any (`elem` op2) x = Just 2
| Text.any (`elem` op3) x = Just 3
| Text.any (`elem` op4) x = Just 4
| otherwise = Nothing
where
op1 = ['=', '<', '>', '~' ]
op2 = ['+', '-' ]
op3 = ['*', '/', '%' ]
op4 = ['!', '$', '&', '?', '@', '^', '.', ':', '|', '#' ]
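-- Illustrative evaluations (added as a sketch; not in the original file).
-- The character classes are tested in order, so any operator containing an
-- op1 character is reported as precedence 1 regardless of its other
-- characters:
--
-- > isOpWhy3 "<=" == Just 1
-- > isOpWhy3 "+"  == Just 2
-- > isOpWhy3 "*"  == Just 3
-- > isOpWhy3 "foo" == Nothing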
ppE :: Expr -> Doc
ppE = go 0
where
go prec expr =
case expr of
Lit l -> ppL l
App x [e1, e2]
| Just n <- isOpWhy3 x
, let lP = case e1 of
                           App {} -> n - 1
                           _ -> n -- (e.g., if we have `if` on the left)
-> wrap n prec (go lP e1 <+> ppText x <+> go n e2)
App "[]" [e1, e2] -> wrap 6 prec (go 5 e1 <> brackets (go 0 e2))
App "[<-]" [e1, e2, e3] ->
wrap 6 prec (go 5 e1 <> brackets (go 0 e2 <+> text "<-" <+> go 0 e3))
App x [] -> ppText x
App x es -> wrap 5 prec (ppText x <+> fsep (map (go 5) es))
Let p e1 e2 ->
wrap 1 prec (text "let" <+> ppP p <+> text "=" <+>
go 0 e1 <+> text "in" $$ go 0 e2)
If e1 e2 e3 -> wrap 1 prec
(text "if" <+> go 0 e1
$$ nest 2 (text "then" <+> go 0 e2 $$
text "else" <+> go 0 e3))
Match es alts -> wrap 1 prec
( text "match"
<+> fsep (punctuate comma (map (go 0) es))
<+> text "with"
$$ nest 2 (vcat (map ppAlt alts))
)
where ppAlt (p,e) = text "|" <+> ppP p <+> text "->" <+> go 0 e
Conn Implies _ _ ->
wrap 1 prec (vcat [ go 1 e <+> text "->" | e <- xs ]
$$ go 1 y)
where splitImp (Conn Implies e1 e2) = let (xs',y') = splitImp e2
in (e1:xs',y')
splitImp e = ([],e)
(xs,y) = splitImp expr
Conn c e1 e2 -> wrap 1 prec (go 1 e1 <+> text ct <+> go 1 e2)
where ct = case c of
And -> "/\\"
AsymAnd -> "&&"
Or -> "\\/"
AsymOr -> "||"
Implies -> "->"
Iff -> "<->"
Record fs -> braces (sep [ ppText x <+> text "=" <+> go 0 e
| (x,e) <- fs ])
RecordUpdate r fs -> braces (go 0 r <+> text "with" <+>
sep [ ppText x <+> text "=" <+> go 0 e
| (x,e) <- fs ])
Not e -> wrap 2 prec (text "not" <+> go 2 e)
Field l e -> wrap 2 prec (go 1 e <> text "." <> ppText l)
Cast e t -> wrap 1 prec (go 0 e <+> text ":" <+> ppT t)
Labeled l e -> wrap 1 prec (text (show l) <+> go 1 e)
Quant q xs trigs e -> wrap 1 prec $
qd <+> fsep (punctuate comma $ map param xs) <+> trds <> text "."
<+> go 0 e
where qd = case q of
Forall -> text "forall"
Exists -> text "exists"
param (x,t) = ppText x <> colon <+> ppT t
trds = case trigs of
[] -> empty
_ -> brackets $ fsep
$ punctuate (text "|")
$ map ppTrig trigs
ppTrig = fsep . punctuate comma . map ppE
ppP :: Pattern -> Doc
ppP = ppPrecP 0
ppPrecP :: Int -> Pattern -> Doc
ppPrecP prec pat =
case pat of
PWild -> text "_"
PVar x -> ppText x
PCon c [] -> ppText c
PCon c ps -> wrap 1 prec $ ppText c <+> fsep (map (ppPrecP 1) ps)
ppT :: Type -> Doc
ppT = ppPrecT 0
ppPrecT :: Int -> Type -> Doc
ppPrecT prec ty =
case ty of
TyCon x [] -> ppText x
TyCon x ts -> wrap 1 prec (ppText x <+> hsep (map (ppPrecT 1) ts))
TyVar a -> text "'" <> ppText a
Tuple ts -> parens (hsep $ map (ppPrecT 0) ts)
wrap :: Int -> Int -> Doc -> Doc
wrap n prec d = if prec >= n then parens d else d
ppText :: Text -> Doc
ppText = text . Text.unpack
| GaloisInc/why3 | src/Language/Why3/PP.hs | bsd-3-clause | 7,013 | 0 | 21 | 3,112 | 2,888 | 1,390 | 1,498 | 158 | 27 |
{-# LANGUAGE OverloadedStrings #-}
module Network.XMPP.TCPConnection
( TCPConnection
, openStream
, getStreamStart
, openComponent
, tagXMPPConn
)
where
import Network.XMPP.XMLParse
import Network.XMPP.XMPPConnection
import System.Log.Logger
import Network
import Control.Concurrent.MVar
import System.IO
import Data.IORef
import Data.Char (ord)
import qualified Data.ByteString.Lazy as BL
import Data.Digest.Pure.SHA (sha1, showDigest)
import Control.Exception (catch, throwIO, AssertionFailed(..))
import qualified Data.Text as T
import qualified Data.Text.IO as T
tagXMPPConn :: String
tagXMPPConn = "XMPP.Conn"
-- |An XMPP connection over TCP.
data TCPConnection = TCPConnection { handle :: Handle
, buffer :: IORef T.Text
, readLock :: MVar ()
, writeLock :: MVar ()
--, debugFile :: Maybe Handle
}
-- |Open a TCP connection to the named server, port 5222 (or others
-- found in SRV), and send a stream header.
openStream :: String -> String -> IO TCPConnection
openStream server serverName =
do
-- here we do service lookup (via SRV or A)
svcs <- getSvcServer server 5222
h <- connectStream svcs
let s = xmlToString False $
XML "stream:stream"
[("to",T.pack serverName),
("xmlns","jabber:client"),
("xmlns:stream","http://etherx.jabber.org/streams")]
[]
debugM tagXMPPConn $ "Sending : "++T.unpack s
T.hPutStr h s
buffer <- newIORef T.empty
readLock <- newMVar ()
writeLock <- newMVar ()
--debugFile <- openFile ("xx-"++show h) WriteMode
return $ TCPConnection h buffer readLock writeLock -- (Just debugFile)
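-- A hypothetical usage sketch (not part of this module; the host and domain
-- below are placeholders): open a client stream and read the stream header
-- before continuing with authentication.
--
-- > example :: IO ()
-- > example = do
-- >   c <- openStream "localhost" "example.com"
-- >   streamStart <- getStreamStart c
-- >   print (getAttr "id" streamStart)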
openComponent :: String -> Int -> String -> String -> IO TCPConnection
openComponent server port compName secret =
do
svcs <- getSvcServer server port
h <- connectStream svcs
let s = xmlToString False $
XML "stream:stream"
[("to", T.pack compName),
("xmlns","jabber:component:accept"),
("xmlns:stream","http://etherx.jabber.org/streams")]
[]
debugM tagXMPPConn $ "Sending : "++T.unpack s
T.hPutStr h s
buffer <- newIORef T.empty
readLock <- newMVar ()
writeLock <- newMVar ()
let c = TCPConnection h buffer readLock writeLock -- Nothing
e <- getStreamStart c
debugM tagXMPPConn $ "Got : "++show e
let from = maybe "" id (getAttr "from" e)
let idStr = maybe "" id (getAttr "id" e)
if from==T.pack compName && not (T.null idStr)
then doHandshake c idStr secret
else error "from mismatch"
return c
where
doHandshake c idStr secret = do
let digest = showDigest . sha1 . BL.pack . map (fromIntegral . ord) $ T.unpack idStr++secret
debugM tagXMPPConn $ "digest="++digest
sendStanza c $ XML "handshake" [] [CData $ T.pack digest]
s <- getStanzas c
debugM tagXMPPConn $ "got handshake response : "++show s
getSvcServer :: String -> Int -> IO [(String, PortID)]
getSvcServer domain port = return [(domain,PortNumber $ toEnum port)]
connectStream :: [(String, PortID)] -> IO Handle
connectStream [] = error "can't connect: no suitable servers found"
connectStream (x:xs) =
Control.Exception.catch
(connectStream' x)
(\e -> putStrLn ("e="++show (e :: IOError)) >> connectStream xs)
connectStream' :: (String, PortID) -> IO Handle
connectStream' (host, port) = do
debugM tagXMPPConn $ "Trying connectTo : "++host -- ++" : "++show port
s <- connectTo host port
hSetBuffering s NoBuffering
hSetEncoding s utf8
enc <- hGetEncoding s
debugM tagXMPPConn $ "Connected, encoding : "++show enc
return s
-- |Get the stream header that the server sent. This needs to be
-- called before doing anything else with the stream.
getStreamStart :: TCPConnection -> IO XMLElem
getStreamStart c =
parseBuffered c xmppStreamStart
withLock :: MVar () -> IO a -> IO a
withLock mvar a = withMVar mvar $ \_ -> a
instance XMPPConnection TCPConnection where
getStanzas c = withLock (readLock c) $ do x <- parseBuffered c deepTag ; return [x] -- FIXME
sendStanza c x =
let str = xmlToString True x
in withLock (writeLock c) $ do
debugM tagXMPPConn $ "sent '" ++ T.unpack str ++ "'"
T.hPutStr (handle c) str
closeConnection c =
hClose (handle c)
parseBuffered :: (Show a) => TCPConnection -> Parser a -> IO a
parseBuffered c parser = do
buf <- readIORef (buffer c)
go (parse parser) buf
where
readMore = getString (handle c)
-- go :: (T.Text -> IResult T.Text a) -> T.Text -> IO a
go p buf1 = do
buf <- if T.null buf1
then readMore
else return buf1
debugM tagXMPPConn $ "got '" ++ T.unpack buf ++ "'"
case p buf of
Fail rest _ctxt msg -> do warningM tagXMPPConn $ "An error! Throwing exception : "++msg
writeIORef (buffer c) rest
throwIO (AssertionFailed "Protocol error")
--parseBuffered c parser
Done rest result -> do writeIORef (buffer c) rest
return result
Partial cont -> go cont =<< readMore
getString :: Handle -> IO T.Text
getString h = T.hGetChunk h
{-
debugLog debugH m = case debugH of
Nothing -> return ()
Just debugH -> hPutStr debugH m >> hFlush debugH
-}
| drpowell/XMPP | Network/XMPP/TCPConnection.hs | bsd-3-clause | 5,945 | 0 | 17 | 1,937 | 1,602 | 790 | 812 | 124 | 4 |
-- | Simulates the @isUnicodeIdentifierStart@ Java method. <http://docs.oracle.com/javase/6/docs/api/java/lang/Character.html#isUnicodeIdentifierStart%28int%29>
module Language.Java.Character.IsUnicodeIdentifierStart
(
IsUnicodeIdentifierStart(..)
) where
import Data.Char
import Data.Word
import Data.Set.Diet(Diet)
import qualified Data.Set.Diet as S
-- | Instances simulate Java characters and provide a decision on simulating @isUnicodeIdentifierStart@.
class Enum c => IsUnicodeIdentifierStart c where
isUnicodeIdentifierStart ::
c
-> Bool
isNotUnicodeIdentifierStart ::
c
-> Bool
isNotUnicodeIdentifierStart =
not . isUnicodeIdentifierStart
instance IsUnicodeIdentifierStart Char where
isUnicodeIdentifierStart c =
ord c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Int where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Integer where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Word8 where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Word16 where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Word32 where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
instance IsUnicodeIdentifierStart Word64 where
isUnicodeIdentifierStart c =
c `S.member` isUnicodeIdentifierStartSet
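-- Illustrative evaluations (added as a sketch; not in the original module):
-- letters are Unicode identifier-start characters, while digits are not.
--
-- > isUnicodeIdentifierStart 'a' == True
-- > isUnicodeIdentifierStart 'Z' == True
-- > isUnicodeIdentifierStart '7' == False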
isUnicodeIdentifierStartSet ::
(Num a, Enum a, Ord a) =>
Diet a
isUnicodeIdentifierStartSet =
let r = [
[65..90]
, [97..122]
, [170]
, [181]
, [186]
, [192..214]
, [216..246]
, [248..566]
, [592..705]
, [710..721]
, [736..740]
, [750]
, [890]
, [902]
, [904..906]
, [908]
, [910..929]
, [931..974]
, [976..1013]
, [1015..1019]
, [1024..1153]
, [1162..1230]
, [1232..1269]
, [1272..1273]
, [1280..1295]
, [1329..1366]
, [1369]
, [1377..1415]
, [1488..1514]
, [1520..1522]
, [1569..1594]
, [1600..1610]
, [1646..1647]
, [1649..1747]
, [1749]
, [1765..1766]
, [1774..1775]
, [1786..1788]
, [1791]
, [1808]
, [1810..1839]
, [1869..1871]
, [1920..1957]
, [1969]
, [2308..2361]
, [2365]
, [2384]
, [2392..2401]
, [2437..2444]
, [2447..2448]
, [2451..2472]
, [2474..2480]
, [2482]
, [2486..2489]
, [2493]
, [2524..2525]
, [2527..2529]
, [2544..2545]
, [2565..2570]
, [2575..2576]
, [2579..2600]
, [2602..2608]
, [2610..2611]
, [2613..2614]
, [2616..2617]
, [2649..2652]
, [2654]
, [2674..2676]
, [2693..2701]
, [2703..2705]
, [2707..2728]
, [2730..2736]
, [2738..2739]
, [2741..2745]
, [2749]
, [2768]
, [2784..2785]
, [2821..2828]
, [2831..2832]
, [2835..2856]
, [2858..2864]
, [2866..2867]
, [2869..2873]
, [2877]
, [2908..2909]
, [2911..2913]
, [2929]
, [2947]
, [2949..2954]
, [2958..2960]
, [2962..2965]
, [2969..2970]
, [2972]
, [2974..2975]
, [2979..2980]
, [2984..2986]
, [2990..2997]
, [2999..3001]
, [3077..3084]
, [3086..3088]
, [3090..3112]
, [3114..3123]
, [3125..3129]
, [3168..3169]
, [3205..3212]
, [3214..3216]
, [3218..3240]
, [3242..3251]
, [3253..3257]
, [3261]
, [3294]
, [3296..3297]
, [3333..3340]
, [3342..3344]
, [3346..3368]
, [3370..3385]
, [3424..3425]
, [3461..3478]
, [3482..3505]
, [3507..3515]
, [3517]
, [3520..3526]
, [3585..3632]
, [3634..3635]
, [3648..3654]
, [3713..3714]
, [3716]
, [3719..3720]
, [3722]
, [3725]
, [3732..3735]
, [3737..3743]
, [3745..3747]
, [3749]
, [3751]
, [3754..3755]
, [3757..3760]
, [3762..3763]
, [3773]
, [3776..3780]
, [3782]
, [3804..3805]
, [3840]
, [3904..3911]
, [3913..3946]
, [3976..3979]
, [4096..4129]
, [4131..4135]
, [4137..4138]
, [4176..4181]
, [4256..4293]
, [4304..4344]
, [4352..4441]
, [4447..4514]
, [4520..4601]
, [4608..4614]
, [4616..4678]
, [4680]
, [4682..4685]
, [4688..4694]
, [4696]
, [4698..4701]
, [4704..4742]
, [4744]
, [4746..4749]
, [4752..4782]
, [4784]
, [4786..4789]
, [4792..4798]
, [4800]
, [4802..4805]
, [4808..4814]
, [4816..4822]
, [4824..4846]
, [4848..4878]
, [4880]
, [4882..4885]
, [4888..4894]
, [4896..4934]
, [4936..4954]
, [5024..5108]
, [5121..5740]
, [5743..5750]
, [5761..5786]
, [5792..5866]
, [5870..5872]
, [5888..5900]
, [5902..5905]
, [5920..5937]
, [5952..5969]
, [5984..5996]
, [5998..6000]
, [6016..6067]
, [6103]
, [6108]
, [6176..6263]
, [6272..6312]
, [6400..6428]
, [6480..6509]
, [6512..6516]
, [7424..7531]
, [7680..7835]
, [7840..7929]
, [7936..7957]
, [7960..7965]
, [7968..8005]
, [8008..8013]
, [8016..8023]
, [8025]
, [8027]
, [8029]
, [8031..8061]
, [8064..8116]
, [8118..8124]
, [8126]
, [8130..8132]
, [8134..8140]
, [8144..8147]
, [8150..8155]
, [8160..8172]
, [8178..8180]
, [8182..8188]
, [8305]
, [8319]
, [8450]
, [8455]
, [8458..8467]
, [8469]
, [8473..8477]
, [8484]
, [8486]
, [8488]
, [8490..8493]
, [8495..8497]
, [8499..8505]
, [8509..8511]
, [8517..8521]
, [8544..8579]
, [12293..12295]
, [12321..12329]
, [12337..12341]
, [12344..12348]
, [12353..12438]
, [12445..12447]
, [12449..12538]
, [12540..12543]
, [12549..12588]
, [12593..12686]
, [12704..12727]
, [12784..12799]
, [13312..19893]
, [19968..40869]
, [40960..42124]
, [44032..55203]
, [63744..64045]
, [64048..64106]
, [64256..64262]
, [64275..64279]
, [64285]
, [64287..64296]
, [64298..64310]
, [64312..64316]
, [64318]
, [64320..64321]
, [64323..64324]
, [64326..64433]
, [64467..64829]
, [64848..64911]
, [64914..64967]
, [65008..65019]
, [65136..65140]
, [65142..65276]
, [65313..65338]
, [65345..65370]
, [65382..65470]
, [65474..65479]
, [65482..65487]
, [65490..65495]
, [65498..65500]
, [65536..65547]
, [65549..65574]
, [65576..65594]
, [65596..65597]
, [65599..65613]
, [65616..65629]
, [65664..65786]
, [66304..66334]
, [66352..66378]
, [66432..66461]
, [66560..66717]
, [67584..67589]
, [67592]
, [67594..67637]
, [67639..67640]
, [67644]
, [67647]
, [119808..119892]
, [119894..119964]
, [119966..119967]
, [119970]
, [119973..119974]
, [119977..119980]
, [119982..119993]
, [119995]
, [119997..120003]
, [120005..120069]
, [120071..120074]
, [120077..120084]
, [120086..120092]
, [120094..120121]
, [120123..120126]
, [120128..120132]
, [120134]
, [120138..120144]
, [120146..120483]
, [120488..120512]
, [120514..120538]
, [120540..120570]
, [120572..120596]
, [120598..120628]
, [120630..120654]
, [120656..120686]
, [120688..120712]
, [120714..120744]
, [120746..120770]
, [120772..120777]
, [131072..173782]
, [194560..195101]
]
in S.fromList . concat $ r | tonymorris/java-character | src/Language/Java/Character/IsUnicodeIdentifierStart.hs | bsd-3-clause | 9,723 | 0 | 10 | 4,189 | 2,818 | 1,754 | 1,064 | 371 | 1 |
module ControllerService
( controller
, PacketIn
) where
import Prelude hiding (catch)
import Base
import Data.Map (Map)
import MacLearning (PacketOutChan)
import qualified NIB
import qualified NIB2
import qualified Nettle.OpenFlow as OF
import Nettle.OpenFlow.Switch (showSwID)
import qualified Nettle.Servers.Server as OFS
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
import System.Process
import System.Exit
import Network.Socket as Skt
import qualified System.Log.Logger as Logger
import System.Log.Logger.TH (deriveLoggers)
$(deriveLoggers "Logger" [Logger.DEBUG, Logger.NOTICE, Logger.WARNING,
Logger.ERROR])
type PacketIn = (OF.TransactionID, Integer, OF.SwitchID, OF.PacketInfo)
controller :: Chan NIB.Snapshot -- ^input channel (from Compiler)
-> Chan NIB.Msg -- ^output channel (headed to NIB module)
-> Chan NIB2.Msg -- ^output channel (headed to NIB2 module)
-> Chan PacketIn -- ^output channel (headed to MAC Learning)
-> Chan (OF.SwitchID, Bool) -- ^output channel (for MAC Learning;
-- switches connecting & disconnecting)
-> PacketOutChan -- ^input channel (from MAC Learning)
-> PaneConfig
-> IO ()
controller nibSnapshot toNIB toNIB2 packets switches pktOut config = do
server <- OFS.startOpenFlowServer Nothing (controllerPort config)
-- actually send packets sent by MAC learning module
forkIO $ forever $ do
(swID, xid, pktOut) <- readChan pktOut
debugM $ "SEND packet-out" ++ show (OF.bufferIDData pktOut)
killOnExns "send pkt from controller"
(OFS.sendToSwitchWithID server swID (xid, OF.PacketOut pktOut))
-- no-op reader of the original copy of the nibSnapshot channel
forkIO $ forever $ do
readChan nibSnapshot
-- process new switches
forever $ do
(switch, switchFeatures) <- retryOnExns "accept switch"
(OFS.acceptSwitch server)
noticeM $ "OpenFlow controller connected to new switch."
writeChan toNIB (NIB.NewSwitch switch switchFeatures)
writeChan toNIB2 (NIB2.NewSwitch switch switchFeatures)
writeChan switches (OFS.handle2SwitchID switch, True)
-- Disable Nagle's algorithm on this socket since we are sometimes seeing
-- junk at the end of Controller -> Switch messages. Since it's not clear
-- where this is coming from, let's eliminate the kernel's buffers as a
-- source of confusion. It's most likely that Nettle is not well-behaved
-- when we have multiple hardware threads; still, setting NO_DELAY will
-- hopefully cut-down on the problems we were seeing. If we still see them,
-- we should change sendToSwitch & Strict.runPtr in Nettle to check that
-- they send the same number of bytes as in the OpenFlow header's len field
Skt.setSocketOption (OFS.switchSocket switch) Skt.NoDelay 1 -- Disable Nagle
nibSnapshot <- dupChan nibSnapshot
configThreadId <- forkIO (configureSwitch nibSnapshot switch NIB.emptySwitch config)
forkIO (handleSwitch packets toNIB toNIB2 switches switch configThreadId)
ignoreExns "stats request" $
OFS.sendToSwitch switch (0, OF.StatsRequest OF.DescriptionRequest)
OFS.closeServer server
--
-- Functions to handle messages from switches
--
handleSwitch :: Chan PacketIn -- ^output channel (headed to MAC Learning)
-> Chan NIB.Msg -- ^output channel (headed to NIB module)
-> Chan NIB2.Msg -- ^output channel (headed to NIB module)
-> Chan (OF.SwitchID, Bool) -- ^output channel (for MAC Learning;
-- switches connecting & disconnecting)
-> OFS.SwitchHandle
-> ThreadId -- ^ ThreadId of the configuration thread
-> IO ()
handleSwitch packets toNIB toNIB2 switches switch configThreadId = do
let swID = OFS.handle2SwitchID switch
killOnExns ("clear flowtable on switch with ID: " ++ showSwID swID)
(OFS.sendToSwitch switch
(0, OF.FlowMod $ OF.DeleteFlows OF.matchAny Nothing))
OFS.untilNothing
(retryOnExns ("receive from switch with ID: " ++ showSwID swID)
(OFS.receiveFromSwitch switch))
(\msg -> ignoreExns "msgHandler" (messageHandler packets toNIB toNIB2 switch msg))
ignoreExns ("close handle for switch with ID: " ++ showSwID swID)
(OFS.closeSwitchHandle switch)
writeChan switches (swID, False)
-- TODO(adf): also inform NIB that switch is gone? could be transient...
noticeM $ "Connection to switch " ++ showSwID swID ++ " closed. Killing config thread."
killThread configThreadId
messageHandler :: Chan PacketIn -- ^output channel (headed to MAC Learning)
-> Chan NIB.Msg -- ^output channel (headed to NIB module)
-> Chan NIB2.Msg -- ^output channel (headed to NIB module)
-> OFS.SwitchHandle
-> (OF.TransactionID, OF.SCMessage) -- ^coming from Nettle
-> IO ()
messageHandler packets toNIB toNIB2 switch (xid, msg) = case msg of
OF.PacketIn pkt -> do
now <- readIORef sysTime
writeChan packets (xid, now, OFS.handle2SwitchID switch, pkt)
writeChan toNIB (NIB.PacketIn (OFS.handle2SwitchID switch) pkt)
writeChan toNIB2 (NIB2.PacketIn (OFS.handle2SwitchID switch) pkt)
OF.StatsReply pkt -> do
writeChan toNIB (NIB.StatsReply (OFS.handle2SwitchID switch) pkt)
writeChan toNIB2 (NIB2.StatsReply (OFS.handle2SwitchID switch) pkt)
OF.PortStatus pkt -> do
writeChan toNIB2 (NIB2.PortStatus (OFS.handle2SwitchID switch) pkt)
otherwise -> do
warningM $ "unhandled message from switch " ++
(showSwID $ OFS.handle2SwitchID switch) ++ "\n" ++ show msg
return ()
--
-- Functions to reconfigure switches
--
-- |Block until new snapshot appears, then reconfigure switch based
-- on updated NIB.
configureSwitch :: Chan NIB.Snapshot -- ^input channel (from the Compiler)
-> OFS.SwitchHandle
-> NIB.Switch
-> PaneConfig
-> IO ()
configureSwitch nibSnapshot switchHandle oldSw@(NIB.Switch oldPorts oldTbl _)
config = do
let switchID = OFS.handle2SwitchID switchHandle
snapshot <- readChan nibSnapshot
case Map.lookup switchID snapshot of
Nothing -> do
errorM $ "configureSwitch did not find " ++ showSwID switchID ++
" in the NIB snapshot."
configureSwitch nibSnapshot switchHandle oldSw config
Just sw@(NIB.Switch newPorts newTbl swType) -> do
now <- readIORef sysTime
let (portActions, deleteQueueTimers, msgs') =
case swType of
NIB.ReferenceSwitch -> mkPortModsExt now oldPorts newPorts
(OFS.sendToSwitch switchHandle)
NIB.OpenVSwitch -> mkPortModsOVS now oldPorts newPorts
switchID config
NIB.ProntoSwitch -> mkPortModsExt now oldPorts newPorts
(OFS.sendToSwitch switchHandle)
otherwise -> (errorM $ "Don't know how to create "
++ "queues for " ++ show swType,
return(), [])
let msgs = msgs' ++ mkFlowMods now newTbl oldTbl
unless (null msgs) $ do
debugM $ "Controller modifying tables on " ++ showSwID switchID
debugM $ "sending " ++ show (length msgs) ++ " messages; "
++ "oldTbl size = " ++ show (Set.size oldTbl) ++
" newTbl size = " ++ show (Set.size newTbl)
mapM_ (\x -> debugM $ " " ++ show x) msgs
debugM "-------------------------------------------------"
return ()
-- TODO(adf): should do something smarter here than silently ignoring
-- exceptions while writing config to switch...
portActions
killOnExns ("configuring switch with ID: " ++ showSwID switchID)
(mapM_ (OFS.sendToSwitch switchHandle) (zip [0 ..] msgs))
deleteQueueTimers
configureSwitch nibSnapshot switchHandle sw config
mkFlowMods :: Integer
-> NIB.FlowTbl
-> NIB.FlowTbl
-> [OF.CSMessage]
mkFlowMods now newTbl oldTbl = map OF.FlowMod (delMsgs ++ addMsgs)
where delMsgs = mapMaybe mkDelFlow (Set.toList oldRules)
addMsgs = mapMaybe mkAddFlow (Set.toList newRules)
mkAddFlow (prio, match, acts, expiry) = case expiry <= fromInteger now of
True -> Nothing -- rule is expiring
False ->
Just (OF.AddFlow {
OF.match = match,
OF.priority = prio,
OF.actions = acts,
OF.cookie = 0,
OF.idleTimeOut = OF.Permanent,
OF.hardTimeOut = toTimeout now expiry ,
OF.notifyWhenRemoved = False,
OF.applyToPacket = Nothing,
OF.overlapAllowed = True
})
mkDelFlow (prio, match, _, expiry) = case expiry <= fromInteger now of
True -> Nothing -- rule would've been automatically deleted by switch
False -> Just (OF.DeleteExactFlow match Nothing prio)
newRules = Set.difference newTbl oldTbl
oldRules = Set.difference oldTbl newTbl
-- |We cannot have queues automatically expire with the slicing extension.
-- So, we return an action that sets up timers to delete queues.
mkPortModsExt :: Integer
-> Map OF.PortID NIB.PortCfg
-> Map OF.PortID NIB.PortCfg
-> ((OF.TransactionID, OF.CSMessage) -> IO ())
-> (IO (), IO (), [OF.CSMessage])
mkPortModsExt now portsNow portsNext sendCmd = (addActions, delTimers, addMsgs)
where addActions = return ()
addMsgs = map newQueueMsg newQueues
delTimers = sequence_ (map delQueueAction newQueues)
newQueueMsg ((pid, qid), NIB.Queue (OF.Enabled resv) OF.Disabled _) =
OF.ExtQueueModify pid
[OF.QueueConfig qid [OF.MinRateQueue (OF.Enabled (
translateRate resv))]]
newQueueMsg ((pid, qid), NIB.Queue OF.Disabled (OF.Enabled rlimit) _) =
OF.ExtQueueModify pid
[OF.QueueConfig qid [OF.MaxRateQueue (OF.Enabled (
translateRate rlimit))]]
delQueueAction ((_, _), NIB.Queue _ _ NoLimit) = return ()
delQueueAction ((pid, qid), NIB.Queue _ _ (DiscreteLimit end)) = do
forkIO $ do
threadDelay (10^6 * (fromIntegral $ end - now))
debugM $ "Deleting queue " ++ show qid ++ " on port " ++ show pid
ignoreExns ("deleting queue " ++ show qid)
(sendCmd (0, OF.ExtQueueDelete pid [OF.QueueConfig qid []]))
return ()
qCmpLeft ql qr = if ql == qr then Nothing else (Just ql)
newQueues = Map.toList $
Map.differenceWith qCmpLeft (flatten portsNext) (flatten portsNow)
flatten portMap = Map.fromList $
concatMap (\(pid, NIB.PortCfg qMap) ->
map (\(qid, q) -> ((pid, qid), q)) (Map.toList qMap))
(Map.toList portMap)
-- |We cannot have queues automatically expire with Open vSwitch, either.
-- So, we return an action that sets up timers to delete queues.
mkPortModsOVS :: Integer
-> Map OF.PortID NIB.PortCfg
-> Map OF.PortID NIB.PortCfg
-> OF.SwitchID
-> PaneConfig
-> (IO (), IO (), [OF.CSMessage])
mkPortModsOVS now portsNow portsNext swid config =
(addActions, delTimers, addMsgs)
where addMsgs = [] -- No OpenFlow messages needed
addActions = sequence_ (map newQueueAction newQueues)
delTimers = sequence_ (map delQueueAction newQueues)
newQueueAction ((pid, qid), NIB.Queue (OF.Enabled resv) OF.Disabled _) =
runOVSscript "create" (ovsSetQueue config) swid pid qid resv 0
newQueueAction ((pid, qid), NIB.Queue OF.Disabled (OF.Enabled rlimit) _) =
runOVSscript "create" (ovsSetQueue config) swid pid qid 0 rlimit
delQueueAction ((_, _), NIB.Queue _ _ NoLimit) = return ()
delQueueAction ((pid, qid), NIB.Queue _ _ (DiscreteLimit end)) = do
forkIO $ do
threadDelay (10^6 * (fromIntegral $ end - now))
runOVSscript "delete" (ovsDeleteQueue config) swid pid qid 0 0
return()
return ()
qCmpLeft ql qr = if ql == qr then Nothing else (Just ql)
newQueues = Map.toList $
Map.differenceWith qCmpLeft (flatten portsNext) (flatten portsNow)
flatten portMap = Map.fromList $
concatMap (\(pid, NIB.PortCfg qMap) ->
map (\(qid, q) -> ((pid, qid), q)) (Map.toList qMap))
(Map.toList portMap)
-- |Helper to handle fork'ing out to run the scripts which know how
-- to configure Open vSwitch-based switches.
runOVSscript desc script swid pid qid resv rlimit = do
debugM $ desc ++ " queue " ++ show qid ++ " on port " ++ show pid
++ " switch " ++ show swid
exitcode <- rawSystem script [show swid, show pid, show qid,
show resv, show rlimit]
case exitcode of
ExitSuccess -> return ()
ExitFailure n -> noticeM $ "Exception (ignoring): failed to " ++ desc
++ " OVS queue: " ++ show swid ++ " " ++
show pid ++ " " ++ show qid ++
"; ExitFailure: " ++ show n
-- TODO(arjun): toTimeout will fail if (end - now) does not fit in a Word16
toTimeout :: Integer -> Limit -> OF.TimeOut
toTimeout _ NoLimit =
OF.Permanent
toTimeout now (DiscreteLimit end) =
OF.ExpireAfter (fromInteger (end - fromInteger now))
-- Assuming the total link bandwidth is 1000 Mbps and r is a rate in Mbps,
-- returns the rate in tenths of a percent, which the Reference switch
-- uses as the guaranteed minimum bandwidth for a queue.
--
-- TODO(adf): should pull link speed from NIB, rather than assume 1000Mbps
translateRate :: Word16 -> Word16
translateRate r =
let linkSpeed = 1000
in truncate $ ((toRational r) / linkSpeed) * 1000
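-- A quick worked example of the conversion above: translateRate 100
-- (100 Mbps of an assumed 1000 Mbps link) yields 100, i.e. 100 tenths
-- of a percent, or 10% of the link.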
| brownsys/pane | src/ControllerService.hs | bsd-3-clause | 14,376 | 0 | 23 | 4,132 | 3,558 | 1,817 | 1,741 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleInstances #-}
module Language.Nano.Errors where
import Debug.Trace
import Text.Printf
import Text.PrettyPrint.HughesPJ
import Language.ECMAScript3.PrettyPrint
bugBadPhi l t1s t2s = printf "BUG: Unbalanced Phi at %s \n %s \n %s" (ppshow l) (ppshow t1s) (ppshow t2s)
bugBadSubtypes x = printf "BUG: Unexpected Subtyping Constraint \n %s" (ppshow x)
bugUnboundPhiVar x = printf "BUG: Phi Variable %s is unbound" (ppshow x)
bugUnboundVariable l x = printf "BUG: Variable %s is unbound in environment at %s" (ppshow x) (ppshow l)
bugMissingTypeArgs l = printf "BUG: Missing Type Arguments at %s" (ppshow l)
errorArgName l x y = printf "Wrong Parameter Name at %s: Saw %s but Expected %s" (ppshow l) (ppshow x) (ppshow y)
errorMissingSpec l f = printf "Missing Signature For %s defined at %s" (ppshow f) (ppshow l)
errorDuplicate i l l' = printf "Duplicate Specification for %s:\n %s \n %s" (ppshow i) (ppshow l) (ppshow l')
errorArgMismatch = printf "Mismatch in Number of Args in Call"
errorNonFunction f t = printf "Non-function type for %s :: %s" (ppshow f) (ppshow t)
errorUnboundId x = printf "Identifier %s unbound" (ppshow x)
errorUnboundIdEnv x t = printf "ZOGBERT Identifier %s unbound in %s" (ppshow x) (ppshow t)
errorWrongType m e t t' = printf "%s -- unexpected type for %s :: %s expected %s" m (ppshow e) (ppshow t) (ppshow t')
errorJoin x t t' = printf "Conflicting join for %s \n %s\n %s" (ppshow x) (ppshow t) (ppshow t')
errorUnification t t' = printf "Cannot unify types: %s and %s" (ppshow t) (ppshow t')
errorBoundTyVar a t = printf "Cannot unify bound type parameter %s with %s" (ppshow a) (ppshow t)
errorFreeTyVar t = printf "Type not fully instantiated: %s" (ppshow t)
errorWriteImmutable x = printf "Cannot write immutable: %s" (ppshow x)
errorInvalidTopStmt x = printf "Invalid top-level statement: %s" (ppshow x)
errorOccursCheck a t = printf "Occurs check fails: %s in %s" (ppshow a) (ppshow t)
errorRigidUnify a t = printf "Cannot unify rigid variable %s with %s" (ppshow a) (ppshow t)
ppshow = render . pp
tracePP :: (PP a) => String -> a -> a
tracePP s x = trace (printf "\nTrace: [%s]: %s" s (ppshow x)) x
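-- For instance (a sketch): @tracePP "env" x@ returns @x@ unchanged while
-- printing a line like @Trace: [env]: ...@, where the rendering of @x@
-- comes from its 'PP' instance.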
instance PP a => PP (Either String a) where
pp (Left s) = text $ "ERROR!" ++ s
pp (Right x) = pp x
| UCSD-PL/nano-js | Language/Nano/Errors.hs | bsd-3-clause | 2,435 | 0 | 9 | 527 | 727 | 359 | 368 | 34 | 1 |
module Paths_language_c_quote_utils (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import Data.Version (Version(..))
import System.Environment (getEnv)
version :: Version
version = Version {versionBranch = [0,0,0,1], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
bindir = "/Users/hi5networks/.cabal/bin"
libdir = "/Users/hi5networks/.cabal/lib/language-c-quote-utils-0.0.0.1/ghc-7.2.2"
datadir = "/Users/hi5networks/.cabal/share/language-c-quote-utils-0.0.0.1"
libexecdir = "/Users/hi5networks/.cabal/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
getBinDir = catch (getEnv "language_c_quote_utils_bindir") (\_ -> return bindir)
getLibDir = catch (getEnv "language_c_quote_utils_libdir") (\_ -> return libdir)
getDataDir = catch (getEnv "language_c_quote_utils_datadir") (\_ -> return datadir)
getLibexecDir = catch (getEnv "language_c_quote_utils_libexecdir") (\_ -> return libexecdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| jfischoff/language-c-quote-utils | dist/build/autogen/Paths_language_c_quote_utils.hs | bsd-3-clause | 1,119 | 0 | 10 | 144 | 283 | 163 | 120 | 22 | 1 |
import Network
import Control.Concurrent
import System.IO
main = withSocketsDo $ do
sock <- listenOn $ PortNumber 5002
loop sock
loop sock = do
(h,_,_) <- accept sock
forkIO $ body h
loop sock
where
body h = do
hPutStr h msg
hFlush h
hClose h
msg = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\n\r\nHello, World!"
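-- To try the server (on the port configured above):
-- $ curl http://localhost:5002/
-- which should print "Hello, World!" before the server closes the connection.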
| aycanirican/hlibev | Examples/BasicConcurrent.hs | bsd-3-clause | 358 | 0 | 10 | 100 | 121 | 56 | 65 | 15 | 1 |
-- https://www.codewars.com/kata/number-of-trailing-zeros-of-n
module Zeros where
zeros :: Int -> Int
zeros n = h n 0
where
h n z = let r = div n 5 in if r == 0 then z else h r (z + r)
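-- A few sample values: zeros 12 == 2 (12! = 479001600 has two trailing
-- zeros), zeros 25 == 6, and zeros 100 == 24.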
| airtial/Codegames | codewars/number-of-trailing-zeros-of-n.hs | gpl-2.0 | 193 | 0 | 12 | 48 | 79 | 41 | 38 | 4 | 2 |
{-|
hledger-ui - a hledger add-on providing a curses-style interface.
Copyright (c) 2007-2015 Simon Michael <simon@joyful.com>
Released under GPL version 3 or later.
-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Hledger.UI.Main where
import Control.Applicative ((<|>))
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async (withAsync)
import Control.Monad (forM_, void, when)
import Data.List (find)
import Data.List.Extra (nubSort)
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Graphics.Vty (mkVty, Mode (Mouse), Vty (outputIface), Output (setMode))
import Lens.Micro ((^.))
import System.Directory (canonicalizePath)
import System.FilePath (takeDirectory)
import System.FSNotify (Event(Modified), isPollingManager, watchDir, withManager)
import Brick
import qualified Brick.BChan as BC
import Hledger
import Hledger.Cli hiding (progname,prognameandversion)
import Hledger.UI.UIOptions
import Hledger.UI.UITypes
import Hledger.UI.Theme
import Hledger.UI.AccountsScreen
import Hledger.UI.RegisterScreen
----------------------------------------------------------------------
newChan :: IO (BC.BChan a)
newChan = BC.newBChan 10
writeChan :: BC.BChan a -> a -> IO ()
writeChan = BC.writeBChan
main :: IO ()
main = do
opts@UIOpts{uoCliOpts=copts@CliOpts{inputopts_=iopts,rawopts_=rawopts}} <- getHledgerUIOpts
-- when (debug_ $ cliopts_ opts) $ printf "%s\n" prognameandversion >> printf "opts: %s\n" (show opts)
-- always generate forecasted periodic transactions; their visibility will be toggled by the UI.
let copts' = copts{inputopts_=iopts{forecast_=forecast_ iopts <|> Just nulldatespan}}
case True of
_ | "help" `inRawOpts` rawopts -> putStr (showModeUsage uimode)
_ | "info" `inRawOpts` rawopts -> runInfoForTopic "hledger-ui" Nothing
_ | "man" `inRawOpts` rawopts -> runManForTopic "hledger-ui" Nothing
_ | "version" `inRawOpts` rawopts -> putStrLn prognameandversion
-- _ | "binary-filename" `inRawOpts` rawopts -> putStrLn (binaryfilename progname)
_ -> withJournalDo copts' (runBrickUi opts)
runBrickUi :: UIOpts -> Journal -> IO ()
runBrickUi uopts@UIOpts{uoCliOpts=copts@CliOpts{inputopts_=_iopts,reportspec_=rspec@ReportSpec{_rsReportOpts=ropts}}} j = do
let
today = copts^.rsDay
-- hledger-ui's query handling is currently in flux, mixing old and new approaches.
-- Related: #1340, #1383, #1387. Some notes and terminology:
-- The *startup query* is the Query generated at program startup, from
-- command line options, arguments, and the current date. hledger CLI
-- uses this.
-- hledger-ui/hledger-web allow the query to be changed at will, creating
-- a new *runtime query* each time.
-- The startup query or part of it can be used as a *constraint query*,
-- limiting all runtime queries. hledger-web does this with the startup
-- report period, never showing transactions outside those dates.
-- hledger-ui does not do this.
-- A query is a combination of multiple subqueries/terms, which are
-- generated from command line options and arguments, ui/web app runtime
-- state, and/or the current date.
-- Some subqueries are generated by parsing freeform user input, which
-- can fail. We don't want hledger users to see such failures except:
-- 1. at program startup, in which case the program exits
-- 2. after entering a new freeform query in hledger-ui/web, in which case
-- the change is rejected and the program keeps running
-- So we should parse those kinds of subquery only at those times. Any
-- subqueries which do not require parsing can be kept separate. And
-- these can be combined to make the full query when needed, eg when
-- hledger-ui screens are generating their data. (TODO)
-- Some parts of the query are also kept separate for UI reasons.
-- hledger-ui provides special UI for controlling depth (number keys),
-- the report period (shift arrow keys), realness/status filters (RUPC keys) etc.
-- There is also a freeform text area for extra query terms (/ key).
-- It's cleaner and less conflicting to keep the former out of the latter.
uopts' = uopts{
uoCliOpts=copts{
reportspec_=rspec{
_rsQuery=filteredQuery $ _rsQuery rspec, -- query with depth/date parts removed
_rsReportOpts=ropts{
depth_ =queryDepth $ _rsQuery rspec, -- query's depth part
period_=periodfromoptsandargs, -- query's date part
no_elide_=True, -- avoid squashing boring account names, for a more regular tree (unlike hledger)
empty_=not $ empty_ ropts -- show zero items by default, hide them with -E (unlike hledger)
}
}
}
}
where
datespanfromargs = queryDateSpan (date2_ ropts) $ _rsQuery rspec
periodfromoptsandargs =
dateSpanAsPeriod $ spansIntersect [periodAsDateSpan $ period_ ropts, datespanfromargs]
filteredQuery q = simplifyQuery $ And [queryFromFlags ropts, filtered q]
where filtered = filterQuery (\x -> not $ queryIsDepth x || queryIsDate x)
(scr, prevscrs) = case uoRegister uopts' of
Nothing -> (accountsScreen, [])
-- with --register, start on the register screen, and also put
-- the accounts screen on the prev screens stack so you can exit
-- to that as usual.
Just apat -> (rsSetAccount acct False registerScreen, [ascr'])
where
acct = fromMaybe (error' $ "--register "++apat++" did not match any account") -- PARTIAL:
. firstMatch $ journalAccountNamesDeclaredOrImplied j
firstMatch = case toRegexCI $ T.pack apat of
Right re -> find (regexMatchText re)
Left _ -> const Nothing
-- Initialising the accounts screen is awkward, requiring
-- another temporary UIState value..
ascr' = aScreen $
asInit today True
UIState{
astartupopts=uopts'
,aopts=uopts'
,ajournal=j
,aScreen=asSetSelectedAccount acct accountsScreen
,aPrevScreens=[]
,aMode=Normal
}
ui =
(sInit scr) today True $
UIState{
astartupopts=uopts'
,aopts=uopts'
,ajournal=j
,aScreen=scr
,aPrevScreens=prevscrs
,aMode=Normal
}
brickapp :: App UIState AppEvent Name
brickapp = App {
appStartEvent = return
, appAttrMap = const $ fromMaybe defaultTheme $ getTheme =<< uoTheme uopts'
, appChooseCursor = showFirstCursor
, appHandleEvent = \ui ev -> sHandle (aScreen ui) ui ev
, appDraw = \ui -> sDraw (aScreen ui) ui
}
-- print (length (show ui)) >> exitSuccess -- show any debug output to this point & quit
let
-- helper: make a Vty terminal controller with mouse support enabled
makevty = do
v <- mkVty mempty
setMode (outputIface v) Mouse True
return v
if not (uoWatch uopts')
then do
vty <- makevty
void $ customMain vty makevty Nothing brickapp ui
else do
-- a channel for sending misc. events to the app
eventChan <- newChan
-- start a background thread reporting changes in the current date
-- use async for proper child termination in GHCI
let
watchDate old = do
threadDelay 1000000 -- 1 s
new <- getCurrentDay
when (new /= old) $ do
let dc = DateChange old new
-- dbg1IO "datechange" dc -- XXX don't uncomment until dbg*IO fixed to use traceIO, GHC may block/end thread
-- traceIO $ show dc
writeChan eventChan dc
watchDate new
withAsync
-- run this small task asynchronously:
(getCurrentDay >>= watchDate)
-- until this main task terminates:
$ \_async ->
-- start one or more background threads reporting changes in the directories of our files
-- XXX many quick successive saves causes the problems listed in BUGS
-- with Debounce increased to 1s it easily gets stuck on an error or blank screen
-- until you press g, but it becomes responsive again quickly.
-- withManagerConf defaultConfig{confDebounce=Debounce 1} $ \mgr -> do
-- with Debounce at the default 1ms it clears transient errors itself
-- but gets tied up for ages
withManager $ \mgr -> do
dbg1IO "fsnotify using polling ?" $ isPollingManager mgr
files <- mapM (canonicalizePath . fst) $ jfiles j
let directories = nubSort $ map takeDirectory files
dbg1IO "files" files
dbg1IO "directories to watch" directories
forM_ directories $ \d -> watchDir
mgr
d
-- predicate: ignore changes not involving our files
(\case
Modified f _ False -> f `elem` files
-- Added f _ -> f `elem` files
-- Removed f _ -> f `elem` files
-- we don't handle adding/removing journal files right now
-- and there might be some of those events from tmp files
-- clogging things up so let's ignore them
_ -> False
)
-- action: send event to app
(\fev -> do
-- return $ dbglog "fsnotify" $ showFSNEvent fev -- not working
dbg1IO "fsnotify" $ show fev
writeChan eventChan FileChange
)
-- and start the app. Must be inside the withManager block. (XXX makevty too ?)
vty <- makevty
void $ customMain vty makevty (Just eventChan) brickapp ui
| adept/hledger | hledger-ui/Hledger/UI/Main.hs | gpl-3.0 | 9,951 | 0 | 24 | 2,734 | 1,588 | 874 | 714 | 130 | 5 |
{- Copied & modified from the public-domain uglymemo package by
- Lennart Augustsson
-}
module Data.MRUMemo
( memoIO, memoIOPure, memo
) where
import Control.Concurrent.MVar
import Data.IORef
import System.IO.Unsafe (unsafePerformIO)
memoIO :: Eq a => (a -> IO b) -> IO (a -> IO b)
memoIO act =
do
var <- newMVar Nothing
return $ memoized var
where
memoized var key = modifyMVar var onMVar
where
onMVar j@(Just (oldKey, oldvalue))
| oldKey == key = return (j, oldvalue)
| otherwise = callOrig
onMVar Nothing = callOrig
callOrig =
do
res <- act key
return (Just (key, res), res)
-- | Memoize the given function with a single most-recently-used value
memoIOPure
:: (Show a, Eq a)
=> (a -> b) -- ^Function to memoize
-> IO (a -> IO b)
memoIOPure f =
do
lastResultRef <- newIORef Nothing
return $ \x -> atomicModifyIORef lastResultRef $ \m ->
let r = f x
callOrig = (Just (x, r), r)
in case m of
Nothing -> callOrig
Just (key, val)
| key == x -> (m, val)
| otherwise -> callOrig
-- | The pure version of 'memoIO'.
memo :: (Show a, Eq a)
=> (a -> b) -- ^Function to memoize
-> a -> b
memo f = let f' = unsafePerformIO (memoIOPure f)
in unsafePerformIO . f'
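-- A usage sketch (the names below are made up for illustration): memoize an
-- expensive pure function so that repeated calls with the same argument
-- reuse the most recently computed result:
--
-- > expensive :: Int -> Int
-- > expensive x = sum [1..x]
-- >
-- > fast :: Int -> Int
-- > fast = memo expensive  -- consecutive calls with equal arguments hit the cache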
| da-x/lamdu | bottlelib/Data/MRUMemo.hs | gpl-3.0 | 1,560 | 0 | 19 | 623 | 462 | 238 | 224 | 39 | 2 |
-- Copyright (C) 2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.Label.License(matches,
mkLabel)
where
import Data.Char(toUpper)
import Data.List(isPrefixOf)
import qualified Data.Text as T
import System.FilePath.Posix(takeFileName)
import BDCS.DB(Files(..))
import BDCS.Label.Types(Label(..))
-- |Case-insensitively check whether a file name matches the given
-- license-related name, either exactly or followed by "." or "-"
-- (e.g. "COPYING", "copying.txt", "license-MIT").
feq :: FilePath -> String -> Bool
feq path s = let
    path' = map toUpper path
    sDot = s ++ "."
    sDash = s ++ "-"
 in
    path' == s ||
    sDot `isPrefixOf` path' ||
    sDash `isPrefixOf` path'
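-- For example, with the case-insensitive matching above, feq "copying.txt"
-- "COPYING" and feq "license-MIT" "LICENSE" are True, while feq "README"
-- "LICENSE" is False.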
matches :: Files -> Bool
matches Files{..} = let
filesPath' = T.unpack filesPath
fn = takeFileName filesPath'
in
"/usr/share/licenses/" `isPrefixOf` filesPath' ||
feq fn "COPYING" || feq fn "COPYRIGHT" || feq fn "LICENSE"
mkLabel :: Files -> Maybe Label
mkLabel _ = Just LicenseLabel
| atodorov/bdcs | src/BDCS/Label/License.hs | lgpl-2.1 | 1,522 | 0 | 11 | 339 | 281 | 161 | 120 | -1 | -1 |
-- just fire up ghci, :load Smt.hs and run `go file.smt2`
module Smt where
import qualified Data.Text.Lazy.IO as T
import Language.Fixpoint.Config (SMTSolver (..))
import Language.Fixpoint.Parse
import Language.Fixpoint.SmtLib2
import System.Environment
main = do f:_ <- getArgs
_ <- go f
return ()
-- Entry point referenced by `main` and by the ghci instructions at the top.
go = runFile

runFile f
  = readFile f >>= runString
runString str
= runCommands $ rr str
runCommands cmds
= do me <- makeContext Z3
mapM_ (T.putStrLn . smt2) cmds
zs <- mapM (command me) cmds
return zs
| rolph-recto/liquid-fixpoint | tests/smt2/Smt.hs | bsd-3-clause | 558 | 0 | 10 | 148 | 170 | 88 | 82 | 18 | 1 |
{-# LANGUAGE BangPatterns, FlexibleInstances, OverloadedStrings,
TypeSynonymInstances #-}
module Main ( main ) where
import Control.Applicative
import Criterion.Main
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as HM
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Csv
type President = (Int, Text, ByteString, ByteString, ByteString, Text, Text)
instance FromNamedRecord President where
parseNamedRecord m = (,,,,,,) <$>
m .: "Presidency" <*>
m .: "President" <*>
m .: "Wikipedia Entry" <*>
m .: "Took office" <*>
m .: "Left office" <*>
m .: "Party" <*>
m .: "Home State"
instance ToNamedRecord President where
toNamedRecord (presidency, president, wikipediaEntry, tookOffice,
leftOffice, party, homeState) = namedRecord
[ "Presidency" .= presidency
, "President" .= president
, "Wikipedia Entry" .= wikipediaEntry
, "Took office" .= tookOffice
, "Left office" .= leftOffice
, "Party" .= party
, "Home State" .= homeState
]
fromStrict s = BL.fromChunks [s]
type BSHashMap a = HM.HashMap B.ByteString a
main :: IO ()
main = do
!csvData <- fromStrict `fmap` B.readFile "benchmarks/presidents.csv"
!csvDataN <- fromStrict `fmap` B.readFile
"benchmarks/presidents_with_header.csv"
let (Right !presidents) = decodePresidents csvData
(Right (!hdr, !presidentsN)) = decodePresidentsN csvDataN
defaultMain [
bgroup "positional"
[ bgroup "decode"
[ bench "presidents/without conversion" $ whnf idDecode csvData
, bench "presidents/with conversion" $ whnf decodePresidents csvData
]
, bgroup "encode"
[ bench "presidents/with conversion" $ whnf encode presidents
]
]
, bgroup "named"
[ bgroup "decode"
[ bench "presidents/without conversion" $ whnf idDecodeN csvDataN
, bench "presidents/with conversion" $ whnf decodePresidentsN csvDataN
]
, bgroup "encode"
[ bench "presidents/with conversion" $ whnf (encodeByName hdr) presidentsN
]
]
]
where
decodePresidents :: BL.ByteString -> Either String (Vector President)
decodePresidents = decode False
decodePresidentsN :: BL.ByteString -> Either String (Header, Vector President)
decodePresidentsN = decodeByName
idDecode :: BL.ByteString -> Either String (Vector (Vector B.ByteString))
idDecode = decode False
idDecodeN :: BL.ByteString -> Either String (Header, Vector (BSHashMap B.ByteString))
idDecodeN = decodeByName
| solidsnack/cassava | benchmarks/Benchmarks.hs | bsd-3-clause | 2,992 | 0 | 20 | 916 | 693 | 370 | 323 | 62 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the 'Ganeti.Common' module.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Common
( testCommon
, checkOpt
, passFailOpt
, checkEarlyExit
) where
import Test.QuickCheck hiding (Result)
import Test.HUnit
import qualified System.Console.GetOpt as GetOpt
import System.Exit
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.HTools.Program.Main (personalities)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Helper to check for correct parsing of an option.
checkOpt :: (StandardOptions b) =>
(a -> Maybe String) -- ^ Converts the value into a cmdline form
-> b -- ^ The default options
-> (String -> c) -- ^ Fail test function
-> (String -> d -> d -> c) -- ^ Check for equality function
-> (a -> d) -- ^ Transforms the value to a compare val
-> (a, GenericOptType b, b -> d) -- ^ Triple of value, the
-- option, function to
-- extract the set value
-- from the options
-> c
checkOpt repr defaults failfn eqcheck valfn
(val, opt@(GetOpt.Option _ longs _ _, _), fn) =
case longs of
[] -> failfn "no long options?"
cmdarg:_ ->
case parseOptsInner defaults
["--" ++ cmdarg ++ maybe "" ("=" ++) (repr val)]
"prog" [opt] [] of
Left e -> failfn $ "Failed to parse option '" ++ cmdarg ++ ": " ++
show e
Right (options, _) -> eqcheck ("Wrong value in option " ++
cmdarg ++ "?") (valfn val) (fn options)
-- | Helper to check for correct and incorrect parsing of an option.
passFailOpt :: (StandardOptions b) =>
b -- ^ The default options
-> (String -> c) -- ^ Fail test function
-> c -- ^ Pass function
-> (GenericOptType b, String, String)
-- ^ The list of enabled options, fail value and pass value
-> c
passFailOpt defaults failfn passfn
(opt@(GetOpt.Option _ longs _ _, _), bad, good) =
let first_opt = case longs of
[] -> error "no long options?"
x:_ -> x
prefix = "--" ++ first_opt ++ "="
good_cmd = prefix ++ good
bad_cmd = prefix ++ bad in
case (parseOptsInner defaults [bad_cmd] "prog" [opt] [],
parseOptsInner defaults [good_cmd] "prog" [opt] []) of
(Left _, Right _) -> passfn
(Right _, Right _) -> failfn $ "Command line '" ++ bad_cmd ++
"' succeeded when it shouldn't"
(Left _, Left _) -> failfn $ "Command line '" ++ good_cmd ++
"' failed when it shouldn't"
(Right _, Left _) ->
failfn $ "Command line '" ++ bad_cmd ++
"' succeeded when it shouldn't, while command line '" ++
good_cmd ++ "' failed when it shouldn't"
-- | Helper to test that a given option is accepted OK with quick exit.
checkEarlyExit :: (StandardOptions a) =>
a -> String -> [GenericOptType a] -> [ArgCompletion]
-> Assertion
checkEarlyExit defaults name options arguments =
mapM_ (\param ->
case parseOptsInner defaults [param] name options arguments of
Left (code, _) ->
assertEqual ("Program " ++ name ++
" returns invalid code " ++ show code ++
" for option " ++ param) ExitSuccess code
_ -> assertFailure $ "Program " ++ name ++
" doesn't consider option " ++
param ++ " as early exit one"
) ["-h", "--help", "-V", "--version"]
-- | Test parseYesNo.
prop_parse_yes_no :: Bool -> Bool -> String -> Property
prop_parse_yes_no def testval val =
forAll (elements [val, "yes", "no"]) $ \actual_val ->
if testval
then parseYesNo def Nothing ==? Ok def
else let result = parseYesNo def (Just actual_val)
in if actual_val `elem` ["yes", "no"]
then result ==? Ok (actual_val == "yes")
else property $ isBad result
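-- For reference, the property above encodes, e.g.: parseYesNo True Nothing
-- is Ok True, parseYesNo False (Just "yes") is Ok True, and
-- parseYesNo False (Just "maybe") is a Bad result.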
-- | Check that formatCmdUsage works similar to Python _FormatUsage.
case_formatCommands :: Assertion
case_formatCommands =
assertEqual "proper wrap for HTools Main"
resCmdTest (formatCommands personalities)
where resCmdTest :: [String]
resCmdTest =
[ " hail - Ganeti IAllocator plugin that implements the instance\
\ placement and"
, " movement using the same algorithm as hbal(1)"
, " harep - auto-repair tool that detects certain kind of problems\
\ with"
, " instances and applies the allowed set of solutions"
, " hbal - cluster balancer that looks at the current state of\
\ the cluster and"
, " computes a series of steps designed to bring the\
\ cluster into a"
, " better state"
, " hcheck - cluster checker; prints information about cluster's\
\ health and"
, " checks whether a rebalance done using hbal would help"
, " hinfo - cluster information printer; it prints information\
\ about the current"
, " cluster state and its residing nodes/instances"
, " hroller - cluster rolling maintenance helper; it helps\
\ scheduling node reboots"
, " in a manner that doesn't conflict with the instances'\
\ topology"
, " hscan - tool for scanning clusters via RAPI and saving their\
\ data in the"
, " input format used by hbal(1) and hspace(1)"
, " hspace - computes how many additional instances can be fit on a\
\ cluster,"
, " while maintaining N+1 status."
, " hsqueeze - cluster dynamic power management; it powers up and\
\ down nodes to"
, " keep the amount of free online resources in a given\
\ range"
]
testSuite "Common"
[ 'prop_parse_yes_no
, 'case_formatCommands
]
| ribag/ganeti-experiments | test/hs/Test/Ganeti/Common.hs | gpl-2.0 | 7,189 | 0 | 17 | 2,431 | 1,137 | 627 | 510 | 113 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SQS.GetQueueAttributes
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Gets attributes for the specified queue. The following attributes are
-- supported: 'All' - returns all values. 'ApproximateNumberOfMessages' - returns
-- the approximate number of visible messages in a queue. For more information,
-- see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ApproximateNumber.html Resources Required to Process Messages> in the /Amazon SQS Developer Guide/.
-- 'ApproximateNumberOfMessagesNotVisible' - returns the approximate number of
-- messages that are not timed-out and not deleted. For more information, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ApproximateNumber.html Resources Required to Process Messages> in the /Amazon SQS Developer Guide/. 'VisibilityTimeout' - returns the
-- visibility timeout for the queue. For more information about visibility
-- timeout, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/AboutVT.html Visibility Timeout> in the /Amazon SQS Developer Guide/. 'CreatedTimestamp' - returns the time when the queue was created (epoch time in seconds). 'LastModifiedTimestamp' - returns the time when the queue was last changed (epoch time in seconds). 'Policy' - returns the queue's policy. 'MaximumMessageSize' - returns the limit
-- of how many bytes a message can contain before Amazon SQS rejects it. 'MessageRetentionPeriod' - returns the number of seconds Amazon SQS retains a message. 'QueueArn' -
-- returns the queue's Amazon resource name (ARN). 'ApproximateNumberOfMessagesDelayed' - returns the approximate number of messages that are pending to be added to
-- the queue. 'DelaySeconds' - returns the default delay on the queue in seconds.
-- 'ReceiveMessageWaitTimeSeconds' - returns the time for which a ReceiveMessage
-- call will wait for a message to arrive. 'RedrivePolicy' - returns the
-- parameters for dead letter queue functionality of the source queue. For more
-- information about RedrivePolicy and dead letter queues, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/SQSDeadLetterQueue.html Using Amazon SQSDead Letter Queues> in the /Amazon SQS Developer Guide/.
--
-- Going forward, new attributes might be added. If you are writing code that
-- calls this action, we recommend that you structure your code so that it can
-- handle new attributes gracefully. Some API actions take lists of parameters.
-- These lists are specified using the 'param.n' notation. Values of 'n' are
-- integers starting from 1. For example, a parameter list with two elements
-- looks like this: '&Attribute.1=this'
--
-- '&Attribute.2=that'
--
-- <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_GetQueueAttributes.html>
module Network.AWS.SQS.GetQueueAttributes
(
-- * Request
GetQueueAttributes
-- ** Request constructor
, getQueueAttributes
-- ** Request lenses
, gqaAttributeNames
, gqaQueueUrl
-- * Response
, GetQueueAttributesResponse
-- ** Response constructor
, getQueueAttributesResponse
-- ** Response lenses
, gqarAttributes
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.SQS.Types
import qualified GHC.Exts
data GetQueueAttributes = GetQueueAttributes
{ _gqaAttributeNames :: List "member" Text
, _gqaQueueUrl :: Text
} deriving (Eq, Read, Show)
-- | 'GetQueueAttributes' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gqaAttributeNames' @::@ ['Text']
--
-- * 'gqaQueueUrl' @::@ 'Text'
--
getQueueAttributes :: Text -- ^ 'gqaQueueUrl'
-> GetQueueAttributes
getQueueAttributes p1 = GetQueueAttributes
{ _gqaQueueUrl = p1
, _gqaAttributeNames = mempty
}
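-- A request sketch (the queue URL is made up; assumes the usual lens
-- operators, e.g. (&) and (.~), are in scope):
--
-- > getQueueAttributes "https://sqs.us-east-1.amazonaws.com/123456789012/my-queue"
-- >     & gqaAttributeNames .~ ["All"]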
-- | A list of attributes to retrieve information for.
gqaAttributeNames :: Lens' GetQueueAttributes [Text]
gqaAttributeNames =
lens _gqaAttributeNames (\s a -> s { _gqaAttributeNames = a })
. _List
-- | The URL of the Amazon SQS queue to take action on.
gqaQueueUrl :: Lens' GetQueueAttributes Text
gqaQueueUrl = lens _gqaQueueUrl (\s a -> s { _gqaQueueUrl = a })
newtype GetQueueAttributesResponse = GetQueueAttributesResponse
{ _gqarAttributes :: EMap "entry" "Name" "Value" Text Text
} deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'GetQueueAttributesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gqarAttributes' @::@ 'HashMap' 'Text' 'Text'
--
getQueueAttributesResponse :: GetQueueAttributesResponse
getQueueAttributesResponse = GetQueueAttributesResponse
{ _gqarAttributes = mempty
}
-- | A map of attributes to the respective values.
gqarAttributes :: Lens' GetQueueAttributesResponse (HashMap Text Text)
gqarAttributes = lens _gqarAttributes (\s a -> s { _gqarAttributes = a }) . _EMap
instance ToPath GetQueueAttributes where
toPath = const "/"
instance ToQuery GetQueueAttributes where
toQuery GetQueueAttributes{..} = mconcat
[ toQuery _gqaAttributeNames
, "QueueUrl" =? _gqaQueueUrl
]
instance ToHeaders GetQueueAttributes
instance AWSRequest GetQueueAttributes where
type Sv GetQueueAttributes = SQS
type Rs GetQueueAttributes = GetQueueAttributesResponse
request = post "GetQueueAttributes"
response = xmlResponse
instance FromXML GetQueueAttributesResponse where
parseXML = withElement "GetQueueAttributesResult" $ \x -> GetQueueAttributesResponse
<$> parseXML x
| romanb/amazonka | amazonka-sqs/gen/Network/AWS/SQS/GetQueueAttributes.hs | mpl-2.0 | 6,581 | 0 | 10 | 1,209 | 530 | 328 | 202 | 61 | 1 |
{-# LANGUAGE DeriveDataTypeable, PatternGuards, FlexibleInstances, MultiParamTypeClasses, CPP #-}
-- deriving Typeable for ghc-6.6 compatibility, which is retained in the core
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.ManageDocks
-- Copyright : (c) Joachim Breitner <mail@joachim-breitner.de>
-- License : BSD
--
-- Maintainer : Joachim Breitner <mail@joachim-breitner.de>
-- Stability : unstable
-- Portability : unportable
--
-- This module provides tools to automatically manage 'dock' type programs,
-- such as gnome-panel, kicker, dzen, and xmobar.
module XMonad.Hooks.ManageDocks (
-- * Usage
-- $usage
manageDocks, checkDock, AvoidStruts, avoidStruts, avoidStrutsOn,
docksEventHook,
ToggleStruts(..),
SetStruts(..),
module XMonad.Util.Types,
#ifdef TESTING
r2c,
c2r,
RectC(..),
#endif
-- for XMonad.Actions.FloatSnap
calcGap
) where
-----------------------------------------------------------------------------
import XMonad
import Foreign.C.Types (CLong)
import XMonad.Layout.LayoutModifier
import XMonad.Util.Types
import XMonad.Util.WindowProperties (getProp32s)
import XMonad.Util.XUtils (fi)
import Data.Monoid (All(..))
import qualified Data.Set as S
-- $usage
-- To use this module, add the following import to @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Hooks.ManageDocks
--
-- The first component is a 'ManageHook' which recognizes these
-- windows and de-manages them, so that xmonad does not try to tile
-- them. To enable it:
--
-- > manageHook = ... <+> manageDocks
--
-- The second component is a layout modifier that prevents windows
-- from overlapping these dock windows. It is intended to replace
-- xmonad's so-called \"gap\" support. First, you must add it to your
-- list of layouts:
--
-- > layoutHook = avoidStruts (tall ||| mirror tall ||| ...)
-- > where tall = Tall 1 (3/100) (1/2)
--
-- The third component is an event hook that causes new docks to appear
-- immediately, instead of waiting for the next focus change.
--
-- > handleEventHook = ... <+> docksEventHook
--
-- 'AvoidStruts' also supports toggling the dock gaps; add a keybinding
-- similar to:
--
-- > ,((modm, xK_b ), sendMessage ToggleStruts)
--
-- If you have multiple docks, you can toggle their gaps individually.
-- For example, to toggle only the top gap:
--
-- > ,((modm .|. controlMask, xK_t), sendMessage $ ToggleStrut U)
--
-- Similarly, you can use 'D', 'L', and 'R' to individually toggle
-- gaps on the bottom, left, or right.
--
-- If you want certain docks to be avoided but others to be covered by
-- default, you can manually specify the sides of the screen on which
-- docks should be avoided, using 'avoidStrutsOn'. For example:
--
-- > layoutHook = avoidStrutsOn [U,L] (tall ||| mirror tall ||| ...)
--
-- /Important note/: if you are switching from manual gaps
-- (defaultGaps in your config) to avoidStruts (recommended, since
-- manual gaps will probably be phased out soon), be sure to switch
-- off all your gaps (with mod-b) /before/ reloading your config with
-- avoidStruts! Toggling struts with a 'ToggleStruts' message will
-- not work unless your gaps are set to zero.
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
--
-- | Detects if the given window is of type DOCK and if so, reveals
-- it, but does not manage it. If the window has the STRUT property
-- set, adjust the gap accordingly.
manageDocks :: ManageHook
manageDocks = checkDock --> doIgnore
-- | Checks if a window is a DOCK or DESKTOP window
checkDock :: Query Bool
checkDock = ask >>= \w -> liftX $ do
dock <- getAtom "_NET_WM_WINDOW_TYPE_DOCK"
desk <- getAtom "_NET_WM_WINDOW_TYPE_DESKTOP"
mbr <- getProp32s "_NET_WM_WINDOW_TYPE" w
case mbr of
Just rs -> return $ any (`elem` [dock,desk]) (map fromIntegral rs)
_ -> return False
-- | Whenever a new dock appears, refresh the layout immediately to avoid the
-- new dock.
docksEventHook :: Event -> X All
docksEventHook (MapNotifyEvent {ev_window = w}) = do
whenX ((not `fmap` (isClient w)) <&&> runQuery checkDock w) refresh
return (All True)
docksEventHook _ = return (All True)
-- | Gets the STRUT config, if present, in xmonad gap order
getStrut :: Window -> X [Strut]
getStrut w = do
msp <- getProp32s "_NET_WM_STRUT_PARTIAL" w
case msp of
Just sp -> return $ parseStrutPartial sp
Nothing -> fmap (maybe [] parseStrut) $ getProp32s "_NET_WM_STRUT" w
where
parseStrut xs@[_, _, _, _] = parseStrutPartial . take 12 $ xs ++ cycle [minBound, maxBound]
parseStrut _ = []
parseStrutPartial [l, r, t, b, ly1, ly2, ry1, ry2, tx1, tx2, bx1, bx2]
= filter (\(_, n, _, _) -> n /= 0)
[(L, l, ly1, ly2), (R, r, ry1, ry2), (U, t, tx1, tx2), (D, b, bx1, bx2)]
parseStrutPartial _ = []
-- | Goes through the list of windows and finds the gap so that all
-- STRUT settings are satisfied.
calcGap :: S.Set Direction2D -> X (Rectangle -> Rectangle)
calcGap ss = withDisplay $ \dpy -> do
rootw <- asks theRoot
-- We don't keep track of dock like windows, so we find all of them here
(_,_,wins) <- io $ queryTree dpy rootw
struts <- (filter careAbout . concat) `fmap` mapM getStrut wins
-- we grab the window attributes of the root window rather than checking
-- the width of the screen because xlib caches this info and it tends to
-- be incorrect after RAndR
wa <- io $ getWindowAttributes dpy rootw
let screen = r2c $ Rectangle (fi $ wa_x wa) (fi $ wa_y wa) (fi $ wa_width wa) (fi $ wa_height wa)
return $ \r -> c2r $ foldr (reduce screen) (r2c r) struts
where careAbout (s,_,_,_) = s `S.member` ss
-- | Adjust layout automagically: don't cover up any docks, status
-- bars, etc.
avoidStruts :: LayoutClass l a => l a -> ModifiedLayout AvoidStruts l a
avoidStruts = avoidStrutsOn [U,D,L,R]
-- | Adjust layout automagically: don't cover up docks, status bars,
-- etc. on the indicated sides of the screen. Valid sides are U
-- (top), D (bottom), R (right), or L (left).
avoidStrutsOn :: LayoutClass l a =>
[Direction2D]
-> l a
-> ModifiedLayout AvoidStruts l a
avoidStrutsOn ss = ModifiedLayout $ AvoidStruts $ S.fromList ss
data AvoidStruts a = AvoidStruts (S.Set Direction2D) deriving ( Read, Show )
-- | Message type which can be sent to an 'AvoidStruts' layout
-- modifier to alter its behavior.
data ToggleStruts = ToggleStruts
| ToggleStrut Direction2D
deriving (Read,Show,Typeable)
instance Message ToggleStruts
-- | SetStruts is a message constructor used to set or unset specific struts,
-- regardless of whether or not the struts were originally set. Here are some
-- example bindings:
--
-- Show all gaps:
--
-- > ,((modm .|. shiftMask ,xK_b),sendMessage $ SetStruts [minBound .. maxBound] [])
--
-- Hide all gaps:
--
-- > ,((modm .|. controlMask,xK_b),sendMessage $ SetStruts [] [minBound .. maxBound])
--
-- Show only upper and left gaps:
--
-- > ,((modm .|. controlMask .|. shiftMask,xK_b),sendMessage $ SetStruts [U,L] [minBound .. maxBound])
--
-- Hide the bottom keeping whatever the other values were:
--
-- > ,((modm .|. controlMask .|. shiftMask,xK_g),sendMessage $ SetStruts [] [D])
data SetStruts = SetStruts { addedStruts :: [Direction2D]
, removedStruts :: [Direction2D] -- ^ These are removed from the currently set struts before 'addedStruts' are added.
}
deriving (Read,Show,Typeable)
instance Message SetStruts
instance LayoutModifier AvoidStruts a where
modifyLayout (AvoidStruts ss) w r = do
nr <- fmap ($ r) (calcGap ss)
runLayout w nr
pureMess (AvoidStruts ss) m
| Just ToggleStruts <- fromMessage m = Just $ AvoidStruts (toggleAll ss)
| Just (ToggleStrut s) <- fromMessage m = Just $ AvoidStruts (toggleOne s ss)
| Just (SetStruts n k) <- fromMessage m
, let newSS = S.fromList n `S.union` (ss S.\\ S.fromList k)
, newSS /= ss = Just $ AvoidStruts newSS
| otherwise = Nothing
where toggleAll x | S.null x = S.fromList [minBound .. maxBound]
| otherwise = S.empty
toggleOne x xs | x `S.member` xs = S.delete x xs
| otherwise = x `S.insert` xs
-- | (Direction, height\/width, initial pixel, final pixel).
type Strut = (Direction2D, CLong, CLong, CLong)
-- | (Initial x pixel, initial y pixel,
-- final x pixel, final y pixel).
newtype RectC = RectC (CLong, CLong, CLong, CLong) deriving (Eq,Show)
-- | Invertible conversion.
r2c :: Rectangle -> RectC
r2c (Rectangle x y w h) = RectC (fi x, fi y, fi x + fi w - 1, fi y + fi h - 1)
-- | Invertible conversion.
c2r :: RectC -> Rectangle
c2r (RectC (x1, y1, x2, y2)) = Rectangle (fi x1) (fi y1) (fi $ x2 - x1 + 1) (fi $ y2 - y1 + 1)
reduce :: RectC -> Strut -> RectC -> RectC
reduce (RectC (sx0, sy0, sx1, sy1)) (s, n, l, h) (RectC (x0, y0, x1, y1)) =
RectC $ case s of
L | p (y0, y1) && qh x1 -> (mx x0 sx0, y0 , x1 , y1 )
R | p (y0, y1) && qv sx1 x0 -> (x0 , y0 , mn x1 sx1, y1 )
U | p (x0, x1) && qh y1 -> (x0 , mx y0 sy0, x1 , y1 )
D | p (x0, x1) && qv sy1 y0 -> (x0 , y0 , x1 , mn y1 sy1)
_ -> (x0 , y0 , x1 , y1 )
where
mx a b = max a (b + n)
mn a b = min a (b - n)
p r = r `overlaps` (l, h)
-- Filter out struts that cover the entire rectangle:
qh d1 = n <= d1
qv sd1 d0 = sd1 - n >= d0
-- | Do the two ranges overlap?
--
-- Precondition for every input range @(x, y)@: @x '<=' y@.
--
-- A range @(x, y)@ is assumed to include every pixel from @x@ to @y@.
overlaps :: Ord a => (a, a) -> (a, a) -> Bool
(a, b) `overlaps` (x, y) =
inRange (a, b) x || inRange (a, b) y || inRange (x, y) a
where
inRange (i, j) k = i <= k && k <= j
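-- For example: (1, 5) `overlaps` (5, 9) is True (the ranges share pixel 5),
-- while (1, 3) `overlaps` (4, 6) is False.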
| markus1189/xmonad-contrib-710 | XMonad/Hooks/ManageDocks.hs | bsd-3-clause | 10,132 | 0 | 16 | 2,394 | 2,277 | 1,267 | 1,010 | 106 | 5 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Revisitar | Extensão ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Conteúdo</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Busca</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoritos</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/revisit/src/main/javahelp/org/zaproxy/zap/extension/revisit/resources/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 972 | 78 | 66 | 158 | 416 | 210 | 206 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Lazyfoo.Lesson07 (main) where
import Control.Monad
import Foreign.C.Types
import Linear
import SDL (($=))
import qualified SDL
import Paths_sdl2 (getDataFileName)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (640, 480)
main :: IO ()
main = do
SDL.initialize [SDL.InitVideo]
SDL.HintRenderScaleQuality $= SDL.ScaleLinear
do renderQuality <- SDL.get SDL.HintRenderScaleQuality
when (renderQuality /= SDL.ScaleLinear) $
putStrLn "Warning: Linear texture filtering not enabled!"
window <-
SDL.createWindow
"SDL Tutorial"
SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
SDL.showWindow window
renderer <-
SDL.createRenderer
window
(-1)
(SDL.RendererConfig
{ SDL.rendererType = SDL.AcceleratedRenderer
, SDL.rendererTargetTexture = False
})
SDL.rendererDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
xOutSurface <- getDataFileName "examples/lazyfoo/texture.bmp" >>= SDL.loadBMP
texture <- SDL.createTextureFromSurface renderer xOutSurface
SDL.freeSurface xOutSurface
let loop = do
let collectEvents = do
e <- SDL.pollEvent
case e of
Nothing -> return []
Just e' -> (e' :) <$> collectEvents
events <- collectEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
SDL.clear renderer
SDL.copy renderer texture Nothing Nothing
SDL.present renderer
unless quit loop
loop
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
| tejon/sdl2 | examples/lazyfoo/Lesson07.hs | bsd-3-clause | 1,760 | 0 | 21 | 414 | 463 | 228 | 235 | 51 | 2 |
module Foo () where
import Data.Set (Set(..))
{-@ include <selfList.hquals> @-}
{-@ invariant {v0:[{v: a | (Set_mem v (listElts v0))}] | true } @-}
{-@ type IList a = {v0: [{v:a | (Set_mem v (listElts v0))}] | true } @-}
{-@ moo :: [a] -> IList a @-}
moo [] = []
moo (_:xs) = xs
goo [] = []
goo (_:xs) = xs
{-@ poo :: IList Int @-}
poo = goo xs
where
xs :: [Int]
xs = [2,1,3,2]
| abakst/liquidhaskell | tests/pos/selfList.hs | bsd-3-clause | 410 | 0 | 7 | 113 | 115 | 68 | 47 | 9 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.Theme
-- Copyright : (C) 2007 Andrea Rossato
-- License : BSD3
--
-- Maintainer : andrea.rossato@unibz.it
-- Stability : unstable
-- Portability : unportable
--
-- A prompt for changing the theme of the current workspace
-----------------------------------------------------------------------------
module XMonad.Prompt.Theme
( -- * Usage
-- $usage
themePrompt,
ThemePrompt,
) where
import Control.Arrow ( (&&&) )
import qualified Data.Map as M
import Data.Maybe ( fromMaybe )
import XMonad
import XMonad.Prompt
import XMonad.Layout.Decoration
import XMonad.Util.Themes
-- $usage
-- You can use this module with the following in your
-- @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Prompt
-- > import XMonad.Prompt.Theme
--
-- in your keybindings add:
--
-- > , ((modm .|. controlMask, xK_t), themePrompt def)
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data ThemePrompt = ThemePrompt
instance XPrompt ThemePrompt where
showXPrompt ThemePrompt = "Select a theme: "
commandToComplete _ c = c
nextCompletion _ = getNextCompletion
themePrompt :: XPConfig -> X ()
themePrompt c = mkXPrompt ThemePrompt c (mkComplFunFromList' . map ppThemeInfo $ listOfThemes) changeTheme
where changeTheme t = sendMessage . SetTheme . fromMaybe def $ M.lookup t mapOfThemes
mapOfThemes :: M.Map String Theme
mapOfThemes = M.fromList . uncurry zip . (map ppThemeInfo &&& map theme) $ listOfThemes
| pjones/xmonad-test | vendor/xmonad-contrib/XMonad/Prompt/Theme.hs | bsd-2-clause | 1,628 | 0 | 9 | 284 | 258 | 152 | 106 | 21 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Makefile Dependency Generation
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverMkDepend (
doMkDependHS
) where
#include "HsVersions.h"
import qualified GHC
import GhcMonad
import HsSyn ( ImportDecl(..) )
import DynFlags
import Util
import HscTypes
import SysTools ( newTempName )
import qualified SysTools
import Module
import Digraph ( SCC(..) )
import Finder
import Outputable
import Panic
import SrcLoc
import Data.List
import FastString
import Exception
import ErrUtils
import System.Directory
import System.FilePath
import System.IO
import System.IO.Error ( isEOFError )
import Control.Monad ( when )
import Data.Maybe ( isJust )
-----------------------------------------------------------------
--
-- The main function
--
-----------------------------------------------------------------
doMkDependHS :: GhcMonad m => [FilePath] -> m ()
doMkDependHS srcs = do
-- Initialisation
dflags0 <- GHC.getSessionDynFlags
-- We kludge things a bit for dependency generation. Rather than
-- generating dependencies for each way separately, we generate
-- them once and then duplicate them for each way's osuf/hisuf.
-- We therefore do the initial dependency generation with an empty
-- way and .o/.hi extensions, regardless of any flags that might
-- be specified.
let dflags = dflags0 {
ways = [],
buildTag = mkBuildTag [],
hiSuf = "hi",
objectSuf = "o"
}
_ <- GHC.setSessionDynFlags dflags
when (null (depSuffixes dflags)) $ liftIO $
throwGhcExceptionIO (ProgramError "You must specify at least one -dep-suffix")
files <- liftIO $ beginMkDependHS dflags
-- Do the downsweep to find all the modules
targets <- mapM (\s -> GHC.guessTarget s Nothing) srcs
GHC.setTargets targets
let excl_mods = depExcludeMods dflags
mod_summaries <- GHC.depanal excl_mods True {- Allow dup roots -}
-- Sort into dependency order
-- There should be no cycles
let sorted = GHC.topSortModuleGraph False mod_summaries Nothing
-- Print out the dependencies if wanted
liftIO $ debugTraceMsg dflags 2 (text "Module dependencies" $$ ppr sorted)
    -- Process them one by one, dumping results into makefile
-- and complaining about cycles
hsc_env <- getSession
root <- liftIO getCurrentDirectory
mapM_ (liftIO . processDeps dflags hsc_env excl_mods root (mkd_tmp_hdl files)) sorted
-- If -ddump-mod-cycles, show cycles in the module graph
liftIO $ dumpModCycles dflags mod_summaries
-- Tidy up
liftIO $ endMkDependHS dflags files
-- Unconditional exiting is a bad idea. If an error occurs we'll get an
--exception; if that is not caught it's fine, but at least we have a
--chance to find out exactly what went wrong. Uncomment the following
--line if you disagree.
--`GHC.ghcCatch` \_ -> io $ exitWith (ExitFailure 1)
-----------------------------------------------------------------
--
-- beginMkDependHs
-- Create a temporary file,
-- find the Makefile,
-- slurp through it, etc
--
-----------------------------------------------------------------
data MkDepFiles
= MkDep { mkd_make_file :: FilePath, -- Name of the makefile
mkd_make_hdl :: Maybe Handle, -- Handle for the open makefile
mkd_tmp_file :: FilePath, -- Name of the temporary file
mkd_tmp_hdl :: Handle } -- Handle of the open temporary file
beginMkDependHS :: DynFlags -> IO MkDepFiles
beginMkDependHS dflags = do
-- open a new temp file in which to stuff the dependency info
-- as we go along.
tmp_file <- newTempName dflags "dep"
tmp_hdl <- openFile tmp_file WriteMode
-- open the makefile
let makefile = depMakefile dflags
exists <- doesFileExist makefile
mb_make_hdl <-
if not exists
then return Nothing
else do
makefile_hdl <- openFile makefile ReadMode
-- slurp through until we get the magic start string,
-- copying the contents into dep_makefile
let slurp = do
l <- hGetLine makefile_hdl
if (l == depStartMarker)
then return ()
else do hPutStrLn tmp_hdl l; slurp
-- slurp through until we get the magic end marker,
-- throwing away the contents
let chuck = do
l <- hGetLine makefile_hdl
if (l == depEndMarker)
then return ()
else chuck
catchIO slurp
(\e -> if isEOFError e then return () else ioError e)
catchIO chuck
(\e -> if isEOFError e then return () else ioError e)
return (Just makefile_hdl)
-- write the magic marker into the tmp file
hPutStrLn tmp_hdl depStartMarker
return (MkDep { mkd_make_file = makefile, mkd_make_hdl = mb_make_hdl,
mkd_tmp_file = tmp_file, mkd_tmp_hdl = tmp_hdl})
-----------------------------------------------------------------
--
-- processDeps
--
-----------------------------------------------------------------
processDeps :: DynFlags
-> HscEnv
-> [ModuleName]
-> FilePath
-> Handle -- Write dependencies to here
-> SCC ModSummary
-> IO ()
-- Write suitable dependencies to handle
-- Always:
-- this.o : this.hs
--
-- If the dependency is on something other than a .hi file:
-- this.o this.p_o ... : dep
-- otherwise
-- this.o ... : dep.hi
-- this.p_o ... : dep.p_hi
-- ...
-- (where .o is $osuf, and the other suffixes come from
-- the cmdline -s options).
--
-- For {-# SOURCE #-} imports the "hi" will be "hi-boot".
processDeps dflags _ _ _ _ (CyclicSCC nodes)
= -- There shouldn't be any cycles; report them
throwGhcExceptionIO (ProgramError (showSDoc dflags $ GHC.cyclicModuleErr nodes))
processDeps dflags hsc_env excl_mods root hdl (AcyclicSCC node)
= do { let extra_suffixes = depSuffixes dflags
include_pkg_deps = depIncludePkgDeps dflags
src_file = msHsFilePath node
obj_file = msObjFilePath node
obj_files = insertSuffixes obj_file extra_suffixes
do_imp loc is_boot pkg_qual imp_mod
= do { mb_hi <- findDependency hsc_env loc pkg_qual imp_mod
is_boot include_pkg_deps
; case mb_hi of {
Nothing -> return () ;
Just hi_file -> do
{ let hi_files = insertSuffixes hi_file extra_suffixes
write_dep (obj,hi) = writeDependency root hdl [obj] hi
-- Add one dependency for each suffix;
-- e.g. A.o : B.hi
-- A.x_o : B.x_hi
; mapM_ write_dep (obj_files `zip` hi_files) }}}
-- Emit std dependency of the object(s) on the source file
-- Something like A.o : A.hs
; writeDependency root hdl obj_files src_file
-- Emit a dependency for each import
; let do_imps is_boot idecls = sequence_
[ do_imp loc is_boot (ideclPkgQual i) mod
| L loc i <- idecls,
let mod = unLoc (ideclName i),
mod `notElem` excl_mods ]
; do_imps True (ms_srcimps node)
; do_imps False (ms_imps node)
}
findDependency :: HscEnv
-> SrcSpan
-> Maybe FastString -- package qualifier, if any
-> ModuleName -- Imported module
-> IsBootInterface -- Source import
-> Bool -- Record dependency on package modules
               -> IO (Maybe FilePath)      -- Interface file
findDependency hsc_env srcloc pkg imp is_boot include_pkg_deps
= do { -- Find the module; this will be fast because
-- we've done it once during downsweep
r <- findImportedModule hsc_env imp pkg
; case r of
Found loc _
-- Home package: just depend on the .hi or hi-boot file
| isJust (ml_hs_file loc) || include_pkg_deps
-> return (Just (addBootSuffix_maybe is_boot (ml_hi_file loc)))
-- Not in this package: we don't need a dependency
| otherwise
-> return Nothing
fail ->
let dflags = hsc_dflags hsc_env
in throwOneError $ mkPlainErrMsg dflags srcloc $
cannotFindModule dflags imp fail
}
-----------------------------
writeDependency :: FilePath -> Handle -> [FilePath] -> FilePath -> IO ()
-- (writeDependency r h [t1,t2] dep) writes to handle h the dependency
-- t1 t2 : dep
writeDependency root hdl targets dep
= do let -- We need to avoid making deps on
-- c:/foo/...
-- on cygwin as make gets confused by the :
-- Making relative deps avoids some instances of this.
dep' = makeRelative root dep
forOutput = escapeSpaces . reslash Forwards . normalise
output = unwords (map forOutput targets) ++ " : " ++ forOutput dep'
hPutStrLn hdl output
-----------------------------
insertSuffixes
:: FilePath -- Original filename; e.g. "foo.o"
-> [String] -- Suffix prefixes e.g. ["x_", "y_"]
-> [FilePath] -- Zapped filenames e.g. ["foo.x_o", "foo.y_o"]
-- Note that the extra bit gets inserted *before* the old suffix
-- We assume the old suffix contains no dots, so we know where to
-- split it
insertSuffixes file_name extras
= [ basename <.> (extra ++ suffix) | extra <- extras ]
where
(basename, suffix) = case splitExtension file_name of
-- Drop the "." from the extension
(b, s) -> (b, drop 1 s)
-----------------------------------------------------------------
--
-- endMkDependHs
-- Complete the makefile, close the tmp file etc
--
-----------------------------------------------------------------
endMkDependHS :: DynFlags -> MkDepFiles -> IO ()
endMkDependHS dflags
(MkDep { mkd_make_file = makefile, mkd_make_hdl = makefile_hdl,
mkd_tmp_file = tmp_file, mkd_tmp_hdl = tmp_hdl })
= do
  -- write the magic end marker into the tmp file
hPutStrLn tmp_hdl depEndMarker
case makefile_hdl of
Nothing -> return ()
Just hdl -> do
-- slurp the rest of the original makefile and copy it into the output
let slurp = do
l <- hGetLine hdl
hPutStrLn tmp_hdl l
slurp
catchIO slurp
(\e -> if isEOFError e then return () else ioError e)
hClose hdl
hClose tmp_hdl -- make sure it's flushed
-- Create a backup of the original makefile
when (isJust makefile_hdl)
(SysTools.copy dflags ("Backing up " ++ makefile)
makefile (makefile++".bak"))
-- Copy the new makefile in place
SysTools.copy dflags "Installing new makefile" tmp_file makefile
-----------------------------------------------------------------
-- Module cycles
-----------------------------------------------------------------
dumpModCycles :: DynFlags -> [ModSummary] -> IO ()
dumpModCycles dflags mod_summaries
| not (dopt Opt_D_dump_mod_cycles dflags)
= return ()
| null cycles
= putMsg dflags (ptext (sLit "No module cycles"))
| otherwise
= putMsg dflags (hang (ptext (sLit "Module cycles found:")) 2 pp_cycles)
where
cycles :: [[ModSummary]]
cycles = [ c | CyclicSCC c <- GHC.topSortModuleGraph True mod_summaries Nothing ]
pp_cycles = vcat [ (ptext (sLit "---------- Cycle") <+> int n <+> ptext (sLit "----------"))
$$ pprCycle c $$ blankLine
| (n,c) <- [1..] `zip` cycles ]
pprCycle :: [ModSummary] -> SDoc
-- Print a cycle, but show only the imports within the cycle
pprCycle summaries = pp_group (CyclicSCC summaries)
where
cycle_mods :: [ModuleName] -- The modules in this cycle
cycle_mods = map (moduleName . ms_mod) summaries
pp_group (AcyclicSCC ms) = pp_ms ms
pp_group (CyclicSCC mss)
= ASSERT( not (null boot_only) )
-- The boot-only list must be non-empty, else there would
          -- be an infinite chain of non-boot imports, and we've
-- already checked for that in processModDeps
pp_ms loop_breaker $$ vcat (map pp_group groups)
where
(boot_only, others) = partition is_boot_only mss
is_boot_only ms = not (any in_group (map (ideclName.unLoc) (ms_imps ms)))
in_group (L _ m) = m `elem` group_mods
group_mods = map (moduleName . ms_mod) mss
loop_breaker = head boot_only
all_others = tail boot_only ++ others
groups = GHC.topSortModuleGraph True all_others Nothing
pp_ms summary = text mod_str <> text (take (20 - length mod_str) (repeat ' '))
<+> (pp_imps empty (map (ideclName.unLoc) (ms_imps summary)) $$
pp_imps (ptext (sLit "{-# SOURCE #-}")) (map (ideclName.unLoc) (ms_srcimps summary)))
where
mod_str = moduleNameString (moduleName (ms_mod summary))
pp_imps :: SDoc -> [Located ModuleName] -> SDoc
pp_imps _ [] = empty
pp_imps what lms
= case [m | L _ m <- lms, m `elem` cycle_mods] of
[] -> empty
ms -> what <+> ptext (sLit "imports") <+>
pprWithCommas ppr ms
-----------------------------------------------------------------
--
-- Flags
--
-----------------------------------------------------------------
depStartMarker, depEndMarker :: String
depStartMarker = "# DO NOT DELETE: Beginning of Haskell dependencies"
depEndMarker = "# DO NOT DELETE: End of Haskell dependencies"
| forked-upstream-packages-for-ghcjs/ghc | compiler/main/DriverMkDepend.hs | bsd-3-clause | 14,693 | 0 | 21 | 4,624 | 2,716 | 1,422 | 1,294 | 215 | 6 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
module Shexkell.Data.ShapeMap where
import qualified Data.Map as Map
import Data.RDF
import Data.List
import Data.Maybe (fromJust)
import Shexkell.Data.ShEx hiding (shapes)
import Shexkell.Data.Common
-- | Relation of nodes and shapes that the nodes are expected to match or have
-- been validated against
newtype ShapeMap = ShapeMap { shapeMap :: Map.Map Node [(ShapeExpr, ValidationResult)] }
deriving (Show)
-- | Result of the validation
data ValidationResult = Positive | Negative
deriving (Show, Eq, Ord)
-- | Instances of this class can be used to build a ShapeMap when given a
-- graph and schema
class ShapeMapRef a n s | a -> n, a -> s where
-- | Create a ShapeMap
mkMap :: Rdf gr => RDF gr -> Schema -> a -> ShapeMap
mkMap _ schema ref = ShapeMap $ Map.fromList $ map findShapes $ nodes ref where
findShapes ns = (mkNode ref ns, map mkShape (shapes ref ns))
mkShape sh = (findShape sh, result ref sh)
findShape sh = fromJust $ findShapeByLabel (mkShapeLabel ref sh) schema
-- | Get the structure that lists the nodes of the shape map
nodes :: a -> [n]
  -- | Get the shape references to validate against a given node
shapes :: a -> n -> [s]
-- | Create a node from a node reference
mkNode :: a -> n -> Node
-- | Create a Shape Label from a shape reference
mkShapeLabel :: a -> s -> ShapeLabel
-- | Create a expected validation result from a shape reference
result :: a -> s -> ValidationResult
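-- A sketch of how an instance might look for a simple precomputed reference
-- type (illustrative only; FixedMap is made up, with n ~ Node and
-- s ~ (ShapeLabel, ValidationResult)):
--
-- > newtype FixedMap = FixedMap [(Node, [(ShapeLabel, ValidationResult)])]
-- >
-- > instance ShapeMapRef FixedMap Node (ShapeLabel, ValidationResult) where
-- >   nodes (FixedMap m)    = map fst m
-- >   shapes (FixedMap m) n = concat [ shs | (n', shs) <- m, n' == n ]
-- >   mkNode _              = id
-- >   mkShapeLabel _        = fst
-- >   result _              = snd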
-- | Find the result of a previous validation of a given node against
-- a given Shape
findResult :: Node -> ShapeExpr -> ShapeMap -> Maybe ValidationResult
findResult node shape (ShapeMap shMap) =
snd <$> (Map.lookup node shMap >>= find ((== shape) . fst))
combineShapeMaps :: ShapeMap -> ShapeMap -> ShapeMap
combineShapeMaps (ShapeMap shMap) (ShapeMap shMap') = ShapeMap $ Map.unionWith combineShapes shMap shMap'
where combineShapes shapes shapes' = combineShape $ groupBy shape $ union shapes shapes'
shape :: (ShapeExpr, ValidationResult) -> (ShapeExpr, ValidationResult) -> Bool
shape (sh, _) (sh', _) = sh == sh'
combineShape :: [[(ShapeExpr, ValidationResult)]] -> [(ShapeExpr, ValidationResult)]
combineShape = map combineResult
combineResult :: [(ShapeExpr, ValidationResult)] -> (ShapeExpr, ValidationResult)
combineResult = foldl1 combine
combine (sh , r) (_, r')
| r == r' = (sh, Positive)
| otherwise = (sh, Negative)
| weso/shexkell | src/Shexkell/Data/ShapeMap.hs | mit | 2,610 | 0 | 12 | 601 | 680 | 379 | 301 | 39 | 1 |
isPrime :: Integer -> Bool
isPrime n
| n == 1 = False
| n > 1 = and [ mod n i /= 0 | i <- [2..n], i*i <= n]
| otherwise = False
factorize :: Integer -> [Integer]
factorize n = factorize_worker n 2
factorize_worker :: Integer -> Integer -> [Integer]
factorize_worker n factor
| n == 1 = []
| mod n factor == 0 = factor:factorize_worker (div n factor) factor
| otherwise = factorize_worker n (factor+1)
factorList :: [Integer] -> [(Integer, Integer)]
factorList x = factorList_worker x 0
factorList_worker :: [Integer] -> Integer -> [(Integer,Integer)]
factorList_worker [] n = []
factorList_worker [x] n = [(x, n + 1)]
factorList_worker (x1:x2:xs) n
| x1 == x2 = factorList_worker (x2:xs) (n + 1)
| x1 /= x2 = (x1, n + 1):factorList_worker (x2:xs) 0
main = do
print $ isPrime 156789613
print $ (factorList.factorize) 156789613
| mino2357/Hello_Haskell | src/test.hs | mit | 860 | 0 | 10 | 182 | 427 | 216 | 211 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-volumes-host.html
module Stratosphere.ResourceProperties.ECSTaskDefinitionHostVolumeProperties where
import Stratosphere.ResourceImports
-- | Full data type definition for ECSTaskDefinitionHostVolumeProperties. See
-- 'ecsTaskDefinitionHostVolumeProperties' for a more convenient
-- constructor.
data ECSTaskDefinitionHostVolumeProperties =
ECSTaskDefinitionHostVolumeProperties
{ _eCSTaskDefinitionHostVolumePropertiesSourcePath :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON ECSTaskDefinitionHostVolumeProperties where
toJSON ECSTaskDefinitionHostVolumeProperties{..} =
object $
catMaybes
[ fmap (("SourcePath",) . toJSON) _eCSTaskDefinitionHostVolumePropertiesSourcePath
]
-- | Constructor for 'ECSTaskDefinitionHostVolumeProperties' containing
-- required fields as arguments.
ecsTaskDefinitionHostVolumeProperties
:: ECSTaskDefinitionHostVolumeProperties
ecsTaskDefinitionHostVolumeProperties =
ECSTaskDefinitionHostVolumeProperties
{ _eCSTaskDefinitionHostVolumePropertiesSourcePath = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-volumes-host.html#cfn-ecs-taskdefinition-volumes-host-sourcepath
ecstdhvpSourcePath :: Lens' ECSTaskDefinitionHostVolumeProperties (Maybe (Val Text))
ecstdhvpSourcePath = lens _eCSTaskDefinitionHostVolumePropertiesSourcePath (\s a -> s { _eCSTaskDefinitionHostVolumePropertiesSourcePath = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/ECSTaskDefinitionHostVolumeProperties.hs | mit | 1,676 | 0 | 12 | 158 | 174 | 101 | 73 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module VDOM.Adapter.Types where
import Data.Int
import Data.Word
import Data.Text
data Property = Property {
propertyName :: String
, propertyValue :: JSProp
} deriving (Show)
data VNodeAdapter = VNodeAdapter {
vNodeAdapterTagName :: String
, vNodeAdapterInnerText :: String
, vNodeAdapterProps :: [Property]
, vNodeAdapterChildren :: [VNodeAdapter]
} deriving (Show)
data JSProp = JSPBool Bool
| JSPText Text
| JSPInt Int
| JSPInt8 Int8
| JSPInt16 Int16
| JSPInt32 Int32
| JSPWord Word
| JSPWord8 Word8
| JSPWord16 Word16
| JSPWord32 Word32
| JSPFloat Float
| JSPDouble Double
deriving (Show)
test :: VNodeAdapter
test = VNodeAdapter "h1" "" [] [emptyDiv,buttonTag]
where emptyDiv = VNodeAdapter "div" "" [] []
buttonTag = VNodeAdapter "button" "Button Thing!" [buttonProp] []
buttonProp = Property "type" $ JSPText "button"
-- Should render to something like:
-- <h1>
-- <div>
-- <button type="button">Button Thing!
| smurphy8/shakespeare-dynamic | vdom-adapter/src/VDOM/Adapter/Types.hs | mit | 1,146 | 0 | 9 | 338 | 253 | 149 | 104 | 33 | 1 |
module Main(main) where
import Snap.Snaplet
import Snap
import Site
main :: IO ()
main = do
(_, site, _) <- runSnaplet Nothing haskitterInit
quickHttpServe site -- Start the Snap server
| lkania/Haskitter | src/Main.hs | mit | 204 | 0 | 8 | 48 | 63 | 35 | 28 | 8 | 1 |
{-# htermination show :: (Show a) => (Maybe a) -> String #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_show_10.hs | mit | 61 | 0 | 2 | 12 | 3 | 2 | 1 | 1 | 0 |
module Y2017.M08.D04.Exercise where
-- below import available via 1HaskellADay git repository
import Y2017.M08.D01.Exercise
{--
Yesterday we looked at a rot13 cypher, but that was unsatisfactory for a couple
of reasons:
1. it rotted down low on the 13 only. What if you want to rot 12? Would you
have to write a whole new function to rot 12? Why, yes, you would.
2. It was stuck on uppercase ASCII: if you went outside that range, you ended
up in uppercase ASCII land anyway and descrambling would become confused. Try
munging then demunging pi to 20 places. What would you get? Now switch to
Icelandic: how do you munge then demunge "Verði þér að góðu"?
Let's fix this.
How?
Well, when I ran into errors rot13'ing lowercase letters I did some trolling on
twitter (much to the autistic screeching of some purists) and on the interwebz,
and, thanks to SamirTalwar, I came across the Cæsar Cipher.
What is the Cæsar Cipher?
well from https://gist.github.com/SamirTalwar/2f93b85c08918d91015d47d45529c82e
we see it is:
--}
caesarCipher :: Int -> [a] -> [(a, a)]
caesarCipher n xs = zip xs (drop n (cycle xs))
-- Cool! How do you use it? Today's Haskell exercise.
-- Given quick from yesterday, imported above, munge and demunge that
-- string using the caesarCipher 12. Hint: SamirTalwar shows how to use upper
-- and lowercase character sets when munging and demunging.
-- Hint-hint: how do you demunge caesarCipher 12 text?
-- Now, given the Icelandic alphabet, upper and lower case:
iceHi, iceLo :: String
iceHi = "AÁBDÐEÉFGHIÍJKLMNOÓPRSTUÚVXYÝÞÆÖ"
iceLo = "aábdðeéfghiíjklmnoóprstuúvxyýþæö"
-- munge and demunge 'you're welcome' in Icelandic using caesarCipher 17
youreWelcome :: String
youreWelcome = "Verði þér að góðu"
| geophf/1HaskellADay | exercises/HAD/Y2017/M08/D04/Exercise.hs | mit | 1,756 | 0 | 9 | 293 | 105 | 66 | 39 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
module Unison.TypePrinter where
import Unison.Prelude
import qualified Data.Map as Map
import Unison.HashQualified (HashQualified)
import Unison.Name ( Name )
import Unison.NamePrinter (styleHashQualified'')
import Unison.PrettyPrintEnv (PrettyPrintEnv, Imports, elideFQN)
import qualified Unison.PrettyPrintEnv as PrettyPrintEnv
import Unison.Reference (pattern Builtin)
import Unison.Type
import Unison.Util.Pretty (ColorText, Pretty, Width)
import Unison.Util.ColorText (toPlain)
import qualified Unison.Util.SyntaxText as S
import Unison.Util.SyntaxText (SyntaxText)
import qualified Unison.Util.Pretty as PP
import Unison.Var (Var)
import qualified Unison.Var as Var
import qualified Unison.Builtin.Decls as DD
pretty :: forall v a . (Var v) => PrettyPrintEnv -> Type v a -> Pretty ColorText
pretty ppe = PP.syntaxToColor . prettySyntax ppe
prettySyntax :: forall v a . (Var v) => PrettyPrintEnv -> Type v a -> Pretty SyntaxText
prettySyntax ppe = pretty0 ppe mempty (-1)
pretty' :: Var v => Maybe Width -> PrettyPrintEnv -> Type v a -> String
pretty' (Just width) n t =
toPlain $ PP.render width $ PP.syntaxToColor $ pretty0 n Map.empty (-1) t
pretty' Nothing n t =
toPlain $ PP.render maxBound $ PP.syntaxToColor $ pretty0 n Map.empty (-1) t
{- Explanation of precedence handling
We illustrate precedence rules as follows.
>=10
10f 10x
This example shows that a type application f x is enclosed in parentheses
whenever the ambient precedence around it is >= 10, and that when printing
its two components, an ambient precedence of 10 is used in both places.
The pretty-printer uses the following rules for printing types.
>=10
10f 10x
{ 0e } 10t
>=0
0a -> 0b
-}
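-- For instance (illustrative only, not generated output): under these rules
-- an application nested as an argument of another application is
-- parenthesised ("F (G x)"), as is a function type used as an argument
-- ("F (a -> b)"), while a chain of arrows at the top level needs no extra
-- parentheses ("a -> b -> c").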
pretty0
:: forall v a . (Var v)
=> PrettyPrintEnv
-> Imports
-> Int
-> Type v a
-> Pretty SyntaxText
pretty0 n im p tp = prettyRaw n im p (cleanup (removePureEffects tp))
prettyRaw
:: forall v a . (Var v)
=> PrettyPrintEnv
-> Imports
-> Int
-> Type v a
-> Pretty SyntaxText
-- p is the operator precedence of the enclosing context (a number from 0 to
-- 11, or -1 to avoid outer parentheses unconditionally). Function
-- application has precedence 10.
prettyRaw n im p tp = go n im p tp
where
go :: PrettyPrintEnv -> Imports -> Int -> Type v a -> Pretty SyntaxText
go n im p tp = case stripIntroOuters tp of
Var' v -> fmt S.Var $ PP.text (Var.name v)
DD.TupleType' xs | length xs /= 1 -> PP.parenthesizeCommas $ map (go n im 0) xs
-- Would be nice to use a different SyntaxHighlights color if the reference is an ability.
Ref' r -> styleHashQualified'' (fmt $ S.Reference r) $ elideFQN im (PrettyPrintEnv.typeName n r)
Cycle' _ _ -> fromString "error: TypeParser does not currently emit Cycle"
Abs' _ -> fromString "error: TypeParser does not currently emit Abs"
Ann' _ _ -> fromString "error: TypeParser does not currently emit Ann"
App' (Ref' (Builtin "Sequence")) x ->
PP.group $ (fmt S.DelimiterChar "[") <> go n im 0 x <> (fmt S.DelimiterChar "]")
Apps' f xs -> PP.parenthesizeIf (p >= 10) $ go n im 9 f `PP.hang` PP.spaced
(go n im 10 <$> xs)
Effect1' e t ->
PP.parenthesizeIf (p >= 10) $ go n im 9 e <> " " <> go n im 10 t
Effects' es -> effects (Just es)
ForallsNamed' vs' body ->
let vs = filter (\v -> Var.name v /= "()") vs'
in if p < 0 && all Var.universallyQuantifyIfFree vs
then go n im p body
else paren (p >= 0) $
let vformatted = PP.sep " " (fmt S.Var . PP.text . Var.name <$> vs)
in (fmt S.TypeOperator "∀ " <> vformatted <> fmt S.TypeOperator ".")
`PP.hang` go n im (-1) body
t@(Arrow' _ _) -> case t of
EffectfulArrows' (Ref' DD.UnitRef) rest -> arrows True True rest
EffectfulArrows' fst rest ->
case fst of
Var' v | Var.name v == "()"
-> fmt S.DelayForceChar "'" <> arrows False True rest
_ -> PP.parenthesizeIf (p >= 0) $
go n im 0 fst <> arrows False False rest
_ -> "error"
_ -> "error"
effects Nothing = mempty
effects (Just es) = PP.group $ (fmt S.AbilityBraces "{") <> PP.commas (go n im 0 <$> es) <> (fmt S.AbilityBraces "}")
arrow delay first mes =
(if first then mempty else PP.softbreak <> (fmt S.TypeOperator "->"))
<> (if delay then (if first then (fmt S.DelayForceChar "'") else (fmt S.DelayForceChar " '")) else mempty)
<> effects mes
<> if (isJust mes) || (not delay) && (not first) then " " else mempty
arrows delay first [(mes, Ref' DD.UnitRef)] = arrow delay first mes <> (fmt S.Unit "()")
arrows delay first ((mes, Ref' DD.UnitRef) : rest) =
arrow delay first mes <> (parenNoGroup delay $ arrows True True rest)
arrows delay first ((mes, arg) : rest) =
arrow delay first mes
<> ( parenNoGroup (delay && (not $ null rest))
$ go n im 0 arg
<> arrows False False rest
)
arrows False False [] = mempty
arrows False True [] = mempty -- not reachable
arrows True _ [] = mempty -- not reachable
paren True s = PP.group $ ( fmt S.Parenthesis "(" ) <> s <> ( fmt S.Parenthesis ")" )
paren False s = PP.group s
parenNoGroup True s = ( fmt S.Parenthesis "(" ) <> s <> ( fmt S.Parenthesis ")" )
parenNoGroup False s = s
fmt :: S.Element r -> Pretty (S.SyntaxText' r) -> Pretty (S.SyntaxText' r)
fmt = PP.withSyntax
-- todo: provide sample output in comment
prettySignatures'
:: Var v => PrettyPrintEnv
-> [(HashQualified Name, Type v a)]
-> [Pretty ColorText]
prettySignatures' env ts = map PP.syntaxToColor $ prettySignatures'' env ts
prettySignatures''
:: Var v => PrettyPrintEnv
-> [(HashQualified Name, Type v a)]
-> [Pretty SyntaxText]
prettySignatures'' env ts = PP.align
[ ( styleHashQualified'' (fmt $ S.HashQualifier name) name
, (fmt S.TypeAscriptionColon ": " <> pretty0 env Map.empty (-1) typ)
`PP.orElse` ( fmt S.TypeAscriptionColon ": "
<> PP.indentNAfterNewline 2 (pretty0 env Map.empty (-1) typ)
)
)
| (name, typ) <- ts
]
-- todo: provide sample output in comment; different from prettySignatures'
prettySignaturesAlt'
:: Var v => PrettyPrintEnv
-> [([HashQualified Name], Type v a)]
-> [Pretty ColorText]
prettySignaturesAlt' env ts = map PP.syntaxToColor $ PP.align
[ ( PP.commas . fmap (\name -> styleHashQualified'' (fmt $ S.HashQualifier name) name) $ names
, (fmt S.TypeAscriptionColon ": " <> pretty0 env Map.empty (-1) typ)
`PP.orElse` ( fmt S.TypeAscriptionColon ": "
<> PP.indentNAfterNewline 2 (pretty0 env Map.empty (-1) typ)
)
)
| (names, typ) <- ts
]
-- prettySignatures'' :: Var v => PrettyPrintEnv -> [(Name, Type v a)] -> [Pretty ColorText]
-- prettySignatures'' env ts = prettySignatures' env (first HQ.fromName <$> ts)
prettySignatures
:: Var v
=> PrettyPrintEnv
-> [(HashQualified Name, Type v a)]
-> Pretty ColorText
prettySignatures env ts = PP.lines $
PP.group <$> prettySignatures' env ts
prettySignaturesAlt
:: Var v
=> PrettyPrintEnv
-> [([HashQualified Name], Type v a)]
-> Pretty ColorText
prettySignaturesAlt env ts = PP.lines $
PP.group <$> prettySignaturesAlt' env ts
| unisonweb/platform | parser-typechecker/src/Unison/TypePrinter.hs | mit | 7,559 | 0 | 24 | 1,922 | 2,500 | 1,275 | 1,225 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification #-}
module Jakway.Blackjack.Util where
import Data.List (elemIndex)
import System.IO
import System.Exit hiding (die)
import System.Console.GetOpt
import Data.Maybe (catMaybes)
innerMapTuple4 :: forall t t1. (t -> t1) -> (t, t, t, t) -> (t1, t1, t1, t1)
innerMapTuple4 f (a,b,c,d) = (f a, f b, f c, f d)
innerMapTuple3 :: forall t t1. (t -> t1) -> (t, t, t) -> (t1, t1, t1)
innerMapTuple3 f (a,b,c) = (f a, f b, f c)
innerMapTuple2 :: forall t t1. (t -> t1) -> (t, t) -> (t1, t1)
innerMapTuple2 f (a,b) = (f a, f b)
flipInner2 :: (a -> b -> c -> d) -> a -> c -> b -> d
flipInner2 f x y z = f x z y
--insert the 2nd string into the first, replacing the character at the
--passed position
replaceElem :: Int -> String -> String -> String
replaceElem pos orig ins = let (front, back) = splitAt pos orig
in front ++ ins ++ (tail back)
default_replacement_character :: Char
default_replacement_character = '?'
-- |"static substitution with replacement character"
-- it's an error to have `length orig` be less than the number of
-- replacement strings
ssub_wrep_char :: Char -> String -> [String] -> String
ssub_wrep_char rep_char orig [] = if (rep_char `elem` orig) == False then orig else error $ "Not enough replacement strings in string " ++ orig
ssub_wrep_char rep_char orig (x:xs) = case xIndex of Nothing -> error "Could not find string to replace!"
Just pos -> ssub_wrep_char rep_char (replaceElem pos orig x) xs
where xIndex = elemIndex rep_char orig
--use the default replacement character, ?
ssub :: String -> [String] -> String
ssub = ssub_wrep_char default_replacement_character
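-- for illustration (my own example, not from the original tests):
--   ssub "x = ?, y = ?" ["1", "2"]  ==  "x = 1, y = 2"
-- each '?' is consumed left to right by the corresponding replacement string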
-- |simple implementation of die, which only exists in base >= 4.8
die :: String -> IO a
die errMsg = hPutStrLn stderr errMsg >> exitFailure
parseOptions :: [String] -> [OptDescr a] -> String -> IO ([a], [String])
parseOptions argv options usage =
case getOpt Permute options argv of
(o,n,[] ) -> return (o,n)
(_,_,errs) -> ioError (userError (concat errs ++ usageInfo usage options))
-- |returns the underlying value of the flag if exactly one
-- exists
getSingleFlag :: (a -> Maybe b) -> [a] -> Maybe b
getSingleFlag f allFlags = case catMaybes $ map f allFlags of
  [x] -> Just x
  _   -> Nothing
| tjakway/blackjack-simulator | src/Jakway/Blackjack/Util.hs | mit | 2,422 | 0 | 13 | 583 | 823 | 452 | 371 | 37 | 3 |
import Control.Monad
import Data.Set (Set)
import qualified Data.Set as Set
data Pos = Pos { x :: Int, y :: Int } deriving (Eq, Ord)
data Node = Node { pos :: Pos
, size :: Int
, used :: Int
} deriving (Eq, Ord)
type State = (Pos, Pos)
data Grid = Grid { wall :: Pos
, width :: Int
, height :: Int
}
avail :: Node -> Int
avail node = size node - used node
viablePairs :: [Node] -> [(Node, Node)]
viablePairs nodes = do
a <- nodes
b <- nodes
guard $ used a > 0
guard $ pos a /= pos b
guard $ used a <= avail b
return (a, b)
neighbors :: Pos -> [Pos]
neighbors (Pos x y) = do
(x', y') <- [(x-1, y), (x, y-1), (x, y+1), (x+1, y)]
return $ Pos x' y'
next :: Grid -> State -> [State]
next grid (b, goal) = do
a <- neighbors b
guard $ x a >= 0 && x a <= width grid
guard $ y a >= 0 && y a <= height grid
guard $ y a /= y (wall grid) || x a < x (wall grid)
return (a, if goal == a then b else goal)
bfs :: Grid -> Set State -> Set State -> Int
bfs grid visited unvisited =
if any ((== Pos 0 0) . snd) unvisited
then 0
else let unvisited' = Set.filter (`Set.notMember` visited) $ Set.fromList $ concatMap (next grid) unvisited
visited' = Set.union visited unvisited
in 1 + bfs grid visited' unvisited'
parse :: [String] -> Node
parse [x, size, used, _, _] =
let [(x', y)] = reads $ drop (length "/dev/grid/node-x") x
y' = read $ drop (length "-y") y
[(size', _)] = reads size
[(used', _)] = reads used
in Node (Pos x' y') size' used'
part1 :: String -> Int
part1 = length . viablePairs . map (parse . words) . drop 2 . lines
part2 :: String -> Int
part2 input =
let nodes = map (parse . words) . drop 2 . lines $ input
empty = head [pos n | n <- nodes, used n == 0]
Pos width height = pos $ last nodes
wall = head [pos n | n <- nodes, size n > 500]
in bfs (Grid wall width height) Set.empty $ Set.singleton (empty, Pos width 0)
| seishun/aoc2016 | day22.hs | mit | 2,020 | 0 | 14 | 601 | 1,024 | 530 | 494 | 56 | 2 |
module Main where
import Network.Hubbub.Queue.Test
import Network.Hubbub.SubscriptionDb.Test
import Network.Hubbub.Http.Test
import Network.Hubbub.Hmac.Test
import Network.Hubbub.Internal.Test
import Network.Hubbub.Test
import Prelude (IO)
import Test.Tasty (defaultMain,testGroup,TestTree)
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "All Tests"
[ subscriptionDbSuite
, queueSuite
, httpSuite
, hmacSuite
, internalSuite
, hubbubSuite
]
| benkolera/haskell-hubbub | test/Test.hs | mit | 489 | 0 | 6 | 70 | 120 | 76 | 44 | 19 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-vpccidrblock.html
module Stratosphere.Resources.EC2VPCCidrBlock where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2VPCCidrBlock. See 'ec2VPCCidrBlock' for
-- a more convenient constructor.
data EC2VPCCidrBlock =
EC2VPCCidrBlock
{ _eC2VPCCidrBlockAmazonProvidedIpv6CidrBlock :: Maybe (Val Bool)
, _eC2VPCCidrBlockCidrBlock :: Maybe (Val Text)
, _eC2VPCCidrBlockVpcId :: Val Text
} deriving (Show, Eq)
instance ToResourceProperties EC2VPCCidrBlock where
toResourceProperties EC2VPCCidrBlock{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::EC2::VPCCidrBlock"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("AmazonProvidedIpv6CidrBlock",) . toJSON) _eC2VPCCidrBlockAmazonProvidedIpv6CidrBlock
, fmap (("CidrBlock",) . toJSON) _eC2VPCCidrBlockCidrBlock
, (Just . ("VpcId",) . toJSON) _eC2VPCCidrBlockVpcId
]
}
-- | Constructor for 'EC2VPCCidrBlock' containing required fields as
-- arguments.
ec2VPCCidrBlock
:: Val Text -- ^ 'ecvpccbVpcId'
-> EC2VPCCidrBlock
ec2VPCCidrBlock vpcIdarg =
EC2VPCCidrBlock
{ _eC2VPCCidrBlockAmazonProvidedIpv6CidrBlock = Nothing
, _eC2VPCCidrBlockCidrBlock = Nothing
, _eC2VPCCidrBlockVpcId = vpcIdarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-vpccidrblock.html#cfn-ec2-vpccidrblock-amazonprovidedipv6cidrblock
ecvpccbAmazonProvidedIpv6CidrBlock :: Lens' EC2VPCCidrBlock (Maybe (Val Bool))
ecvpccbAmazonProvidedIpv6CidrBlock = lens _eC2VPCCidrBlockAmazonProvidedIpv6CidrBlock (\s a -> s { _eC2VPCCidrBlockAmazonProvidedIpv6CidrBlock = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-vpccidrblock.html#cfn-ec2-vpccidrblock-cidrblock
ecvpccbCidrBlock :: Lens' EC2VPCCidrBlock (Maybe (Val Text))
ecvpccbCidrBlock = lens _eC2VPCCidrBlockCidrBlock (\s a -> s { _eC2VPCCidrBlockCidrBlock = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-vpccidrblock.html#cfn-ec2-vpccidrblock-vpcid
ecvpccbVpcId :: Lens' EC2VPCCidrBlock (Val Text)
ecvpccbVpcId = lens _eC2VPCCidrBlockVpcId (\s a -> s { _eC2VPCCidrBlockVpcId = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/EC2VPCCidrBlock.hs | mit | 2,426 | 0 | 15 | 301 | 370 | 211 | 159 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-fleet-domainjoininfo.html
module Stratosphere.ResourceProperties.AppStreamFleetDomainJoinInfo where
import Stratosphere.ResourceImports
-- | Full data type definition for AppStreamFleetDomainJoinInfo. See
-- 'appStreamFleetDomainJoinInfo' for a more convenient constructor.
data AppStreamFleetDomainJoinInfo =
AppStreamFleetDomainJoinInfo
{ _appStreamFleetDomainJoinInfoDirectoryName :: Maybe (Val Text)
, _appStreamFleetDomainJoinInfoOrganizationalUnitDistinguishedName :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON AppStreamFleetDomainJoinInfo where
toJSON AppStreamFleetDomainJoinInfo{..} =
object $
catMaybes
[ fmap (("DirectoryName",) . toJSON) _appStreamFleetDomainJoinInfoDirectoryName
, fmap (("OrganizationalUnitDistinguishedName",) . toJSON) _appStreamFleetDomainJoinInfoOrganizationalUnitDistinguishedName
]
-- | Constructor for 'AppStreamFleetDomainJoinInfo' containing required fields
-- as arguments.
appStreamFleetDomainJoinInfo
:: AppStreamFleetDomainJoinInfo
appStreamFleetDomainJoinInfo =
AppStreamFleetDomainJoinInfo
{ _appStreamFleetDomainJoinInfoDirectoryName = Nothing
, _appStreamFleetDomainJoinInfoOrganizationalUnitDistinguishedName = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-fleet-domainjoininfo.html#cfn-appstream-fleet-domainjoininfo-directoryname
asfdjiDirectoryName :: Lens' AppStreamFleetDomainJoinInfo (Maybe (Val Text))
asfdjiDirectoryName = lens _appStreamFleetDomainJoinInfoDirectoryName (\s a -> s { _appStreamFleetDomainJoinInfoDirectoryName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-fleet-domainjoininfo.html#cfn-appstream-fleet-domainjoininfo-organizationalunitdistinguishedname
asfdjiOrganizationalUnitDistinguishedName :: Lens' AppStreamFleetDomainJoinInfo (Maybe (Val Text))
asfdjiOrganizationalUnitDistinguishedName = lens _appStreamFleetDomainJoinInfoOrganizationalUnitDistinguishedName (\s a -> s { _appStreamFleetDomainJoinInfoOrganizationalUnitDistinguishedName = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/AppStreamFleetDomainJoinInfo.hs | mit | 2,315 | 0 | 12 | 205 | 264 | 151 | 113 | 27 | 1 |
import Control.Applicative ((<*>),(<$>))
class CompState s where
ret :: a -> s a
(=:) :: s a -> (a -> s b) -> s b
(!) :: s a -> s b -> s b
x ! y = x =: (\_ -> y)
newtype State s a = State { runState :: s -> (a,s) }
get :: State s s
get = State $ \s -> (s, s)
put :: s -> State s ()
put s = State $ \_ -> ((), s)
modify :: (s -> s) -> State s ()
modify f = State $ \s -> ((), f s)
instance CompState (State s) where
ret x = State $ \s -> (x,s)
h =: f = State $ \s ->
let (a, newState) = runState h s
in runState (f a) newState
type Stack a b = State [a] b
type IntStack a = Stack Int a
push :: a -> Stack a ()
push x = modify (x:)
unsafePop :: Stack a a
unsafePop =
get =: \(x:xs) ->
put xs !
ret x
pop :: Stack a (Maybe a)
pop =
get =: \ls ->
case ls of
[] -> ret Nothing
x:xs -> put xs ! ret (Just x)
binOpN :: Int -> (a -> a -> a) -> Stack a (Maybe a)
binOpN 1 op = binOp op
binOpN n op =
binOp op !
binOpN (n - 1) op
-- Stack a (Maybe a) is a new monad?
stackMaybe :: Maybe a -> Stack a (Maybe a)
stackMaybe r =
case r of
Just x -> push x ! ret r
Nothing -> ret r
binOp :: (a -> a -> a) -> Stack a (Maybe a)
binOp op =
pop =: \x ->
pop =: \y ->
stackMaybe (op <$> x <*> y)
type StackLoc = Int
type InstrLoc = Int
type ClassName = String
type MethodName = String
data Instruction =
MOV StackLoc StackLoc
| LOADIMM StackLoc Int
| ADD StackLoc StackLoc StackLoc
| SUB StackLoc StackLoc StackLoc
| MULT StackLoc StackLoc StackLoc
| DIV StackLoc StackLoc StackLoc
| LESS StackLoc StackLoc StackLoc
| AND StackLoc StackLoc StackLoc
| OR StackLoc StackLoc StackLoc
| EQUAL StackLoc StackLoc StackLoc
| JUMP InstrLoc
| CJUMP StackLoc InstrLoc InstrLoc
| INT2STRING StackLoc StackLoc
| BOOL2STRING StackLoc StackLoc
| GETFLD StackLoc Int
| PUTFLD Int StackLoc
| NEWSTRING StackLoc String
| CATSTRINGS StackLoc StackLoc StackLoc
| NEWOBJECT StackLoc ClassName Int
| NEWARRAY StackLoc StackLoc
| RETURN StackLoc
| LOADRESULT StackLoc
| INVOKE StackLoc MethodName [StackLoc]
| JUMPIND StackLoc
| ARRAYREF StackLoc StackLoc StackLoc
stackTest :: IntStack Int
stackTest =
push 5 !
push 2 !
pop !
push 23 !
pop =: \(Just x) ->
push (x + 4) !
unsafePop
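-- Hand-traced expectation for the program above (a sketch, not a verified
-- test):
--   runState stackTest []  ==  (27, [5])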
| WraithM/SemiColonTutorial | SemiColon.hs | gpl-3.0 | 2,428 | 0 | 13 | 748 | 1,029 | 547 | 482 | 87 | 2 |
-- GCD (greatest common divisor) by Euclid's algorithm
{- we start with two numbers, x = 20 and y = 16
 - if x/y leaves remainder 0, then y is the GCD
 - otherwise set x = y and y = remainder, and repeat until the remainder is 0, in which case y is the GCD
 - -}
mcd x y = if resto == 0
then y
else mcd y resto
where resto = x `mod` y
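-- a worked trace of the definition above (for illustration):
--   mcd 20 16  ->  resto = 4  ->  mcd 16 4
--   mcd 16 4   ->  resto = 0  ->  4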
mcdPm _ 1 = 1
mcdPm 1 _ = 1
mcdPm x y
| x `mod` y == 0 = y
mcdPm x y = mcdPm y (x `mod` y)
-- we can use case of
sayAmount n = case n of
1 -> "un"
2 -> "dous"
    --n `mod` 2 == 0 -> "par" -- we cannot evaluate expressions here
n -> "a ostia deles"
-- or pattern matching
-- we say what happens when the parameter is x
-- in pattern matching we cannot run checks while matching the argument
-- _
-- we can also use a wildcard for the unknown value
sayAmountPM 1 = "un"
sayAmountPM 2 = "dous"
sayAmountPM n = "a ostia deles"
sayAmountGM n
| n == 1 = "un"
| n == 2 = "dous"
| n `mod` 2 == 0 = "par"
| otherwise = "a ostia deles" -- nun caso non contemplado
{- we have control structures such as if then else
 - with pattern matching we cannot evaluate expressions either
 - guard matching is very powerful because we can evaluate (boolean-only) expressions
 - with case of we cannot evaluate expressions
 -}
-- pattern matching is that powerful and clear
isEmpty [] = True
--isEmpty aList = False
isEmpty _ = False -- as we can see, with the wildcard it works the same, but we cannot use the variable
-- we tell it to accept a list; when we bind it, it is decomposed into head and tail
myhead (x:xs) = x
myhead [] = error "No head in an empty list"
mytail :: [a] -> [a]
mytail (x:xs) = xs
--mytail _ = error "No tail in an empty list"
mytail _ = []
| jmlb23/haskell | ch06/euclides.hs | gpl-3.0 | 1,739 | 0 | 9 | 513 | 326 | 174 | 152 | 28 | 3 |
{-# LANGUAGE RankNTypes, GADTs #-}
module Data.Exists where
import Unsafe.Coerce (unsafeCoerce)
data Exists f where
Exists :: f a -> Exists f
mkExists :: forall f a. f a -> Exists f
mkExists = unsafeCoerce
runExists :: forall f r. (forall a. f a -> r) -> (Exists f -> r)
runExists = unsafeCoerce
| graninas/Haskell-Algorithms | Tests/TransitionGraph/transition-graph/src/Data/Exists.hs | gpl-3.0 | 302 | 0 | 10 | 60 | 108 | 61 | 47 | 9 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# OPTIONS_GHC -fno-warn-overlapping-patterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Handler.DB.RouteTargetedtextmessage where
import Handler.DB.Enums
import Handler.DB.Esqueleto
import Handler.DB.Internal
import Handler.DB.Validation
import qualified Handler.DB.FilterSort as FS
import qualified Handler.DB.PathPieces as PP
import Prelude
import Database.Esqueleto
import Database.Esqueleto.Internal.Sql (unsafeSqlBinOp)
import qualified Database.Persist as P
import Database.Persist.TH
import Yesod.Auth (requireAuth, requireAuthId, YesodAuth, AuthId, YesodAuthPersist, AuthEntity)
import Yesod.Core hiding (fileName, fileContentType)
import Yesod.Persist (runDB, YesodPersist, YesodPersistBackend)
import Control.Monad (when)
import Data.Aeson ((.:), (.:?), (.!=), FromJSON, parseJSON, decode)
import Data.Aeson.TH
import Data.Int
import Data.Word
import Data.Time
import Data.Text.Encoding (encodeUtf8)
import Data.Typeable (Typeable)
import qualified Data.Attoparsec as AP
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as AT
import qualified Data.ByteString.Lazy as LBS
import Data.Maybe
import qualified Data.Text.Read
import qualified Data.Text as T
import Data.Text (Text)
import qualified Data.List as DL
import Control.Monad (mzero, forM_)
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Network.HTTP.Conduit as C
import qualified Network.Wai as W
import Data.Conduit.Lazy (lazyConsume)
import Network.HTTP.Types (status200, status400, status403, status404)
import Blaze.ByteString.Builder.ByteString (fromByteString)
import Control.Applicative ((<$>), (<*>))
import qualified Data.HashMap.Lazy as HML
import qualified Data.HashMap.Strict as HMS
import Handler.TextMessage (addReplyTextMessageRecipient,addTextMessageRecipients)
postTargetedtextmessageR :: forall master. (
YesodAuthPersist master,
AuthEntity master ~ User,
AuthId master ~ Key User,
YesodPersistBackend master ~ SqlBackend)
=> HandlerT DB (HandlerT master IO) A.Value
postTargetedtextmessageR = lift $ runDB $ do
authId <- lift $ requireAuthId
jsonResult <- parseJsonBody
jsonBody <- case jsonResult of
A.Error err -> sendResponseStatus status400 $ A.object [ "message" .= ( "Could not decode JSON object from request body : " ++ err) ]
A.Success o -> return o
jsonBodyObj <- case jsonBody of
A.Object o -> return o
v -> sendResponseStatus status400 $ A.object [ "message" .= ("Expected JSON object in the request body, got: " ++ show v) ]
attr_text <- case HML.lookup "text" jsonBodyObj of
Just v -> case A.fromJSON v of
A.Success v' -> return v'
A.Error err -> sendResponseStatus status400 $ A.object [
"message" .= ("Could not parse value from attribute text in the JSON object in request body" :: Text),
"error" .= err
]
Nothing -> sendResponseStatus status400 $ A.object [
"message" .= ("Expected attribute text in the JSON object in request body" :: Text)
]
attr_query <- case HML.lookup "query" jsonBodyObj of
Just v -> case A.fromJSON v of
A.Success v' -> return v'
A.Error err -> sendResponseStatus status400 $ A.object [
"message" .= ("Could not parse value from attribute query in the JSON object in request body" :: Text),
"error" .= err
]
Nothing -> sendResponseStatus status400 $ A.object [
"message" .= ("Expected attribute query in the JSON object in request body" :: Text)
]
attr_dateOfBirthMonth <- case HML.lookup "dateOfBirthMonth" jsonBodyObj of
Just v -> case A.fromJSON v of
A.Success v' -> return v'
A.Error err -> sendResponseStatus status400 $ A.object [
"message" .= ("Could not parse value from attribute dateOfBirthMonth in the JSON object in request body" :: Text),
"error" .= err
]
Nothing -> sendResponseStatus status400 $ A.object [
"message" .= ("Expected attribute dateOfBirthMonth in the JSON object in request body" :: Text)
]
__currentTime <- liftIO $ getCurrentTime
(Entity _ __auth) <- lift $ requireAuth
runDB_result <- do
e1 <- do
return $ TextMessage {
textMessageText = attr_text
,
textMessagePhone = Nothing
,
textMessageSenderClientId = Nothing
,
textMessageReplyToTextMessageId = Nothing
,
textMessageQueued = Nothing
,
textMessageSent = Nothing
,
textMessageAborted = Nothing
,
textMessageDeletedVersionId = Nothing
,
textMessageActiveId = Nothing
,
textMessageActiveStartTime = __currentTime
,
textMessageActiveEndTime = Nothing
,
textMessageInsertionTime = __currentTime
,
textMessageInsertedByUserId = (Just authId)
}
vErrors <- lift $ validate e1
case vErrors of
xs@(_:_) -> sendResponseStatus status400 (A.object [
"message" .= ("Entity validation failed" :: Text),
"errors" .= toJSON xs
])
_ -> return ()
result_tId <- P.insert (e1 :: TextMessage)
addTextMessageRecipients (authId) (result_tId) (attr_query) (attr_dateOfBirthMonth)
e3 <- do
return $ UserGroupContent {
userGroupContentUserGroupId = (userDefaultUserGroupId __auth)
,
userGroupContentFileContentId = Nothing
,
userGroupContentUserGroupContentId = Nothing
,
userGroupContentUserContentId = Nothing
,
userGroupContentClientContentId = Nothing
,
userGroupContentTextMessageContentId = (Just result_tId)
,
userGroupContentDeletedVersionId = Nothing
}
vErrors <- lift $ validate e3
case vErrors of
xs@(_:_) -> sendResponseStatus status400 (A.object [
"message" .= ("Entity validation failed" :: Text),
"errors" .= toJSON xs
])
_ -> return ()
P.insert (e3 :: UserGroupContent)
return $ A.object [ "id" .= (toJSON result_tId) ]
return $ runDB_result
| tlaitinen/sms | backend/Handler/DB/RouteTargetedtextmessage.hs | gpl-3.0 | 7,783 | 0 | 19 | 2,507 | 1,521 | 856 | 665 | 146 | 11 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Products.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing product in your Merchant Center account. Only
-- updates attributes provided in the request.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.products.update@.
module Network.Google.Resource.Content.Products.Update
(
-- * REST Resource
ProductsUpdateResource
-- * Creating a Request
, productsUpdate
, ProductsUpdate
-- * Request Lenses
, puXgafv
, puMerchantId
, puUploadProtocol
, puUpdateMask
, puAccessToken
, puUploadType
, puPayload
, puProductId
, puCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.products.update@ method which the
-- 'ProductsUpdate' request conforms to.
type ProductsUpdateResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Word64) :>
"products" :>
Capture "productId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "updateMask" GFieldMask :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Product :> Patch '[JSON] Product
-- | Updates an existing product in your Merchant Center account. Only
-- updates attributes provided in the request.
--
-- /See:/ 'productsUpdate' smart constructor.
data ProductsUpdate =
ProductsUpdate'
{ _puXgafv :: !(Maybe Xgafv)
, _puMerchantId :: !(Textual Word64)
, _puUploadProtocol :: !(Maybe Text)
, _puUpdateMask :: !(Maybe GFieldMask)
, _puAccessToken :: !(Maybe Text)
, _puUploadType :: !(Maybe Text)
, _puPayload :: !Product
, _puProductId :: !Text
, _puCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProductsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'puXgafv'
--
-- * 'puMerchantId'
--
-- * 'puUploadProtocol'
--
-- * 'puUpdateMask'
--
-- * 'puAccessToken'
--
-- * 'puUploadType'
--
-- * 'puPayload'
--
-- * 'puProductId'
--
-- * 'puCallback'
productsUpdate
:: Word64 -- ^ 'puMerchantId'
-> Product -- ^ 'puPayload'
-> Text -- ^ 'puProductId'
-> ProductsUpdate
productsUpdate pPuMerchantId_ pPuPayload_ pPuProductId_ =
ProductsUpdate'
{ _puXgafv = Nothing
, _puMerchantId = _Coerce # pPuMerchantId_
, _puUploadProtocol = Nothing
, _puUpdateMask = Nothing
, _puAccessToken = Nothing
, _puUploadType = Nothing
, _puPayload = pPuPayload_
, _puProductId = pPuProductId_
, _puCallback = Nothing
}
-- | V1 error format.
puXgafv :: Lens' ProductsUpdate (Maybe Xgafv)
puXgafv = lens _puXgafv (\ s a -> s{_puXgafv = a})
-- | The ID of the account that contains the product. This account cannot be
-- a multi-client account.
puMerchantId :: Lens' ProductsUpdate Word64
puMerchantId
= lens _puMerchantId (\ s a -> s{_puMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
puUploadProtocol :: Lens' ProductsUpdate (Maybe Text)
puUploadProtocol
= lens _puUploadProtocol
(\ s a -> s{_puUploadProtocol = a})
-- | The comma-separated list of product attributes to be updated. Example:
-- \`\"title,salePrice\"\`. Attributes specified in the update mask without
-- a value specified in the body will be deleted from the product. Only
-- top-level product attributes can be updated. If not defined, product
-- attributes with set values will be updated and other attributes will
-- stay unchanged.
puUpdateMask :: Lens' ProductsUpdate (Maybe GFieldMask)
puUpdateMask
= lens _puUpdateMask (\ s a -> s{_puUpdateMask = a})
-- | OAuth access token.
puAccessToken :: Lens' ProductsUpdate (Maybe Text)
puAccessToken
= lens _puAccessToken
(\ s a -> s{_puAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
puUploadType :: Lens' ProductsUpdate (Maybe Text)
puUploadType
= lens _puUploadType (\ s a -> s{_puUploadType = a})
-- | Multipart request metadata.
puPayload :: Lens' ProductsUpdate Product
puPayload
= lens _puPayload (\ s a -> s{_puPayload = a})
-- | The REST ID of the product for which to update.
puProductId :: Lens' ProductsUpdate Text
puProductId
= lens _puProductId (\ s a -> s{_puProductId = a})
-- | JSONP
puCallback :: Lens' ProductsUpdate (Maybe Text)
puCallback
= lens _puCallback (\ s a -> s{_puCallback = a})
instance GoogleRequest ProductsUpdate where
type Rs ProductsUpdate = Product
type Scopes ProductsUpdate =
'["https://www.googleapis.com/auth/content"]
requestClient ProductsUpdate'{..}
= go _puMerchantId _puProductId _puXgafv
_puUploadProtocol
_puUpdateMask
_puAccessToken
_puUploadType
_puCallback
(Just AltJSON)
_puPayload
shoppingContentService
where go
= buildClient (Proxy :: Proxy ProductsUpdateResource)
mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Products/Update.hs | mpl-2.0 | 6,144 | 0 | 20 | 1,465 | 965 | 561 | 404 | 134 | 1 |
{-
- This file is part of Bilder.
-
- Bilder is free software: you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- Bilder is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public License
- along with Bilder. If not, see <http://www.gnu.org/licenses/>.
-
- Copyright © 2012-2013 Filip Lundborg
- Copyright © 2012-2013 Ingemar Ådahl
-
-}
{-# LANGUAGE UnicodeSyntax #-}
module Compiler.Split where
import Control.Monad.State
import Control.Monad.Writer
import Control.Applicative
import Data.Maybe
import qualified Data.Map as Map
import Data.List
import Compiler.Utils
import TypeChecker.Utils
import FrontEnd.PrintGrammar
import Compiler.Dependencies (stmDeps, expDeps, Dep(Fun, Var), DepList)
import TypeChecker.Types as Types hiding (
functionName
, statements
, retType
, pixelwise
, paramVars
, functions
, variables
, varType
)
import qualified TypeChecker.Types as Source (Source(functions), Source(variables))
import qualified TypeChecker.Types as Function (
functionName
, statements
, retType
, pixelwise
, paramVars
)
import FrontEnd.AbsGrammar
import Text.Printf
type Chunk = ([(SlimFun, [Stm])], String, SlimFun)
data SlimFun = SlimFun {
functionName ∷ String
, retType ∷ Type
, pixelwise ∷ Bool
, args ∷ [SlimVar]
, statements ∷ [Stm]
}
deriving (Eq)
instance Show SlimFun where
show (SlimFun name ret _ params stms) = printf "%s %s(%s)\n{%s\n}\n"
(show ret)
name
(show params)
(concatMap printTree stms)
data SlimVar = SlimVar {
varName ∷ String
, varType ∷ Type
, value ∷ Maybe Exp
}
deriving (Show, Eq)
data Shader = Shader {
funs ∷ Map.Map String SlimFun
, vars ∷ Map.Map String SlimVar
, output ∷ String
, inputs ∷ [SlimVar]
}
instance Show Shader where
show (Shader fs vs o is) =
printf "-x- Shader (in %s) (out %s)\n -- Variables -------------\n%s\n -- Functions ----------\n%s\n"
(show is)
(show o)
(show vs)
(show fs)
data St = St {
functions ∷ Map.Map String SlimFun
, variables ∷ Map.Map String SlimVar
, currentFun ∷ SlimFun
, gobbled ∷ [(SlimFun, [Stm])]
, gobbledFuns ∷ Map.Map String SlimFun
, inlineAssigns ∷ Map.Map String [Stm]
, freeRefs ∷ [Int]
, pendingRef ∷ [String]
, chunks ∷ [Chunk]
, dependencies ∷ [Dep]
}
pushFun ∷ SlimFun → State St ()
pushFun f = modify (\st → st { gobbled = (f,[]):gobbled st, currentFun = f})
popFun ∷ State St ()
popFun = do
modify (\st → st { gobbled = tail (gobbled st)})
-- i don't even
modify (\st → st { currentFun = fst (head (gobbled st)) })
addStm ∷ Stm → State St ()
addStm stm = do
gobs ← gets gobbled
f ← gets currentFun
if null gobs
then modify (\st → st { gobbled = [(f, [stm])]})
else unless (f == fst (head gobs)) undefined >>
modify (\st → st { gobbled = (f, stm:snd (head gobs)):tail (gobbled st)})
getFun ∷ String → State St SlimFun
getFun s = liftM (\x → let Just v = Map.lookup s x in v) $ gets functions
getFunMaybe ∷ String → State St (Maybe SlimFun)
getFunMaybe s = liftM (Map.lookup s) $ gets functions
gather ∷ Monoid a => (Exp → Writer a Exp) → [Stm] → a
gather f ss = execWriter (mapM_ (mapStmExpM f) ss)
-- Get a list of all calls made
calls ∷ [Stm] → [String]
calls = nub . gather collect
where
collect ∷ Exp → Writer [String] Exp
collect e@(ECall cid es) = tell [cIdentToString cid] >> mapM_ (mapExpM collect) es >> return e
collect e = mapExpM collect e
stripFun ∷ Function → SlimFun
stripFun f = SlimFun {
functionName = Function.functionName f
, retType = Function.retType f
, pixelwise = Function.pixelwise f
, args = map stripVar $ Function.paramVars f
, statements = Function.statements f
}
stripVar ∷ Variable → SlimVar
stripVar (Variable name _ t e) = SlimVar name t e
splitShader ∷ Shader → State St [Shader]
splitShader sh = do
cfree ← gets freeRefs
let (ss, sta) = runState (split mainFun) St {
functions = funs sh
, variables = vars sh
, currentFun = mainFun
, gobbled = []
, gobbledFuns = Map.empty
, inlineAssigns = Map.empty
, freeRefs = cfree
, pendingRef = []
, chunks = []
, dependencies = []
}
modify (\st → st { freeRefs = freeRefs sta })
return $ (head ss) { output = output sh } : tail ss
where
mainFun ∷ SlimFun
mainFun = let Just v = Map.lookup "main" (funs sh) in v
splitSource ∷ Source → [Shader]
splitSource src = map stripExternals $ evalState (split mainFun)
St {
functions = Map.map stripFun $ Source.functions src
, variables = Map.map stripVar $ Source.variables src
, currentFun = mainFun
, gobbled = []
, gobbledFuns = Map.empty
, inlineAssigns = Map.empty
, freeRefs = [1..]
, pendingRef = []
, chunks = []
, dependencies = []
}
where
mainFun ∷ SlimFun
mainFun = stripFun $ let Just v = Map.lookup "main" (Source.functions src) in v
stripExternals ∷ Shader → Shader
stripExternals shd = shd { funs = Map.map stripExt (funs shd)}
stripExt ∷ SlimFun → SlimFun
stripExt fun = fun { statements = expandStm stripExt' (statements fun)}
where
stripExt' ∷ [Stm] → [Stm]
stripExt' (SDecl (Dec qs _):ss) | any isExternal qs = stripExt' ss
stripExt' ss = expandStm stripExt' ss
newRef ∷ String → State St String
newRef s = do
newId ← gets (head . freeRefs)
let ref = printf "img%03d%s" newId s
modify (\st → st { freeRefs = tail (freeRefs st), pendingRef = ref:pendingRef st})
return ref
split ∷ SlimFun → State St [Shader]
split fun = do
mainShd ← collectMain fun
modify (\st → st { dependencies = [] })
shaders ← gets chunks >>= mapM buildShader
liftM concat $ sequence [
if hasImages s
then splitShader s
else return [s]
| s ← mainShd:shaders
]
hasImages ∷ Shader → Bool
hasImages sh = True `elem` map createsImg stms
where
stms = concatMap (statements . snd) $ (Map.toList . funs) sh
buildShader ∷ Chunk → State St Shader
buildShader (gs,ref,fun) = do
-- find all functions that will form the new "main".
inlinable ← addAssignments $ reverse $ dropWhile (\(f,_) → functionName f /= "main") (reverse gs)
let fs = Map.fromList $ (functionName fun, fun) : map ((\f → (functionName f, f)) . buildFun) gs
mainFun = (let Just v = Map.lookup "main" fs in v) { statements = concatMap snd (reverse inlinable) }
-- fetch the rest of the functions that are not to be inlined.
restFuns = map (\(f,_) → (functionName f, f)) (takeWhile (\(f,_) → functionName f /= "main") (reverse gs))
fs' ← gets functions
vs ← gets variables
return Shader {
funs = Map.unionWith (\vl _ → vl) (Map.fromList $ ("main", mainFun) : restFuns) fs'
, vars = vs
, output = ref
, inputs = nub $ findExternals (statements mainFun)
}
-- | Adds declarations and assignments for a function's arguments,
-- needed when inlining the function into main.
addAssignments ∷ [(SlimFun, [Stm])] → State St [(SlimFun, [Stm])]
addAssignments gs = do
is ← gets inlineAssigns
sequence [
if functionName f == "main"
then return (f, ss)
else case Map.lookup (functionName f) is of
Nothing → return (f, ss)
Just assigns → return (f, assigns ++ ss)
| (f, ss) ← gs ]
buildFun ∷ (SlimFun, [Stm]) → SlimFun
buildFun (f, ss) = f { statements = ss }
collectMain ∷ SlimFun → State St Shader
collectMain fun = do
modify (\st → st { gobbled = [(fun,[])], currentFun = fun })
mapM_ gobbleStm (statements fun)
gets (snd . head . gobbled) >>= buildMain fun
buildMain ∷ SlimFun → [Stm] → State St Shader
buildMain oldMain stms = do
let mainFun = oldMain { statements = reverse stms }
modify (\st → st { gobbled = [(mainFun,stms)], dependencies = [] })
mapM_ (uncurry addBoth) $ stmDeps $ head stms
-- get rid of statements nothing depends on.
stms' ← depends []
let fs' = Map.fromList $ map (\(f, st) → (functionName f, f { statements = st})) stms'
let mainFun' = let Just v = Map.lookup "main" fs' in v
ref ← gets pendingRef
modify (\st → st { pendingRef = [] })
vs ← gets variables
return Shader {
funs = fs'
, vars = vs
, inputs = findExternals (statements mainFun') ++ map (\v → SlimVar v TImage Nothing) ref
, output = "result_image"
}
findExternals ∷ [Stm] → [SlimVar]
findExternals ss = execWriter (mapM_ (mapStmM findExternal) ss >>
mapM_ (mapStmExpM findEVarTypes ) ss)
findExternal ∷ Stm → Writer [SlimVar] Stm
findExternal s@(SDecl (Dec qs (Vars [cid]))) | any isExternal qs =
tell [SlimVar (cIdentToString cid) (qualsToType qs) Nothing] >> return s
findExternal s = mapStmM findExternal s
isExternal ∷ Qualifier → Bool
isExternal (QExternal _) = True
isExternal _ = False
findEVarTypes ∷ Exp → Writer [SlimVar] Exp
findEVarTypes e@(EVarType cid t) = tell [SlimVar (cIdentToString cid) t Nothing] >>
return e
findEVarTypes e = return e
collectRewrite ∷ SlimFun → [Exp] → State St ()
collectRewrite fun es = do
pushFun fun
mapM_ gobbleStm (statements fun)
-- store them for later use
storeGobbledFun (functionName fun) assigns
popFun
where
assigns = zipWith mkAss es (args fun)
tkass = TkAss ((0,0),"=")
mkAss ∷ Exp → SlimVar → Stm
mkAss e v = SDecl (Dec [QType (varType v)] (DecAss [CIdent ((0,0),varName v)] tkass e))
storeGobbledFun ∷ String → [Stm] → State St ()
storeGobbledFun n ss = do
gs ← gets gobbled
let (fun, stms) = head $ filter ((==n) . functionName . fst) gs
-- store gobbled version.
gfs ← gets gobbledFuns
modify (\st → st { gobbledFuns = Map.insert n (fun { statements = reverse stms }) gfs })
-- store declarations + assignments needed for inlining.
ifs ← gets inlineAssigns
modify (\st → st { inlineAssigns = Map.insert n ss ifs })
gobbleStm ∷ Stm → State St ()
gobbleStm stm = mapStmExpM gobble stm >>= addStm
depFun ∷ SlimFun → State St ()
depFun f = do
-- add the actual function.
add (Fun name) >> add (Var name)
mapM_ (uncurry addBoth) $ concatMap stmDeps (statements f)
mapM_ (mapStmExpM addDepFun) (statements f)
where
name = functionName f
addDepFun ∷ Exp → State St Exp
addDepFun e@(EPartCall cid _ _) = do
f ← getFun (cIdentToString cid)
depFun f
return e
addDepFun e = return e
gobble ∷ Exp → State St Exp
gobble (EPartCall cid es _) = do
-- add a dependency to the called function (and all its dependencies).
f ← getFun name
unless (all isNum $ map varType (drop (length es) (args f))) $
error "EPartCall with invalid type."
modify (\st → st { dependencies = [] })
depFun f
-- calculate all the needed (already gobbled) statements.
d ← depends es
-- add a SReturn to the top function.
let mainFun = fst $ head $ filter (\(fun, _) → functionName fun == "main") d
d' = (\(fun, ss) → (fun, addSReturn mainFun ss name es)) (head d) : tail d
r ← newRef name
addChunk (d',r,f)
return (EVarType (CIdent ((0,0),r)) TImage)
where
name = cIdentToString cid
gobble e@(ECall cid es) = do
-- add dependencies for all the arguments passed to the function call.
mapM_ (uncurry addBoth) $ concatMap expDeps es
fun ← getFunMaybe (cIdentToString cid)
case fun of
Nothing → return e
Just f → collectRewrite f es >> return e
gobble e = mapExpM gobble e
addChunk ∷ Chunk → State St ()
addChunk c = modify (\st → st { chunks = c:chunks st })
createsImg ∷ Stm → Bool
createsImg s = or $ gather collect [s]
where
collect ∷ Exp → Writer [Bool] Exp
collect e@(EPartCall _ es _) = tell [True] >> mapM collect es >> return e
collect e = mapExpM collect e
branches ∷ Exp → Bool
branches = isJust . branchTarget
branchTarget ∷ Exp → Maybe String
branchTarget = foldExp f Nothing
where
f ∷ Maybe String → Exp → Maybe String
f p (ECall cid _) = p `mplus` Just (cIdentToString cid)
f p _ = p
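-- A hypothetical illustration (not part of the compiler): for an expression
-- equivalent to the call @f()@ the branch target is the callee's name, e.g.
--
-- > branchTarget (ECall (CIdent ((0,0),"f")) []) == Just "f"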
imgType ∷ Type
imgType = TFun TVec4 [TFloat, TFloat]
-- Dependency helpers {{{
depends ∷ [Exp] → State St [(SlimFun, [Stm])]
depends es = do
-- add all initial dependencies.
mapM_ (mapM_ (uncurry addDeps) . expDeps) es
gets (Map.keys . variables) >>= mapM_ (add . Var)
-- find all needed dependencies
gb ← gets gobbled
deps ← mapM (\(f,stms) → (,) <$> pure f <*> foldM isNeeded [] stms) gb
-- add all needed functions and map through them to get rid of unneeded stuff.
(deps ++) <$> neededFuns >>= mapM (\(f,stms) → (,) <$> pure f <*> foldM isNeeded [] (reverse stms))
-- | Adds a return-statement calling the given function.
addSReturn ∷ SlimFun → [Stm] → String → [Exp] → [Stm]
addSReturn f stms n es = stms ++ [SReturn (TkReturn ((0,0),"return")) ecall]
where
ecall = ECall cid (es ++ map fattenVar (args f))
cid = CIdent ((0,0),n)
fattenVar ∷ SlimVar → Exp
fattenVar s = EVar (CIdent ((0,0),varName s))
calledFuns ∷ [Stm] → State St [(SlimFun, [Stm])]
calledFuns ss = liftM concat $ mapM getFunMaybe (calls ss) >>= mapM (maybe (return []) calledFun)
calledFun ∷ SlimFun → State St [(SlimFun, [Stm])]
calledFun f = do
gfs ← gets gobbledFuns
-- if it creates an image it has been gobbled and the new version should be used.
if True `elem` map createsImg (statements f)
then case Map.lookup name gfs of
Just gf → (:) (gf, statements gf) <$> calledFuns (statements gf)
        -- unless it's a nested partial application - then it's not yet gobbled.
Nothing → (:) (f, statements f) <$> calledFuns (statements f)
else (:) (f, statements f) <$> calledFuns (statements f)
where
name = functionName f
-- | Returns all the functions that the state depends on.
neededFuns ∷ State St [(SlimFun, [Stm])]
neededFuns = do
deps ← gets dependencies >>= filterM isFun
liftM concat $ sequence [
getFun f >>= calledFun
| (Fun f) ← deps
]
where
isFun ∷ Dep → State St Bool
isFun (Fun name) = do
-- handles both "made up"-functions (saturated partial applications)
      -- and built-in functions.
fun ← getFunMaybe name
case fun of
Nothing → return False
Just _ → return True
isFun _ = return False
addDeps ∷ Dep → [Dep] → State St ()
addDeps _ = mapM_ add
addAffected ∷ Dep → [Dep] → State St ()
addAffected d _ = add d
addBoth ∷ Dep → [Dep] → State St ()
addBoth d ds = mapM_ add (d : ds)
add ∷ Dep → State St ()
add d = do
deps ← gets dependencies
modify (\s → s { dependencies = nub $ d : deps })
isNeeded ∷ [Stm] → Stm → State St [Stm]
isNeeded p stm = do
let stmdeps = stmDeps stm
deps ← gets dependencies
if isReturn stm || True `elem` [a `elem` deps | a ← affected stmdeps]
then do
mapM_ (uncurry addDeps) stmdeps
return $ stm:p
else return $ stm:p
where
affected ∷ DepList → [Dep]
affected = map fst
isReturn ∷ Stm → Bool
isReturn = foldStm isret False
where
isret ∷ Bool → Stm → Bool
isret _ (SReturn {}) = True
isret pr _ = pr
| ingemaradahl/bilder | src/Compiler/Split.hs | lgpl-3.0 | 15,643 | 0 | 19 | 3,517 | 5,874 | 3,055 | 2,819 | 357 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Control.Monad.Shmonad.CommandSpec (main, spec) where
import Test.Hspec
import GHC.TypeLits
import Control.Monad.Shmonad.Expression
import Control.Monad.Shmonad.Command
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "A Cmd a" $ do
it "should have a command path, list of args, and list of redirects" $ do
let (ls' :: Cmd Ls () [StrSum]) = toCmd (path "ls") defaults []
let p = shExpr (cmdName ls')
p `shouldBe` "\"ls\""
it "can be combined with And" $ do
let a = ls `andThen` cd (varFromEnvUnsafe "HOME")
shExpr a `shouldBe` "\"ls\" && cd ${HOME}"
it "can be combined with Or" $ do
let o = ls `orElse` cd (varFromEnvUnsafe "HOME")
shExpr o `shouldBe` "\"ls\" || cd ${HOME}"
it "can be combined with Pipe" $ do
let p = ls `pipe` tee [path "/tmp/log.txt"]
shExpr p `shouldBe` "\"ls\" | \"tee\" \"/tmp/log.txt\""
describe "A Command" $ do
it "defines an Args type" $ do
let args = defaults :: Args Ls () [StrSum]
symbolVal (flagSymbol (lsShowAll args)) `shouldBe` "-A"
symbolVal (flagSymbol (lsLong args)) `shouldBe` "-l"
it "stores default arguments" $ do
let args = defaults :: Args Ls () [StrSum]
flagBool (lsShowAll args) `shouldBe` False
flagBool (lsLong args) `shouldBe` False
it "turns Args into a list of Str expressions" $ do
let argStr = argsToStr (defaults :: Args Ls () [StrSum])
null argStr `shouldBe` True
it "turns a path, args, and list of redirects into a Cmd a" $ do
let c = cmd' (path "ls") ["-1"] [toFile (path "/tmp/log.txt")]
shExpr c `shouldBe` "\"ls\" -1 > \"/tmp/log.txt\""
| corajr/shmonad | test/Control/Monad/Shmonad/CommandSpec.hs | bsd-2-clause | 1,744 | 0 | 20 | 415 | 572 | 283 | 289 | 40 | 1 |
{- |
Module : Cantor.Project
Copyright : Copyright (C) 2014 Krzysztof Langner
License : BSD3
Maintainer : Krzysztof Langner <klangner@gmail.com>
Stability : alpha
Portability : portable
   Data types and functions for processing a project
-}
module Cantor.Project ( Project
, projectBuildSystem
, projectPath
, projectFiles
, projectLanguages
, projectReqs
, scanProject ) where
import Data.List
import Data.Maybe (isJust)
import Cantor.Utils.Folder (listFilesR)
import System.FilePath (takeExtension)
import System.Directory
import Cantor.KnowledgeDB (KnowledgeDB, bsFromFilePath, langFromExt, reqFromKey)
import Cantor.Build.BuildSystem (BuildSystem, mkBuildSystem)
import qualified Cantor.Build.Maven as Maven
import qualified Cantor.Build.Cabal as Cabal
data Project = Prj { projectPath :: FilePath
, projectFiles :: [FilePath] -- All project files
, projectLanguages :: [(String, Int)] -- Language name and file count
                   , projectReqs :: [String] -- Project requirements (e.g. Haskell Platform)
, projectBuildSystem :: BuildSystem }
-- | Create new project by scanning all files at given path
scanProject :: KnowledgeDB -> FilePath -> IO Project
scanProject db path = do
dp <- canonicalizePath path
files <- listFilesR (const True) dp
let n = length dp
let fps = map (drop n) files
let ls = countSourceFiles db fps
bs <- readBS path (findBuildSystem db fps)
let reqs = findRequirements db (map fst ls) bs
return $ Prj path fps ls reqs bs
-- | Count number of files for each language used in project
countSourceFiles :: KnowledgeDB -> [FilePath] -> [(String, Int)]
countSourceFiles db fps = map (\as -> (head as, length as)) ls3
where ls1 = map ((langFromExt db) . takeExtension) fps
ls2 = filter (not . null) ls1
ls3 = group (sort (map head ls2))
-- | Find the build system used by the project. Returns the path to the build file and the build system name.
findBuildSystem :: KnowledgeDB -> [FilePath] -> Maybe (FilePath, String)
findBuildSystem db fps = if null xs2 then Nothing else f(head xs2)
where xs1 = map (\x -> (x, bsFromFilePath db x)) fps
xs2 = filter (\(_,y) -> isJust y) xs1
f (x, Just y) = Just (x,y)
f (_, _) = Nothing
-- Read build system data
readBS :: FilePath -> Maybe (FilePath, String) -> IO BuildSystem
readBS path Nothing = return $ mkBuildSystem path "None" path
readBS path (Just (fp, "Maven")) = Maven.parseFile (path ++ fp)
readBS path (Just (fp, "Cabal")) = Cabal.parseFile (path ++ fp)
readBS path (Just (_, xs)) = return $ mkBuildSystem path xs path
-- Get requirements from build system
findRequirements :: KnowledgeDB -> [String] -> BuildSystem -> [String]
findRequirements db lang _ = concatMap (reqFromKey db) lang | klangner/cantor | src/Cantor/Project.hs | bsd-2-clause | 2,960 | 0 | 12 | 746 | 816 | 445 | 371 | 49 | 3 |
module Rede.MainLoop.Framer(
readNextChunk
,readLength
,Framer
,LengthCallback
) where
import Control.Monad.Trans.Class (lift)
import Control.Monad.IO.Class (MonadIO
-- , liftIO
)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as LB
import Data.Conduit
import Data.Monoid (mappend, mempty)
type Framer m = LB.ByteString -- Input left overs
-> m B.ByteString -- Generator
-> Maybe Int -- Length to read, if we know now
-> m (LB.ByteString, LB.ByteString) -- To yield, left-overs...
-- * Doing it by parts
type LengthCallback = B.ByteString -> Maybe Int
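-- A hypothetical 'LengthCallback' (an illustration, not part of this module):
-- suppose every frame starts with a one-byte header whose value is the payload
-- length, so a complete frame spans the header plus that many bytes:
--
-- > oneByteLength :: LengthCallback
-- > oneByteLength bs
-- >   | B.null bs = Nothing
-- >   | otherwise = Just (fromIntegral (B.head bs) + 1)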
readNextChunk :: Monad m =>
LengthCallback -- ^ How to know if we can split somewhere
-> B.ByteString -- ^ Input left-overs
-> m B.ByteString -- ^ Generator action
-> Source m B.ByteString -- ^ Packet and leftovers, if we could get them
readNextChunk length_callback input_leftovers gen = do
let
maybe_length = length_callback input_leftovers
readUpTo lo the_length | (B.length lo) >= the_length =
return $ B.splitAt the_length lo
readUpTo lo the_length = do
frag <- lift gen
readUpTo (lo `mappend` frag) the_length
case maybe_length of
Just the_length -> do
-- Just need to read the rest ....
(package_bytes, newnewleftovers) <- readUpTo input_leftovers the_length
yield package_bytes
readNextChunk length_callback newnewleftovers gen
Nothing -> do
-- Read a bit more
new_fragment <- lift gen
let new_leftovers = input_leftovers `mappend` new_fragment
readNextChunk length_callback new_leftovers gen
-- Some protocols, e.g., http/2, have the client transmit a fixed-length
-- prefix. This function reads that prefix and returns it together with
-- whatever extra bytes get read along the way.
readLength :: MonadIO m => Int -> m B.ByteString -> m (B.ByteString, B.ByteString)
readLength the_length gen =
readUpTo mempty
where
readUpTo lo
| (B.length lo) >= the_length = do
-- liftIO $ putStrLn "Full read"
return $ B.splitAt the_length lo
| otherwise = do
-- liftIO $ putStrLn $ "fragment read " ++ (show lo)
frag <- gen
readUpTo (lo `mappend` frag)
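-- A hypothetical use (illustration only; 'recvSomeBytes' is an assumed
-- receive action): the http/2 client connection preface is 24 bytes long, so
-- a server loop could start with
--
-- > (preface, leftovers) <- readLength 24 recvSomeBytes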
| loadimpact/http2-test | hs-src/Rede/MainLoop/Framer.hs | bsd-3-clause | 2,730 | 2 | 16 | 1,019 | 509 | 267 | 242 | 47 | 3 |
{-# language PackageImports #-}
-- | This module re-exports <https://hackage.haskell.org/package/indentation-parsec/docs/Text-Parsec-Indentation.html Text.Parsec.Indentation> from <https://hackage.haskell.org/package/indentation-parsec indentation-parsec>.
module Text.Parsec.Indentation (module Impl) where
import "indentation-parsec" Text.Parsec.Indentation as Impl
| lambdageek/indentation | indentation/src/Text/Parsec/Indentation.hs | bsd-3-clause | 369 | 0 | 4 | 25 | 24 | 18 | 6 | 3 | 0 |
{-# LANGUAGE
EmptyDataDecls
, OverloadedStrings
, GeneralizedNewtypeDeriving
, FlexibleInstances
#-}
module Clay.Size
(
-- * Size type.
Size
, Abs
, Rel
-- * Size constructors.
, px
, pt
, em
, ex
, pct
  -- * Shorthands for multi size-valued properties.
, sym
, sym2
, sym3
-- * Angle type.
, Angle
, Deg
, Rad
-- * Constructing angles.
, deg
, rad
)
where
import Data.Monoid
import Data.Text (pack)
import Clay.Common
import Clay.Property
import Clay.Stylesheet
-------------------------------------------------------------------------------
-- | Sizes can be relative like percentages.
data Rel
-- | Sizes can be absolute like pixels, points, etc.
data Abs
newtype Size a = Size Value
deriving (Val, Auto, Normal, Inherit, None, Other)
-- | Size in pixels.
px :: Integer -> Size Abs
px i = Size (value (pack (show i) <> "px"))
-- | Size in points.
pt :: Double -> Size Abs
pt i = Size (value (pack (show i) <> "pt"))
-- | Size in em's.
em :: Double -> Size Abs
em i = Size (value (pack (show i) <> "em"))
-- | Size in ex'es.
ex :: Double -> Size Abs
ex i = Size (value (pack (show i) <> "ex"))
-- | Size in percentages.
pct :: Double -> Size Rel
pct i = Size (value (pack (show i) <> "%"))
instance Num (Size Abs) where
fromInteger = px
(+) = error "plus not implemented for Size"
(*) = error "times not implemented for Size"
abs = error "abs not implemented for Size"
signum = error "signum not implemented for Size"
instance Fractional (Size Abs) where
fromRational = em . fromRational
instance Num (Size Rel) where
fromInteger = pct . fromInteger
(+) = error "plus not implemented for Size"
(*) = error "times not implemented for Size"
abs = error "abs not implemented for Size"
signum = error "signum not implemented for Size"
instance Fractional (Size Rel) where
fromRational = pct . fromRational
-------------------------------------------------------------------------------
sym :: (Size a -> Size a -> Size a -> Size a -> Css) -> Size a -> Css
sym k a = k a a a a
sym3 :: (Size a -> Size a -> Size a -> Size a -> Css) -> Size a -> Size a -> Size a -> Css
sym3 k tb l r = k tb l tb r
sym2 :: (Size a -> Size a -> Size a -> Size a -> Css) -> Size a -> Size a -> Css
sym2 k tb lr = k tb lr tb lr
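-- A hypothetical usage sketch (not part of this module), assuming the
-- four-valued 'padding' and 'margin' properties from "Clay.Geometry":
--
-- > example :: Css
-- > example = do
-- >   sym  padding (px 10)         -- 10px on all four sides
-- >   sym2 margin  (px 5) (em 1)   -- 5px top/bottom, 1em left/right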
-------------------------------------------------------------------------------
data Deg
data Rad
newtype Angle a = Angle Value
deriving (Val, Auto, Inherit, Other)
-- | Angle in degrees.
deg :: Double -> Angle Deg
deg i = Angle (value (pack (show i) <> "deg"))
-- | Angle in radians.
rad :: Double -> Angle Rad
rad i = Angle (value (pack (show i) <> "rad"))
instance Num (Angle Deg) where
fromInteger = deg . fromInteger
(+) = error "plus not implemented for Angle"
(*) = error "times not implemented for Angle"
abs = error "abs not implemented for Angle"
signum = error "signum not implemented for Angle"
instance Fractional (Angle Deg) where
fromRational = deg . fromRational
instance Num (Angle Rad) where
fromInteger = rad . fromInteger
(+) = error "plus not implemented for Angle"
(*) = error "times not implemented for Angle"
abs = error "abs not implemented for Angle"
signum = error "signum not implemented for Angle"
instance Fractional (Angle Rad) where
fromRational = rad . fromRational
| bergmark/clay | src/Clay/Size.hs | bsd-3-clause | 3,373 | 0 | 12 | 757 | 1,044 | 550 | 494 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module provides convenience functions for interfacing with @tls@.
--
-- This module is intended to be imported @qualified@, e.g.:
--
-- @
-- import "Data.Connection"
-- import qualified "System.IO.Streams.TLS" as TLS
-- @
--
module System.IO.Streams.TLS
( TLSConnection
-- * client
, connect
, connectTLS
, tLsToConnection
-- * server
, accept
-- * re-export
, module Data.TLSSetting
) where
import qualified Control.Exception as E
import Data.Connection
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.TLSSetting
import qualified Network.Socket as N
import Network.TLS (ClientParams, Context, ServerParams)
import qualified Network.TLS as TLS
import qualified System.IO.Streams as Stream
import qualified System.IO.Streams.TCP as TCP
-- | Type alias for tls connection.
--
-- Normally you shouldn't use 'TLS.Context' in 'connExtraInfo' directly.
--
type TLSConnection = Connection (TLS.Context, N.SockAddr)
-- | Make a 'Connection' from a 'Context'.
--
tLsToConnection :: (Context, N.SockAddr) -- ^ TLS connection / socket address pair
-> IO TLSConnection
tLsToConnection (ctx, addr) = do
is <- Stream.makeInputStream input
return (Connection is write (closeTLS ctx) (ctx, addr))
where
input = (do
s <- TLS.recvData ctx
return $! if B.null s then Nothing else Just s
) `E.catch` (\(_::E.SomeException) -> return Nothing)
write s = TLS.sendData ctx s
-- | Close a TLS 'Context' and its underlying socket.
--
closeTLS :: Context -> IO ()
closeTLS ctx = (TLS.bye ctx >> TLS.contextClose ctx) -- sometimes socket was closed before 'TLS.bye'
`E.catch` (\(_::E.SomeException) -> return ()) -- so we catch the 'Broken pipe' error here
-- | Convenience function for initiating a TLS connection to the given
-- @('HostName', 'PortNumber')@ combination.
--
-- This operation may throw 'TLS.TLSException' on failure.
--
connectTLS :: ClientParams -- ^ check "Data.TLSSetting"
-> Maybe String -- ^ Optional certificate subject name, if set to 'Nothing'
-- then we will try to verify 'HostName' as subject name
-> N.HostName -- ^ hostname to connect to
-> N.PortNumber -- ^ port number to connect to
-> IO (Context, N.SockAddr)
connectTLS prms subname host port = do
let subname' = maybe host id subname
prms' = prms { TLS.clientServerIdentification = (subname', BC.pack (show port)) }
(sock, addr) <- TCP.connectSocket host port
E.bracketOnError (TLS.contextNew sock prms') closeTLS $ \ ctx -> do
TLS.handshake ctx
return (ctx, addr)
-- | Connect to server using TLS and return a 'Connection'.
--
connect :: ClientParams -- ^ check "Data.TLSSetting"
-> Maybe String -- ^ Optional certificate subject name, if set to 'Nothing'
-- then we will try to verify 'HostName' as subject name
-> N.HostName -- ^ hostname to connect to
-> N.PortNumber -- ^ port number to connect to
-> IO TLSConnection
connect prms subname host port = connectTLS prms subname host port >>= tLsToConnection
-- | Accept a new TLS connection from a remote client on the given listening socket.
--
-- This operation may throw 'TLS.TLSException' on failure.
--
accept :: ServerParams -- ^ check "Data.TLSSetting"
-> N.Socket -- ^ the listening 'Socket'
-> IO TLSConnection
accept prms sock = do
(sock', addr) <- N.accept sock
E.bracketOnError (TLS.contextNew sock' prms) closeTLS $ \ ctx -> do
TLS.handshake ctx
conn <- tLsToConnection (ctx, addr)
return conn
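-- A hypothetical client sketch (an illustration, not part of this module):
-- 'params' is assumed to be a 'ClientParams' prepared via "Data.TLSSetting",
-- and 'send' / 'close' are assumed to be the field accessors from
-- "Data.Connection":
--
-- > echo :: ClientParams -> IO ()
-- > echo params = do
-- >   conn <- connect params Nothing "example.com" 443
-- >   send conn "hello\n"
-- >   close conn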
| didi-FP/tcp-streams | System/IO/Streams/TLS.hs | bsd-3-clause | 3,903 | 0 | 15 | 1,026 | 749 | 424 | 325 | 59 | 2 |
-- For Scotty
{-# LANGUAGE OverloadedStrings #-}
import Web.Scotty
import Network.Wai.Handler.Warp (defaultSettings, settingsPort, settingsHost, HostPreference (HostIPv4))
-- For Me
import Control.Monad.Trans (liftIO)
import Data.Text.Lazy (pack, unpack)
-- Project-Internal
-- TODO: Hookup internal modules
listenPort = 3000 -- Might need to change this later, depending on what's free on CSH's webserver's loopback
waiSettings = defaultSettings { settingsPort = fromInteger listenPort, settingsHost = HostIPv4 }
opts = Options {verbose = 1, settings = waiSettings} -- Glad I got xich to put this in scotty :-)
main = scottyOpts opts $ do
get "/" $ text "200: OK, Content not written yet"
-- TODO: Actually write these functions. This is just brainstorming.
-- get "/newsgroups/:newsgroup" $ do
-- newsgroup <- param "newsgroup"
-- posts <- liftIO $ getPostsFromNewsgroup $ unpack newsgroup
-- html $ pack $ renderNewsgroupPage newsgroup posts
get "/content/:handle" $ do
handle <- param "handle"
file $ "content/" ++ unpack handle
get "/themes/:handle" $ do
handle <- param "handle"
file $ "themes/" ++ unpack handle
| clockfort/mobile-webnews | webnews.hs | bsd-3-clause | 1,142 | 3 | 12 | 187 | 210 | 117 | 93 | 16 | 1 |
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables #-}
-- |
-- This package provides a function to generate a choice operator
-- in lifted IO monad by specifying exceptions to be caught.
module Control.Exception.IOChoice.Lifted.TH (newIOChoice) where
import Control.Exception.Lifted
import Language.Haskell.TH
import Control.Exception.IOChoice.THUtil
-- |
-- A function to generate a choice operator in lifted IO monad.
-- 'IOException' is automatically added to specified exceptions.
-- So, 'Control.Exception.IOChoice.Lifted.goNext' can be used with
-- the new operator.
--
-- > {-# LANGUAGE TemplateHaskell #-}
-- > import Control.Exception
-- > import Control.Exception.IOChoice.Lifted.TH
-- >
-- > (||>>) :: MonadBaseControl IO m => m a -> m a -> m a
-- > (||>>) = $(newIOChoice [''ErrorCall, ''ArithException])
newIOChoice :: [Name] -> ExpQ
newIOChoice = newChoice [| catches |] [| Handler |]
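-- A hypothetical use of an operator generated as in the example above
-- (illustration only): the fallback runs when the first action throws one of
-- the listed exceptions or an 'IOException'.
--
-- > readConfig :: IO String
-- > readConfig = readFile "app.cfg" ||>> return "defaults"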
| kazu-yamamoto/io-choice | Control/Exception/IOChoice/Lifted/TH.hs | bsd-3-clause | 905 | 0 | 6 | 133 | 77 | 57 | 20 | 7 | 1 |
{-# OPTIONS_GHC -F -pgmF tasty-discover -optF --modules="*Test.hs" -optF --debug #-}
| tittoassini/typed | test/Driver.hs | bsd-3-clause | 86 | 0 | 2 | 11 | 3 | 2 | 1 | 1 | 0 |
import Control.Concurrent (threadDelay)
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Control.Monad (forever)
import Network.Transport.TCP (createTransport,
defaultTCPParameters)
replyBack :: (ProcessId, String) -> Process ()
replyBack (sender, msg) = send sender msg
logMessage :: String -> Process ()
logMessage msg = say $ "handling " ++ msg
main :: IO ()
main = do
Right t <- createTransport "127.0.0.1" "10501" defaultTCPParameters
node <- newLocalNode t initRemoteTable
_ <- forkProcess node $ do
-- Spawn another worker on the local node
echoPid <- spawnLocal $ forever $ do
-- Test our matches in order against each message in the queue
receiveWait [match logMessage, match replyBack]
-- The `say` function sends a message to a process registered as "logger".
-- By default, this process simply loops through its mailbox and sends
-- any received log message strings it finds to stderr.
say "send some messages!"
send echoPid "hello"
self <- getSelfPid
send echoPid (self, "hello")
-- `expectTimeout` waits for a message or times out after "delay"
m <- expectTimeout 1000000
case m of
-- Die immediately - throws a ProcessExitException with the given reason.
Nothing -> die "nothing came back!"
(Just s) -> say $ "got " ++ s ++ " back!"
return ()
-- A 1 second wait. Otherwise the main thread can terminate before
-- our messages reach the logging process or get flushed to stdio
liftIO $ threadDelay (1*1000000)
return ()
| igniting/cloud-haskell-example | src/SingleNode.hs | bsd-3-clause | 1,716 | 0 | 17 | 477 | 335 | 168 | 167 | 28 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Configuration where
import Data.Aeson
import Control.Applicative ((<$>),(<*>))
import Control.Monad (mzero)
import System.Directory (doesFileExist)
import qualified Data.ByteString.Lazy as B (readFile)
import System.Exit (exitFailure)
-- | Contains the configuration values
data Configuration = Configuration {
-- | The CSS style sheet
style :: Maybe FilePath
-- | The port to serve the wiki on
, servePort :: Int
-- | The index page
, index :: String
}
instance FromJSON Configuration where
parseJSON (Object v) = Configuration <$>
v .:? "style" .!= style defaultConfiguration <*>
v .:? "port" .!= servePort defaultConfiguration <*>
v .:? "index" .!= index defaultConfiguration
parseJSON _ = mzero
-- | The default configuration
defaultConfiguration :: Configuration
defaultConfiguration = Configuration {
style = Nothing
, servePort = 8000
, index = "index"
}
-- | Load the configuration from a file
loadConfiguration :: FilePath -> IO Configuration
loadConfiguration path = do
let configPath = path ++ "simple-wiki.json"
exists <- doesFileExist configPath
if not exists
then return defaultConfiguration
else do
configFile <- B.readFile configPath
case decode configFile of
Just config -> return config
_ -> do
putStrLn "Couldn't parse configuration file"
exitFailure
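-- A hypothetical @simple-wiki.json@ accepted by the parser above (an
-- illustration only); every key is optional thanks to '.:?' / '.!=':
--
-- > { "style": "wiki.css"
-- > , "port": 8080
-- > , "index": "Home"
-- > }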
| froozen/simple-wiki | src/Configuration.hs | bsd-3-clause | 1,510 | 0 | 15 | 386 | 321 | 174 | 147 | 36 | 3 |
module NaiveLensExamples where
-- http://blog.jakubarnold.cz/2014/07/14/lens-tutorial-introduction-part-1.html
data User = User { name :: String, age :: Int } deriving Show
data Project = Project { owner :: User, value :: Int } deriving Show
bob = User { name = "Bob", age = 30 }
project1 = Project { owner = bob, value = 1 }
alice = bob { name = "Alice" }
project2 = project1 { owner = alice, value = 2 }
data NaiveLens s a = NaiveLens
{ view :: s -> a
, set :: a -> s -> s
}
nameLens :: NaiveLens User String
nameLens = NaiveLens (\user -> name user) (\newName user -> user { name = newName })
ageLens :: NaiveLens User Int
ageLens = NaiveLens (\user -> age user) (\newAge user -> user { age = newAge })
robert = set nameLens "Robert" bob
changeOwnership :: NaiveLens Project User
changeOwnership =
NaiveLens (\project -> owner project) (\newOwner project -> project { owner = newOwner })
--changeOwnershipAgeSet :: NaiveLens Project Int
changeOwnershipAgeSet proj newAge = let
newOwner = set ageLens newAge (owner proj)
in set changeOwnership newOwner proj
-- changeOwnershipAgeLens :: NaiveLens Project Int
-- changeOwnershipAgeLens =
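-- A hypothetical composition sketch (not in the original example; the names
-- 'composeNL' and 'ownerAgeLens' are made up here): two naive lenses can be
-- chained by hand to reach the owner's age through a Project.
composeNL :: NaiveLens s a -> NaiveLens a b -> NaiveLens s b
composeNL outer inner = NaiveLens
  { view = view inner . view outer
  , set  = \newB s -> set outer (set inner newB (view outer s)) s
  }

ownerAgeLens :: NaiveLens Project Int
ownerAgeLens = composeNL changeOwnership ageLens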
| peterbecich/haskell-programming-first-principles | src/NaiveLensExamples.hs | bsd-3-clause | 1,163 | 0 | 11 | 219 | 361 | 208 | 153 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.HPACK.Table.Entry (
-- * Type
Size
, Entry(..)
, Header -- re-exporting
, HeaderName -- re-exporting
, HeaderValue -- re-exporting
, Index -- re-exporting
-- * Header and Entry
, toEntry
, toEntryToken
-- * Getters
, entrySize
, entryTokenHeader
, entryToken
, entryHeaderName
, entryHeaderValue
-- * For initialization
, dummyEntry
, maxNumbers
) where
import qualified Data.ByteString as BS
import Network.HPACK.Token
import Network.HPACK.Types
----------------------------------------------------------------
-- | Size in bytes.
type Size = Int
-- | Type for a table entry. The size includes the 32-byte magic number.
data Entry = Entry Size Token HeaderValue deriving Show
----------------------------------------------------------------
headerSizeMagicNumber :: Size
headerSizeMagicNumber = 32
headerSize :: Header -> Size
headerSize (k,v) = BS.length k
+ BS.length v
+ headerSizeMagicNumber
headerSize' :: Token -> HeaderValue -> Size
headerSize' t v = BS.length (tokenFoldedKey t)
+ BS.length v
+ headerSizeMagicNumber
----------------------------------------------------------------
-- | From 'Header' to 'Entry'.
toEntry :: Header -> Entry
toEntry kv@(k,v) = Entry siz t v
where
t = toToken k
siz = headerSize kv
toEntryToken :: Token -> HeaderValue -> Entry
toEntryToken t v = Entry siz t v
where
siz = headerSize' t v
----------------------------------------------------------------
-- | Getting the size of 'Entry'.
entrySize :: Entry -> Size
entrySize (Entry siz _ _) = siz
-- | Getting 'TokenHeader'.
entryTokenHeader :: Entry -> TokenHeader
entryTokenHeader (Entry _ t v) = (t, v)
-- | Getting 'Token'.
entryToken :: Entry -> Token
entryToken (Entry _ t _) = t
-- | Getting 'HeaderName'.
entryHeaderName :: Entry -> HeaderName
entryHeaderName (Entry _ t _) = tokenFoldedKey t
-- | Getting 'HeaderValue'.
entryHeaderValue :: Entry -> HeaderValue
entryHeaderValue (Entry _ _ v) = v
----------------------------------------------------------------
-- | Dummy 'Entry' to initialize a dynamic table.
dummyEntry :: Entry
dummyEntry = Entry 0 tokenMax "dummyValue"
-- | How many entries can be stored in a dynamic table?
maxNumbers :: Size -> Int
maxNumbers siz = siz `div` headerSizeMagicNumber
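-- A worked illustration (not part of the API): with the 32-byte overhead, an
-- entry for @("content-type", "text/html")@ occupies 12 + 9 + 32 = 53 bytes,
-- and a default 4096-byte dynamic table holds at most @maxNumbers 4096 == 128@
-- minimum-sized entries.
--
-- > entrySize (toEntry ("content-type", "text/html")) == 53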
| kazu-yamamoto/http2 | Network/HPACK/Table/Entry.hs | bsd-3-clause | 2,406 | 0 | 9 | 477 | 500 | 286 | 214 | 53 | 1 |