code (string, 5 to 1.03M) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
len = fromIntegral . length
rnd n x = (fromIntegral (floor (x * t))) / t
where t = 10^n
put = putStrLn . show
mean xs = sum xs / (len xs)
stddev xs = sqrt $ var xs
var xs = (/(n-1)) $ sum [(x - m)^2 | x <- xs]
where m = mean xs
n = len xs
covar xs ys = (/(n - 1)) . sum $ zipWith (*) [x - xm | x <- xs] [y - ym | y <- ys]
where xm = mean xs
ym = mean ys
n = len xs
correl xs ys = (/ (sdx * sdy)) $ covar xs ys
where sdx = stddev xs
sdy = stddev ys
linreg xs ys =
\x -> b0 + b1 * x
where r = correl xs ys
b1 = r * (stddev ys)/(stddev xs)
b0 = (mean ys) - b1*(mean xs)
main = do
let xs = [20,31,33,35,38,38,44]
ys = [430,580,570,550,660,690,650]
r = correl xs ys
put $ r^2
put $ (linreg xs ys) 20
let b1 = r * (stddev ys)/(stddev xs)
let b0 = (mean ys) - b1*(mean xs)
put $ b1
put $ b0
| wd0/-stats | correl.hs | mit | 903 | 0 | 13 | 318 | 564 | 294 | 270 | 31 | 1 |
module SimpSymb where
az = ['a'..'z']
-- Character literals cannot be data constructors; named constructors stand in
-- for the original '+' and '=' symbols.
data XPlus = Plus | Equals
-- false x = ('+',x,'+',x,'+') | HaskellForCats/HaskellForCats | simpSymb.hs | mit | 96 | 2 | 6 | 21 | 26 | 16 | 10 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, ViewPatterns #-}
{-|
Module : Labyrinth.AStar
Description : pathfinding
Copyright : (c) deweyvm 2014
License : MIT
Maintainer : deweyvm
Stability : experimental
Portability : unknown
Implementation of the A* (A star) pathfinding algorithm.
-}
module Labyrinth.Pathing.AStar(pfind) where
import Prelude hiding(elem, all)
import qualified Data.Map as Map
import qualified Data.PSQueue as Q
import qualified Data.Set as Set
import Labyrinth.Graph
import Labyrinth.Util
import Labyrinth.Pathing.Util
data Path b = Path (Set.Set b) -- closed set
(Map.Map b Float) -- g score
(Q.PSQ b Float) -- f score, open set
(Map.Map b b) -- path so far
b -- goal node
mkPath :: Ord a => Heuristic a -> a -> a -> Path a
mkPath l start goal = Path Set.empty
(Map.singleton start 0)
(Q.singleton start $ l start goal)
Map.empty
goal
pathHelper :: forall a b c.(Ord c, Graph a b c)
=> Heuristic c
-> a b
-> Path c
-> Either String [c]
pathHelper h graph (Path closedSet gs fsop path goal) =
case Q.minView fsop of
Just (current, newOpen) -> processCurrent (Q.key current) newOpen
Nothing -> Left "Found no path"
where processCurrent :: c -> Q.PSQ c Float -> Either String [c]
processCurrent currentNode open =
let newClosed = Set.insert currentNode closedSet in
if currentNode == goal
then Right $ rewindPath path goal []
else let ns = getNeighbors graph currentNode
(gs', fsop', path') = foldl (updatePath h goal currentNode newClosed) (gs, open, path) ns in
pathHelper h graph (Path newClosed gs' fsop' path' goal)
updatePath :: Ord a
=> Heuristic a
-> a
-> a
-> Set.Set a
-> (Map.Map a Float, Q.PSQ a Float, Map.Map a a)
-> (a, Float)
-> (Map.Map a Float, Q.PSQ a Float, Map.Map a a)
updatePath h goal current closed s@(gs, fs, p) (nnode, ncost) =
if Set.member nnode closed
then s
else case Map.lookup current gs of
Just g ->
let g' = g + ncost in
if g' < g || not (qMember nnode fs)
then let f = (g' + h nnode goal) in
let newPath = Map.insert nnode current p in
let newGs = Map.insert nnode g' gs in
let newFsop = Q.insert nnode f fs in
(newGs, newFsop, newPath)
else s
Nothing -> error "data structure inconsistent"
{- | Find a path from the start node to the goal node.
Given an admissible heuristic, this will also be a shortest path.-}
pfind :: (Ord c, Graph a b c)
=> Heuristic c -- ^ The pathfinding heuristic
-> a b -- ^ The graph to be traversed
-> c -- ^ The start node
-> c -- ^ The goal node
-> Either String [c] {- ^ Either a string explaining why a path could
not be found, or the found shortest path in
order from start to goal.-}
pfind h graph start goal = pathHelper h graph $ mkPath h start goal
| deweyvm/labyrinth | src/Labyrinth/Pathing/AStar.hs | mit | 3,410 | 0 | 23 | 1,258 | 920 | 477 | 443 | 66 | 4 |
module Cases.Prelude
(
module Exports,
(?:),
(|>),
(<|),
(|$>),
)
where
-- base
-------------------------
import Control.Applicative as Exports hiding (WrappedArrow(..))
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.IO.Class as Exports
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.Int as Exports
import Data.IORef as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (sortOn, isSubsequenceOf, uncons, concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.List.NonEmpty as Exports (NonEmpty(..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt, (<>))
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.Semigroup as Exports hiding (First(..), Last(..))
import Data.STRef as Exports
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, withMVar, threadWaitWriteSTM, threadWaitWrite, threadWaitReadSTM, threadWaitRead)
import GHC.Exts as Exports (IsList(..), lazy, inline, sortWith, groupWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import GHC.OverloadedLabels as Exports
import Numeric as Exports
import Prelude as Exports hiding (Read, fail, concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, id, (.))
import System.Environment as Exports
import System.Exit as Exports
import System.IO as Exports (Handle, hClose)
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readPrec_to_P, readP_to_Prec, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (printf, hPrintf)
import Unsafe.Coerce as Exports
(?:) :: Maybe a -> a -> a
maybeA ?: b = fromMaybe b maybeA
{-# INLINE (?:) #-}
(|>) :: a -> (a -> b) -> b
a |> aToB = aToB a
{-# INLINE (|>) #-}
(<|) :: (a -> b) -> a -> b
aToB <| a = aToB a
{-# INLINE (<|) #-}
-- |
-- The following are all the same:
-- fmap f a == f <$> a == a |> fmap f == a |$> f
--
-- This operator accommodates the left-to-right operators: >>=, >>>, |>.
(|$>) :: Functor f => f a -> (a -> b) -> f b
(|$>) = flip fmap
{-# INLINE (|$>) #-}
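-- A brief usage sketch of the operators above (illustrative doctest-style
-- examples, not part of the original module; they rely only on standard
-- Prelude instances):
--
-- >>> Just 3 |$> (+1)
-- Just 4
-- >>> Nothing ?: 0
-- 0
-- >>> 4 |> (+1)
-- 5
-- >>> negate <| 5
-- -5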
| nikita-volkov/cases | library/Cases/Prelude.hs | mit | 3,608 | 0 | 8 | 508 | 990 | 679 | 311 | 85 | 1 |
import BMSClipboard
import System.Environment
import Data.List
import Debug.Trace
import Data.Maybe
sortObject a b
| t1 < t2 = LT
| t1 > t2 = GT
| otherwise = compare c1 c2
where
t1 = row(a)
t2 = row(b)
c1 = rawChannel(a)
c2 = rawChannel(b)
movePlayableNotesToBGM ([], bgmNotes) = bgmNotes
movePlayableNotesToBGM ((noteToMove:otherPlayableNotes), bgmNotes) = let
availableBGMChannels = iterate (+ 1) 1
matching channel note = ((row note) == (row noteToMove)) && (isBGM note) && ((bgmChannelIndex note) == channel)
usable channel = isNothing $ find (matching channel) bgmNotes
Just channelIndexToMoveTo = find usable availableBGMChannels
movedNote = (setBGMChannel channelIndexToMoveTo) . (setLength 0) $ noteToMove
in
movePlayableNotesToBGM (otherPlayableNotes, (movedNote:bgmNotes))
bgmize objects = let
sortedObjects = sortBy sortObject objects
(bgmNotes, playableNotes) = partition isBGM sortedObjects
in
OK "Moved notes!" (movePlayableNotesToBGM (playableNotes, bgmNotes))
main = do
contents <- getContents
putStrLn $ processBMSClipboard bgmize contents
| bemusic/bms-clipboarder | src/BGMize.hs | mit | 1,142 | 0 | 14 | 222 | 390 | 199 | 191 | 28 | 1 |
module Http.Response
( Response (..)
, response
) where
import Text.ParserCombinators.Parsec hiding (token)
data Response = Response
{ statusLine :: StatusLine
, headers :: [(String,String)]
, body :: String
} deriving (Show)
data StatusLine = StatusLine
{ code :: Int
, reasonPhrase :: String
, version :: String
} deriving (Show)
statusLineDefaults :: StatusLine
statusLineDefaults = StatusLine
{ code=500
, reasonPhrase="Unable to parse response"
, version="HTTP/1.1"
}
responseDefaults :: Response
responseDefaults = Response
{ statusLine=statusLineDefaults
, headers=[]
, body=""
}
-- TODO: better name (contrast with request)
response :: String -> Response
response text = case parse parseResponse "Response" text of
Left err -> error $ "Parse error at " ++ show err
Right res -> res
crlf = string "\r\n"
ctls = "\r\n"
-- Any character (0-127), except for CTLs and separators
token = many (alphaNum <|> oneOf "!#$%&'*+-.^_`|~")
parseResponse :: Parser Response
parseResponse = do sl <- parseStatusLine
headers <- many parseHeader
crlf
body <- many anyChar
eof
return Response { statusLine = sl,
headers = headers,
body = body }
parseHeader :: Parser (String,String)
parseHeader = do name <- token
char ':'
spaces
val <- many1 $ noneOf ctls
crlf
return (name,val)
parseVersion :: Parser String
parseVersion = do string vPrefix
major <- many1 digit
char '.'
minor <- many1 digit
return $ vPrefix ++ major ++ "." ++ minor
where vPrefix = "HTTP/"
parseStatusLine :: Parser StatusLine
parseStatusLine = do version <- parseVersion
space
code <- count 3 digit
space
reason <- many $ noneOf "\r\n"
crlf
return StatusLine { code = read code :: Int,
reasonPhrase = reason,
version = version }
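-- Illustrative example (added for clarity, not part of the original module);
-- the exact Show output is approximate:
--
-- >>> response "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nhello"
-- Response {statusLine = StatusLine {code = 200, reasonPhrase = "OK", version = "HTTP/1.1"}, headers = [("Content-Type","text/plain")], body = "hello"}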
| ndreynolds/hsURL | src/Http/Response.hs | mit | 2,472 | 0 | 10 | 1,050 | 553 | 291 | 262 | 64 | 2 |
{-# LANGUAGE MagicHash, MultiParamTypeClasses, TypeFamilies, DataKinds, FlexibleContexts #-}
module JavaFX.Types where
import Java
data {-# CLASS "javafx.stage.Stage" #-} Stage = Stage (Object# Stage)
deriving Class
data {-# CLASS "javafx.scene.Scene" #-} Scene = Scene (Object# Scene)
deriving Class
data {-# CLASS "javafx.scene.Group" #-} Group = Group (Object# Group)
deriving Class
data {-# CLASS "javafx.scene.canvas.Canvas" #-} Canvas = Canvas (Object# Canvas)
deriving Class
data {-# CLASS "javafx.scene.canvas.Canvas[]" #-} Canvases = Canvases (Object# Canvases)
deriving Class
instance JArray Canvas Canvases
data {-# CLASS "javafx.scene.canvas.GraphicsContext" #-} GraphicsContext = GraphicsContext (Object# GraphicsContext)
deriving Class
data {-# CLASS "javafx.scene.Node" #-} Node = Node (Object# Node)
deriving Class
data {-# CLASS "javafx.scene.Node[]" #-} Nodes = Nodes (Object# Nodes)
deriving Class
instance JArray Node Nodes
data {-# CLASS "javafx.scene.Parent" #-} Parent = Parent (Object# Parent)
deriving Class
type instance Inherits Group = '[Parent]
type instance Inherits Canvas = '[Node]
| filippovitale/eta-playground | javafx-canvas-grid/src/JavaFX/Types.hs | mit | 1,166 | 0 | 8 | 193 | 229 | 132 | 97 | 25 | 0 |
module ProjectEuler.Problem004Spec (main, spec) where
import Test.Hspec
import ProjectEuler.Problem004
main :: IO ()
main = hspec spec
spec :: Spec
spec = parallel $
describe "solve" $
it "finds the largest palindrome product of two 3-digit numbers" $
solve 3 `shouldBe` 906609
| hachibu/project-euler | test/ProjectEuler/Problem004Spec.hs | mit | 293 | 0 | 9 | 57 | 79 | 43 | 36 | 10 | 1 |
{-# LANGUAGE TupleSections #-}
module Main where
import SparseMatrix
import Data.List (sortOn)
import qualified System.IO.Strict as Strict
import Data.Map.Strict (fromList, toList)
main :: IO ()
main = Strict.readFile "hackage.graph.hs" >>= mapM_ print
. (fmap . fmap) (*100) . take 20 . sortOn (negate . snd) . toList
. pageRank 0.85 2
. fromList . (read :: String -> [(String,[String])])
| Magnap/pagerank-hs | src/Main.hs | mit | 399 | 0 | 14 | 68 | 150 | 85 | 65 | 11 | 1 |
-- | Common helpers for the tests.
module TestCommon
( mkApp'
, key
) where
import API (serveApp)
import App (AppConfig(..), mkApp)
import Control.Monad.Logger (runStderrLoggingT, filterLogger, LogLevel(..))
import qualified Data.Map.Strict as M
import Mailgun.APIKey (APIKey(..))
import Network.Wai (Application)
key :: APIKey
key = APIKey "key-49487a3bbead016679ec337d1a99bcf1"
testConfig :: AppConfig
testConfig = AppConfig
{ _configMailgunApiKey = key
, _configConnectionString = ":memory:"
, _configConnections = 1
, _configSchedules = M.fromList []
, _configHttpPort = Nothing
, _configHttpsPort = Nothing
, _configCertificate = Nothing
}
mkApp' :: IO Application
mkApp' = runStderrLoggingT . filterLogger (const (> LevelDebug)) $
serveApp <$> mkApp testConfig
| fusionapp/catcher-in-the-rye | test/TestCommon.hs | mit | 845 | 0 | 11 | 176 | 205 | 126 | 79 | 23 | 1 |
-- | Functions for working with Jalaali calendar system.
-- This library mimics the API of "Data.Time.Calendar.Gregorian".
module Data.Time.Calendar.Jalaali
( isJalaaliLeapYear
, toJalaali
, fromJalaali
, fromJalaaliValid
, jalaaliMonthLength
, showJalaali
, addJalaaliMonthsClip
, addJalaaliMonthsRollOver
, addJalaaliYearsClip
, addJalaaliYearsRollOver
, addJalaaliDurationClip
, addJalaaliDurationRollOver
) where
import Data.List (foldl')
import Data.Time.Calendar
(Day, CalendarDiffDays(CalendarDiffDays), addDays, fromGregorian, toGregorian)
-- | Convert to Jalaali calendar. First element of result is year, second month
-- number (1-12), third day (1-31).
toJalaali :: Day -> (Integer, Int, Int)
toJalaali d = (toInteger jy, jm, jd)
where
(gy, gm, gd) = toGregorian d
(jy, jm, jd) = d2j $ g2d (fromInteger gy) gm gd
-- | Convert from Jalaali calendar. First argument is year, second month
-- number (1-12), third day (1-31). Invalid values will be clipped to the
-- correct range, month first, then day.
fromJalaali :: Integer -> Int -> Int -> Day
fromJalaali jy jm jd = fromGregorian (toInteger gy) gm gd
where
jmv = max 1 (min 12 jm)
jdv = max 1 (min 31 jd)
jdv2 = min jdv (jalaaliMonthLength jy jmv)
(gy, gm, gd) = d2g $ j2d (fromInteger jy) jmv jdv2
-- | Convert from Jalaali calendar. First argument is year, second month
-- number (1-12), third day (1-31). Invalid values will return Nothing.
fromJalaaliValid :: Integer -> Int -> Int -> Maybe Day
fromJalaaliValid jy jm jd
| jy < (-61) = Nothing
| jy > 3177 = Nothing
| jm < 1 = Nothing
| jm > 12 = Nothing
| jd < 1 = Nothing
| jd > jalaaliMonthLength jy jm = Nothing
| otherwise = Just $ fromJalaali jy jm jd
-- | Show in slash-separated format (yyyy/mm/dd).
showJalaali :: Day -> String
showJalaali d = show jy ++ "/" ++ zeroPad jm ++ "/" ++ zeroPad jd
where
(jy, jm, jd) = toJalaali d
zeroPad n = if n < 10 then "0" ++ show n else show n
-- | The number of days in a given month according to the Jalaali calendar.
-- First argument is year, second is month.
jalaaliMonthLength :: Integer -> Int -> Int
jalaaliMonthLength jy jm
| jm <= 6 = 31
| jm <= 11 = 30
| isJalaaliLeapYear jy = 30
| otherwise = 29
-- | Add months, with days past the last day of the month clipped to the last
-- day. For instance, 1400/05/31 + 7 months = 1400/12/29.
addJalaaliMonthsClip :: Integer -> Day -> Day
addJalaaliMonthsClip m d = fromJalaali jyn jmn jdn
where
(jy, jm, jd) = toJalaali d
jyn = toInteger (((fromInteger m) + jm - 1) `div` 12) + jy
jmn = (((fromInteger m) + jm - 1) `mod` 12) + 1
jdn = min jd (jalaaliMonthLength jyn jmn)
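-- For example, following the clipping behaviour described above:
--
-- >>> showJalaali (addJalaaliMonthsClip 7 (fromJalaali 1400 5 31))
-- "1400/12/29"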
-- | Add months, with days past the last day of the month rolling over to the
-- next month. For instance, 1400/05/31 + 7 months = 1401/01/02.
addJalaaliMonthsRollOver :: Integer -> Day -> Day
addJalaaliMonthsRollOver m d =
addDays (toInteger (jd - jdn)) $ fromJalaali jyn jmn jdn
where
(jy, jm, jd) = toJalaali d
jyn = toInteger (((fromInteger m) + jm - 1) `div` 12) + jy
jmn = (((fromInteger m) + jm - 1) `mod` 12) + 1
jdn = min jd (jalaaliMonthLength jyn jmn)
-- | Add years, matching month and day, with Esfand 30th clipped to Esfand 29th
-- if necessary. For instance, 1399/12/30 + 1 year = 1400/12/29.
addJalaaliYearsClip :: Integer -> Day -> Day
addJalaaliYearsClip y d = fromJalaali jyn jmn jdn
where
(jy, jm, jd) = toJalaali d
jyn = y + jy
jmn = jm
jdn = min jd (jalaaliMonthLength jyn jmn)
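-- For example, following the clipping behaviour described above:
--
-- >>> showJalaali (addJalaaliYearsClip 1 (fromJalaali 1399 12 30))
-- "1400/12/29"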
-- | Add years, matching month and day, with Esfand 30th rolled over to
-- Farvardin 1st if necessary. For instance, 1399/12/30 + 1 year = 1401/01/01.
addJalaaliYearsRollOver :: Integer -> Day -> Day
addJalaaliYearsRollOver y d =
addDays (toInteger (jd - jdn)) $ fromJalaali jyn jmn jdn
where
(jy, jm, jd) = toJalaali d
jyn = y + jy
jmn = jm
jdn = min jd (jalaaliMonthLength jyn jmn)
-- | Add months (clipped to last day), then add days.
addJalaaliDurationClip :: CalendarDiffDays -> Day -> Day
addJalaaliDurationClip (CalendarDiffDays dm dd) d =
addDays dd $ addJalaaliMonthsClip dm d
-- | Add months (rolling over to next month), then add days.
addJalaaliDurationRollOver :: CalendarDiffDays -> Day -> Day
addJalaaliDurationRollOver (CalendarDiffDays dm dd) d =
addDays dd $ addJalaaliMonthsRollOver dm d
-- | Is this year a leap year according to the Jalaali calendar?
isJalaaliLeapYear :: Integer -> Bool
isJalaaliLeapYear jy = leap == 0
where
(leap, _, _) = jalCal (fromInteger jy)
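-- For example (consistent with the month-length examples above, where 1399
-- has an Esfand 30th and 1400 does not):
--
-- >>> isJalaaliLeapYear 1399
-- True
-- >>> isJalaaliLeapYear 1400
-- False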
----------------------------------------------
type JalaaliYear = Int
type JalaaliMonth = Int
type JalaaliDay = Int
type GregorianYear = Int
type GregorianMonth = Int
type GregorianDay = Int
type JulianDayNumber = Int
type DayInMarch = Int
type LeapOffset = Int
type JalaaliDate = (JalaaliYear, JalaaliMonth, JalaaliDay)
type GregorianDate = (GregorianYear, GregorianMonth, GregorianDay)
-- Jalaali years at which a 33-year cycle of the leap-year rule starts.
breaks :: [JalaaliYear]
breaks = [ -61, 9, 38, 199, 426, 686, 756, 818, 1111, 1181, 1210
, 1635, 2060, 2097, 2192, 2262, 2324, 2394, 2456, 3178
]
{-
This function determines if the Jalaali (Persian) year is
leap (366-day long) or is the common year (365 days), and
finds the day in March (Gregorian calendar) of the first
day of the Jalaali year (jy).
@param jy Jalaali calendar year (-61 to 3177)
@return
leap: number of years since the last leap year (0 to 4)
gy: Gregorian year of the beginning of Jalaali year
march: the March day of Farvardin the 1st (1st day of jy)
@see: http://www.astro.uni.torun.pl/~kb/Papers/EMP/PersianC-EMP.htm
@see: http://www.fourmilab.ch/documents/calendar/
-}
jalCal :: JalaaliYear -> (LeapOffset, GregorianYear, DayInMarch)
jalCal jy
| jy < (-61) = error ("invalid Jalaali year " ++ show jy ++ ", should be >= -61")
| jy > 3177 = error ("invalid Jalaali year " ++ show jy ++ ", should be <= 3177")
| otherwise = (leap, gy, dayInMarch)
where
gy = jy + 621
quot4 = (`quot` 4)
quot33 = (`quot` 33)
mod33 = (`mod` 33)
(before, after) = break (jy <) breaks
n = jy - last before
years = before ++ if null after then [] else [head after]
jumps = zipWith (-) (drop 1 years) years
lastJump = last jumps
-- Find the limiting years for the Jalaali year jy.
leapJ' = foldl' (\acc jump -> acc + quot33 jump * 8 + quot4 (mod33 jump)) (-14) (init jumps)
-- Find the number of leap years from AD 621 to the beginning
-- of the current Jalaali year in the Persian calendar.
leapJ'' = leapJ' + quot33 n * 8 + quot4 (mod33 n + 3)
leapJ = leapJ'' + if mod33 lastJump == 4 && (lastJump - n) == 4 then 1 else 0
-- And the same in the Gregorian calendar (until the year gy).
leapG = quot4 gy - quot4 (((gy `quot` 100) + 1) * 3) - 150
-- Determine the Gregorian date of Farvardin the 1st.
dayInMarch = 20 + leapJ - leapG
-- Find how many years have passed since the last leap year.
n' = n + if lastJump - n < 6 then quot33 (lastJump + 4) * 33 - lastJump else 0
leap' = (mod33 (n' + 1) - 1) `mod` 4
leap = if leap' == -1 then 4 else leap'
{-
Converts a date of the Jalaali calendar to the Julian Day number.
@param jy Jalaali year (1 to 3100)
@param jm Jalaali month (1 to 12)
@param jd Jalaali day (1 to 29/31)
@return Julian Day number
-}
j2d :: JalaaliYear -> JalaaliMonth -> JalaaliDay -> JulianDayNumber
j2d jy jm jd = jdn + (jm - 1) * 31 - (jm `quot` 7) * (jm - 7) + jd - 1
where
(_, gy, dayInMarch) = jalCal jy
jdn = g2d gy 3 dayInMarch
{-
Converts the Julian Day number to a date in the Jalaali calendar.
@param jdn Julian Day number
@return
jy: Jalaali year (1 to 3100)
jm: Jalaali month (1 to 12)
jd: Jalaali day (1 to 29/31)
-}
d2j :: JulianDayNumber -> JalaaliDate
d2j jdn = (jy, jm, jd)
where
(gy, _, _) = d2g jdn
jy' = gy - 621
(leap, _, dayInMarch) = jalCal jy'
jdn1f = g2d gy 3 dayInMarch
k' = jdn - jdn1f
k | k' >= 0 && k' <= 185 = k'
| k' >= 0 = k' - 186
| otherwise = k' + 179 + if leap == 1 then 1 else 0
jy = jy' - if k' < 0 then 1 else 0 -- Previous Jalaali year.
jm = if k' >= 0 && k' <= 185
then 1 + (k `quot` 31)
else 7 + (k `quot` 30)
jd = (+) 1 (mod k (if k' >= 0 && k' <= 185 then 31 else 30))
{-
Calculates the Julian Day number from Gregorian or Julian
calendar dates. This integer number corresponds to the noon of
the date (i.e. 12 hours of Universal Time).
The procedure was tested to be good since 1 March, -100100 (of both
calendars) up to a few million years into the future.
@param gy Calendar year (years BC numbered 0, -1, -2, ...)
@param gm Calendar month (1 to 12)
@param gd Calendar day of the month (1 to 28/29/30/31)
@return Julian Day number
-}
g2d :: GregorianYear -> GregorianMonth -> GregorianDay -> JulianDayNumber
g2d gy gm gd =
d - ((((gy + 100100 + ((gm - 8) `quot` 6)) `quot` 100) * 3) `quot` 4) + 752
where
d = ((gy + ((gm - 8) `quot` 6) + 100100) * 1461) `quot` 4 +
(153 * ((gm + 9) `mod` 12) + 2) `quot` 5 +
gd - 34840408
{-
Calculates Gregorian and Julian calendar dates from the Julian Day number
(jdn) for the period since jdn=-34839655 (i.e. the year -100100 of both
calendars) to some millions years ahead of the present.
@param jdn Julian Day number
@return
gy: Calendar year (years BC numbered 0, -1, -2, ...)
gm: Calendar month (1 to 12)
gd: Calendar day of the month M (1 to 28/29/30/31)
-}
d2g :: JulianDayNumber -> GregorianDate
d2g jdn = (gy, gm, gd)
where
j' = 4 * jdn + 139361631
j = j' + ((((4 * jdn + 183187720) `quot` 146097) * 3) `quot` 4) * 4 - 3908
i = ((j `mod` 1461) `quot` 4) * 5 + 308
gd = ((i `mod` 153) `quot` 5) + 1
gm = ((i `quot` 153) `mod` 12) + 1
gy = (j `quot` 1461) - 100100 + ((8 - gm) `quot` 6)
| jalaali/jalaali-hs | src/Data/Time/Calendar/Jalaali.hs | mit | 10,023 | 0 | 20 | 2,401 | 2,713 | 1,503 | 1,210 | 149 | 5 |
import RecursiveContents (getRecursiveContents)
simpleFind :: (FilePath -> Bool) -> FilePath -> IO [FilePath]
simpleFind p path = do
names <- getRecursiveContents path
return (filter p names)
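-- Illustrative use (hypothetical; takeExtension comes from System.FilePath,
-- which this module does not import):
--
-- >>> simpleFind (\p -> takeExtension p == ".hs") "."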
| zhangjiji/real-world-haskell | ch9/SimpleFinder.hs | mit | 197 | 0 | 9 | 31 | 69 | 34 | 35 | 5 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
-- |
-- Module : Network.OAuth.MuLens
-- Copyright : (c) Joseph Abrahamson 2013
-- License : MIT
--
-- Maintainer : me@jspha.com
-- Stability : experimental
-- Portability : non-portable
--
-- Tiny @Control.Lens@ compatibility layer.
module Network.OAuth.MuLens (
-- * Basics
view, set,
-- * Generalizations
over, foldMapOf,
-- * Building
(<&>), (&), (^.), (.~), (%~),
) where
import Data.Functor.Identity
import Data.Functor.Constant
view :: ((a -> Constant a a) -> s -> Constant a s) -> s -> a
view inj = foldMapOf inj id
{-# INLINE view #-}
over :: ((a -> Identity b) -> s -> Identity t) -> (a -> b) -> s -> t
over inj f = runIdentity . inj (Identity . f)
{-# INLINE over #-}
set :: ((a -> Identity b) -> s -> Identity t) -> b -> s -> t
set l = over l . const
{-# INLINE set #-}
foldMapOf :: ((a -> Constant r b) -> s -> Constant r t) -> (a -> r) -> s -> r
foldMapOf inj f = getConstant . inj (Constant . f)
{-# INLINE foldMapOf #-}
infixl 5 <&>
(<&>) :: Functor f => f a -> (a -> b) -> f b
(<&>) = flip (<$>)
{-# INLINE (<&>) #-}
infixl 1 &
(&) :: b -> (b -> c) -> c
(&) = flip ($)
{-# INLINE (&) #-}
infixl 8 ^.
(^.) :: s -> ((a -> Constant a a) -> s -> Constant a s) -> a
(^.) = flip view
{-# INLINE (^.) #-}
infixr 4 .~
(.~) :: ((a -> Identity b) -> s -> Identity t) -> b -> s -> t
(.~) = set
{-# INLINE (.~) #-}
infixr 4 %~
(%~) :: ((a -> Identity b) -> s -> Identity t) -> (a -> b) -> s -> t
(%~) = over
{-# INLINE (%~) #-}
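-- A small usage sketch with a hand-rolled lens (the lens below is purely
-- illustrative and not part of this module):
--
-- fstL :: Functor f => (a -> f a') -> (a, b) -> f (a', b)
-- fstL f (a, b) = (\a' -> (a', b)) <$> f a
--
-- >>> (1, 2) ^. fstL
-- 1
-- >>> (1, 2) & fstL .~ 5
-- (5,2)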
| tel/oauthenticated | src/Network/OAuth/MuLens.hs | mit | 1,573 | 0 | 11 | 395 | 607 | 349 | 258 | 40 | 1 |
-- If the numbers 1 to 5 are written out in words: one, two, three,
-- four, five, then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in
-- total.
-- If all the numbers from 1 to 1000 (one thousand) inclusive were
-- written out in words, how many letters would be used?
-- NOTE: Do not count spaces or hyphens. For example, 342 (three
-- hundred and forty-two) contains 23 letters and 115 (one hundred and
-- fifteen) contains 20 letters. The use of "and" when writing out
-- numbers is in compliance with British usage.
module Euler.Problem017
( solution
, spell
) where
import Data.Char (isLetter)
import Data.List (intercalate)
solution :: Int -> Integer
solution ceil = sum . map (toInteger . length . filter isLetter . spell) $ [1..ceil]
spell :: Int -> String
spell = intercalate " " . digits
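-- For example, matching the wording in the problem statement above:
--
-- >>> spell 342
-- "three hundred and forty-two"
-- >>> spell 115
-- "one hundred and fifteen"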
digits :: Int -> [String]
digits n
| n < 1 = []
| n < 20 = [smallNumbers n]
| n < 100 = [tensAndOnes $ n `divMod` 10]
| n < 1000 = hundredsPlus $ n `divMod` 100
| otherwise = [smallNumbers 1, scaleNumbers 1]
hundredsPlus :: (Int, Int) -> [String]
hundredsPlus (h, 0) = hundreds h
hundredsPlus (h, t) = hundreds h ++ ["and"] ++ digits t
hundreds :: Int -> [String]
hundreds h = [smallNumbers h, scaleNumbers 0]
tensAndOnes :: (Int, Int) -> String
tensAndOnes (t, 0) = tens t
tensAndOnes (t, o) = tens t ++ "-" ++ smallNumbers o
smallNumbers :: Int -> String
smallNumbers = (!!) [ "zero"
, "one"
, "two"
, "three"
, "four"
, "five"
, "six"
, "seven"
, "eight"
, "nine"
, "ten"
, "eleven"
, "twelve"
, "thirteen"
, "fourteen"
, "fifteen"
, "sixteen"
, "seventeen"
, "eighteen"
, "nineteen"
]
tens :: Int -> String
tens = (!!) [ ""
, ""
, "twenty"
, "thirty"
, "forty"
, "fifty"
, "sixty"
, "seventy"
, "eighty"
, "ninety"
]
scaleNumbers :: Int -> String
scaleNumbers = (!!) [ "hundred"
, "thousand"
]
| whittle/euler | src/Euler/Problem017.hs | mit | 2,402 | 0 | 11 | 952 | 547 | 310 | 237 | 59 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-|
Copyright : (c) 2016 Dominik Schoop
License : GPL v3 (see LICENSE)
Maintainer : Dominik Schoop dominik.schoop@hs-esslingen.de
Portability : GHC only
Conversion of the graph part of a sequent (constraint system) to a JSON graph format
ADVICE:
- DO NOT USE sequentToJSONPretty IN OPERATIONAL MODE.
Remember to replace sequentToJSONPretty with sequentToJSON in getTheoryGraphR in Handler.hs.
- The time-consuming pretty printing of facts and terms can be disabled by a parameter of
sequentToJSONPretty and sequentToJSON. Currently, everything is pretty with sequentToJSONPretty
and nothing is pretty with sequentToJSON.
TO DO:
- generate JSON in non-interactive mode
- make it work for observational equivalence
- encode historic information in the graph: sequence of nodes added
OPEN PROBLEMS:
- JSON encodePretty converts < and > to "\u003c" and "\u003e"
ghastly postprocessing has been applied to sequentToJSONPretty
-}
module Theory.Constraint.System.JSON (
sequentToJSON,
writeSequentAsJSONToFile,
sequentToJSONPretty,
writeSequentAsJSONPrettyToFile
) where
import Extension.Data.Label as L (get)
import Data.Aeson
import Data.Aeson.TH
import Data.Aeson.Encode.Pretty -- to do pretty printing of JSON
import Data.Foldable
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Set as S
import qualified Data.ByteString.Lazy.Char8 as BC (unpack)
import Text.PrettyPrint.Class -- for Doc and the pretty printing functions
import Theory.Constraint.System
import Theory.Model
-------------------------------------------------------------------------------------------------
-- Data structure for JSON graphs --
-- adapted from https://github.com/jsongraph/json-graph-specification --
-------------------------------------------------------------------------------------------------
-- | Representation of a term in a JSON graph node.
data JSONGraphNodeTerm =
Const String
| Funct String [JSONGraphNodeTerm] String
deriving (Show)
-- | Automatically derived instances have unnecessarily many tag-value pairs.
-- Hence, we have our own here.
instance FromJSON JSONGraphNodeTerm where
parseJSON = withObject "JSONGraphNodeTerm" $ \o -> asum [
Const <$> o .: "jgnConst",
Funct <$> o .: "jgnFunct" <*> o .: "jgnParams" <*> o .: "jgnShow" ]
instance ToJSON JSONGraphNodeTerm where
toJSON (Const s) = object [ "jgnConst" .= s ]
toJSON (Funct f p s) = object
[ "jgnFunct" .= f
, "jgnParams" .= toJSON p
, "jgnShow" .= s
]
-- | Representation of a fact in a JSON graph node.
data JSONGraphNodeFact = JSONGraphNodeFact
{
jgnFactId :: String
, jgnFactTag :: String -- ^ ProtoFact, FreshFact, OutFact, InFact, KUFact, KDFact, DedFact
, jgnFactName :: String -- ^ Fr, Out, In, !KU, ...
, jgnFactMult :: String -- ^ "!" = persistent, "" = linear
, jgnFactTerms :: [JSONGraphNodeTerm]
, jgnFactShow :: String
} deriving (Show)
-- | Representation of meta data of a JSON graph node.
data JSONGraphNodeMetadata = JSONGraphNodeMetadata
{
jgnPrems :: [JSONGraphNodeFact]
, jgnActs :: [JSONGraphNodeFact]
, jgnConcs :: [JSONGraphNodeFact]
} deriving (Show)
-- | Representation of a node of a JSON graph.
data JSONGraphNode = JSONGraphNode
{
jgnId :: String
, jgnType :: String
, jgnLabel :: String
, jgnMetadata :: Maybe JSONGraphNodeMetadata
} deriving (Show)
-- | Optional fields are not handled correctly with automatically derived instances
-- hence, we have our own here.
instance FromJSON JSONGraphNode where
parseJSON = withObject "JSONGraphNode" $ \o -> JSONGraphNode
<$> o .: "jgnId"
<*> o .: "jgnType"
<*> o .: "jgnLabel"
<*> o .:? "jgnMetadata"
instance ToJSON JSONGraphNode where
toJSON (JSONGraphNode jgnId jgnType jgnLabel jgnMetadata) = object $ catMaybes
[ ("jgnId" .=) <$> pure jgnId
, ("jgnType" .=) <$> pure jgnType
, ("jgnLabel" .=) <$> pure jgnLabel
, ("jgnMetadata" .=) <$> jgnMetadata ]
-- | Representation of an edge of a JSON graph.
data JSONGraphEdge = JSONGraphEdge
{
jgeSource :: String
, jgeRelation :: String
, jgeTarget :: String
-- , jgeDirected :: Maybe Bool
-- , jgeLabel :: Maybe String
} deriving (Show)
-- | Representation of a JSON graph.
data JSONGraph = JSONGraph
{
jgDirected :: Bool
, jgType :: String
, jgLabel :: String
, jgNodes :: [JSONGraphNode]
, jgEdges :: [JSONGraphEdge]
-- , jgmetadata :: JSONGraphMetadata
} deriving (Show)
-- | Representation of a collection of JSON graphs.
data JSONGraphs = JSONGraphs
{
graphs :: [JSONGraph]
} deriving (Show)
-- | Derive ToJSON and FromJSON.
concat <$> mapM (deriveJSON defaultOptions) [''JSONGraphNodeFact, ''JSONGraphNodeMetadata, ''JSONGraphEdge, ''JSONGraph, ''JSONGraphs]
-- | Generation of JSON text from JSON graphs.
-- | Flatten out pretty printed facts from prettyLNFact etc.
cleanString :: [Char] -> [Char]
cleanString [] = []
cleanString (' ':'\n':' ':xs) = cleanString (' ':xs)
cleanString ('\n':xs) = cleanString xs
cleanString (' ':' ':xs) = cleanString (' ':xs)
cleanString (c:xs) = (c:cleanString xs)
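-- For instance, a pretty-printed fact spread over two indented lines is
-- collapsed onto one line (illustrative input):
--
-- >>> cleanString "f(x,\n  y)"
-- "f(x, y)"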
-- | Convert output of pretty print functions to string.
pps :: Doc -> String
pps d = cleanString $ render d
-- | EncodePretty encodes '<' as "\u003c" and '>' as "\u003e".
-- This function replaces these characters.
removePseudoUnicode :: [Char] -> [Char]
removePseudoUnicode [] = []
removePseudoUnicode ('\\':'u':'0':'0':'3':'c':xs) = ('<':removePseudoUnicode xs)
removePseudoUnicode ('\\':'u':'0':'0':'3':'e':xs) = ('>':removePseudoUnicode xs)
removePseudoUnicode (x:xs) = (x:removePseudoUnicode xs)
-- | Remove " from start and end of string.
plainstring :: String -> String
plainstring ('\"':s) = reverse $ plainstring $ reverse s
plainstring s = s
-- | Determine the type of rule for a JSON node.
getRuleType :: HasRuleName r => r -> String
getRuleType r
| isIntruderRule r = "isIntruderRule"
| isDestrRule r = "isDestrRule"
| isIEqualityRule r = "isIEqualityRule"
| isConstrRule r = "isConstrRule"
| isPubConstrRule r = "isPubConstrRule"
| isFreshRule r = "isFreshRule"
| isIRecvRule r = "isIRecvRule"
| isISendRule r = "isISendRule"
| isCoerceRule r = "isCoerceRule"
| isProtocolRule r = "isProtocolRule"
| otherwise = "unknown rule type"
-- | Generate the JSON data structure from a term.
-- | "instance Show a" in Raw.hs served as example.
lntermToJSONGraphNodeTerm :: Bool -> LNTerm -> JSONGraphNodeTerm
lntermToJSONGraphNodeTerm pretty t =
case viewTerm t of
Lit l -> Const (show l)
FApp (NoEq (s,_)) []
-> Funct (plainstring $ show s) [] res
FApp (NoEq (s,_)) as
-> Funct (plainstring $ show s) (map (lntermToJSONGraphNodeTerm pretty) as) res
FApp (AC o) as
-> Funct (show o) (map (lntermToJSONGraphNodeTerm pretty) as) res
_ -> Const ("unknown term type: " ++ show t)
where
res = case pretty of
True -> show t
False -> ""
-- | Generate the JSON data structure for items such as facts and actions.
itemToJSONGraphNodeFact :: Bool -> String -> LNFact -> JSONGraphNodeFact
itemToJSONGraphNodeFact pretty id f =
JSONGraphNodeFact { jgnFactId = id
, jgnFactTag = case isProtoFact f of
True -> "ProtoFact"
False -> show (factTag f)
, jgnFactName = showFactTag $ factTag f
, jgnFactMult = case factTagMultiplicity $ factTag f of
Linear -> ""
Persistent -> "!"
, jgnFactTerms = map (lntermToJSONGraphNodeTerm pretty) (factTerms f)
, jgnFactShow = case pretty of
True -> pps $ prettyLNFact f
False -> ""
}
{-|
Generate JSON data structure for facts in premises and conclusion of rules.
Since facts are ordered in the premises and conclusions, the ordering number as well as a prefix
("p" (premise) and "c" (conclusion)) are given to the function.
-}
factToJSONGraphNodeFact :: Bool -> String -> NodeId -> (Int,LNFact) -> JSONGraphNodeFact
factToJSONGraphNodeFact pretty prefix n (idx, f) =
itemToJSONGraphNodeFact pretty (show n ++ ":" ++ prefix ++ show idx) f
-- | Generate JSONGraphNode from node of sequent (metadata part).
-- Facts and actions as are represented as metadata to keep close to the original JSON graph schema.
nodeToJSONGraphNodeMetadata :: Bool -> (NodeId, RuleACInst) -> JSONGraphNodeMetadata
nodeToJSONGraphNodeMetadata pretty (n, ru) =
JSONGraphNodeMetadata { jgnPrems = map (factToJSONGraphNodeFact pretty "p" n)
$ zip [0..] $ L.get rPrems ru
, jgnActs = map (itemToJSONGraphNodeFact pretty "action") $ L.get rActs ru
, jgnConcs = map (factToJSONGraphNodeFact pretty "c" n)
$ zip [0..] $ L.get rConcs ru
}
-- | Generate JSONGraphNode from node of sequent.
nodeToJSONGraphNode :: Bool -> (NodeId, RuleACInst) -> JSONGraphNode
nodeToJSONGraphNode pretty (n, ru) =
JSONGraphNode { jgnId = show n
, jgnType = getRuleType ru
, jgnLabel = getRuleName ru
, jgnMetadata = Just (nodeToJSONGraphNodeMetadata pretty (n, ru))
}
-- | Determine the type of an edge.
getRelationType :: NodeConc -> NodePrem -> System -> String
getRelationType src tgt se =
let check p = maybe False p (resolveNodePremFact tgt se) ||
maybe False p (resolveNodeConcFact src se)
relationType | check isKFact = "KFact"
| check isPersistentFact = "PersistentFact"
| check isProtoFact = "ProtoFact"
| otherwise = "default"
in
relationType
-- | Generate JSON data structure for lastAtom.
lastAtomToJSONGraphNode :: Maybe NodeId -> [JSONGraphNode]
lastAtomToJSONGraphNode n = case n of
Nothing -> []
Just n -> [JSONGraphNode { jgnId = show n
, jgnType = "lastAtom"
, jgnLabel = show n
, jgnMetadata = Nothing
}]
-- | Generate JSON data structure for unsolvedActionAtom.
unsolvedActionAtomsToJSONGraphNode :: Bool -> (NodeId, LNFact) -> JSONGraphNode
unsolvedActionAtomsToJSONGraphNode pretty (n, f) =
JSONGraphNode
{ jgnId = show n
, jgnType = "unsolvedActionAtom"
, jgnLabel = case pretty of
True -> pps $ prettyLNFact f
False -> ""
, jgnMetadata =
Just JSONGraphNodeMetadata
{ jgnPrems = []
, jgnActs = [itemToJSONGraphNodeFact pretty "action" f]
, jgnConcs = []
}
}
{-|
Generate a JSONGraphNode for those nodes in sEdges that are not present in sNodes.
This might occur in the case distinctions shown in the GUI.
Since a fact is missing, the id is encoded as jgnFactId, could also be done directly in jgnId.
-}
missingNodesToJSONGraphNodes :: System -> [Edge] -> [JSONGraphNode]
missingNodesToJSONGraphNodes _ [] = []
missingNodesToJSONGraphNodes se ((Edge (sid, _) (tid, _)):el)
| notElem sid nodelist =
(JSONGraphNode
{ jgnId = show sid
, jgnType = "missingNodeConc"
, jgnLabel = ""
, jgnMetadata =
Just JSONGraphNodeMetadata
{ jgnPrems = []
, jgnActs = []
, jgnConcs =
[ JSONGraphNodeFact
{ jgnFactId = show sid ++":c0"
, jgnFactTag = ""
, jgnFactName = ""
, jgnFactMult = ""
, jgnFactTerms = []
, jgnFactShow = ""
}
]
}
}: missingNodesToJSONGraphNodes se el)
| notElem tid nodelist =
(JSONGraphNode
{ jgnId = show tid
, jgnType = "missingNodePrem"
, jgnLabel = ""
, jgnMetadata =
Just JSONGraphNodeMetadata
{ jgnPrems =
[ JSONGraphNodeFact
{ jgnFactId = show tid ++":p0"
, jgnFactTag = ""
, jgnFactName = ""
, jgnFactMult = ""
, jgnFactTerms = []
, jgnFactShow = ""
}
]
, jgnActs = []
, jgnConcs = []
}
}: missingNodesToJSONGraphNodes se el)
| otherwise = (missingNodesToJSONGraphNodes se el)
where
nodelist = map fst $ M.toList $ L.get sNodes se
-- | Generate JSON data structure for edges.
edgeToJSONGraphEdge :: System -> Edge -> JSONGraphEdge
edgeToJSONGraphEdge se (Edge src tgt) =
JSONGraphEdge { jgeSource = show sid ++ ":c" ++ show concidx
, jgeTarget = show tid ++ ":p" ++ show premidx
, jgeRelation = getRelationType src tgt se
}
where
(sid, ConcIdx concidx) = src
(tid, PremIdx premidx) = tgt
-- | Generate JSON data structure for lessAtoms edge.
lessAtomsToJSONGraphEdge :: (NodeId, NodeId) -> JSONGraphEdge
lessAtomsToJSONGraphEdge (src, tgt) =
JSONGraphEdge { jgeSource = show src
, jgeRelation = "LessAtoms"
, jgeTarget = show tgt
}
-- | Generate JSON data structure for unsolvedChain edge.
unsolvedchainToJSONGraphEdge :: (NodeConc, NodePrem) -> JSONGraphEdge
unsolvedchainToJSONGraphEdge (src, tgt) =
JSONGraphEdge { jgeSource = show sid ++ ":c" ++ show concidx
, jgeTarget = show tid ++ ":p" ++ show premidx
, jgeRelation = "unsolvedChain"
}
where
(sid, ConcIdx concidx) = src
(tid, PremIdx premidx) = tgt
-- | Generate JSON graph(s) data structure from sequent.
sequentToJSONGraphs :: Bool -- ^ determines whether facts etc are also pretty printed
-> String -- ^ label of graph
-> System -- ^ sequent to dump to JSON
-> JSONGraphs
sequentToJSONGraphs pretty label se =
JSONGraphs
{ graphs =
[ JSONGraph
{ jgDirected = True
, jgType = "Tamarin prover constraint system"
, jgLabel = label
, jgNodes = (map (nodeToJSONGraphNode pretty) $ M.toList $ L.get sNodes se)
++ (lastAtomToJSONGraphNode $ L.get sLastAtom se)
++ (map (unsolvedActionAtomsToJSONGraphNode pretty) $ unsolvedActionAtoms se)
++ (missingNodesToJSONGraphNodes se $ S.toList $ L.get sEdges se)
, jgEdges = (map (edgeToJSONGraphEdge se) $ S.toList $ L.get sEdges se)
++ (map lessAtomsToJSONGraphEdge $ S.toList $ L.get sLessAtoms se)
++ (map unsolvedchainToJSONGraphEdge $ unsolvedChains se)
}
]
}
-- | Generate JSON bytestring from sequent.
sequentToJSON :: String -> System -> String
sequentToJSON l se =
BC.unpack $ encode (sequentToJSONGraphs False l se)
-- | NOTE (dschoop): encodePretty encodes < and > as "\u003c" and "\u003e" respectively.
-- The encoding is removed with function removePseudoUnicode since Data.Strings.Util is non-standard.
-- The function encodePretty returns Data.ByteString.Lazy.Internal.ByteString containing
-- 8-bit bytes. However, eventually some other ByteString or String is expected by writeFile
-- in /src/Web/Theory.hs.
sequentToJSONPretty :: String -> System -> String
sequentToJSONPretty l se =
removePseudoUnicode $ BC.unpack $ encodePretty $ sequentToJSONGraphs True l se
writeSequentAsJSONToFile :: FilePath -> String -> System -> IO ()
writeSequentAsJSONToFile fp l se =
do writeFile fp $ sequentToJSON l se
writeSequentAsJSONPrettyToFile :: FilePath -> String -> System -> IO ()
writeSequentAsJSONPrettyToFile fp l se =
do writeFile fp $ sequentToJSONPretty l se
| samscott89/tamarin-prover | lib/theory/src/Theory/Constraint/System/JSON.hs | gpl-3.0 | 17,300 | 3 | 18 | 5,537 | 3,347 | 1,812 | 1,535 | 272 | 6 |
module Main where
import Control.Applicative ((<$>), (<|>))
import Control.Monad(replicateM)
import qualified Data.HashMap.Strict as Map
import Data.List (find)
import Data.Maybe (isJust, isNothing)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import Prelude hiding (Either (..), getChar, putStr,
putStrLn)
import SneakyBeaky.Coord
import SneakyBeaky.Generation
import SneakyBeaky.Lifted
import SneakyBeaky.List
import SneakyBeaky.Rect
import SneakyBeaky.Terminal
import SneakyBeaky.TileTypes
data World = World {
wHero :: !Coord
, wObstacles :: ![ObstacleTile]
, wEnemies :: ![Enemy]
, wExit :: !Coord
, wLightSources :: ![LightSource]
, wViewport :: !Rect
}
data Input = Up
| Down
| Left
| Right
| Stand
| Exit
| UpLeft
| UpRight
| DownLeft
| DownRight
deriving (Eq)
lightTiles :: LightSource -> CoordSet
lightTiles ls = let (cx,cy) = lsPosition ls
r = lsRadius ls
ts = [(x,y) | x <- [cx-r..cx+r],y <- [cy-r..cy+r]]
in Set.fromList $ filter (\(x,y) -> (x-cx)*(x-cx) + (cy-y)*(cy-y) <= r) ts
lightTilesUnion :: [LightSource] -> CoordSet
lightTilesUnion = Set.unions . map lightTiles
gameTitle :: String
gameTitle = "sneakybeaky"
initialWorld :: [ObstacleTile] -> [Enemy] -> [LightSource] -> Coord -> Rect -> World
initialWorld obstacles enemies lightSources exit viewport = World {
wHero = (0,0)
, wObstacles = obstacles
, wExit = exit
, wLightSources = lightSources
, wViewport = viewport
, wEnemies = enemies
}
main :: IO ()
main = do
let viewport = mkRectPosDim (0,0) (80,25)
run viewport $ do
obstacles <- evalRandIO $ generateObstacles viewport
let obstaclePositions = Set.fromList ((tPosition . oTile) <$> obstacles)
enemies <- evalRandIO $ replicateM 20 (generateEnemy viewport obstaclePositions)
lightSources <- evalRandIO (replicateM 12 (generateLightSource viewport obstaclePositions))
(start,exit) <- evalRandIO (generateStartAndExit viewport obstaclePositions)
let world = World {
wHero = start
, wObstacles = obstacles
, wExit = exit
, wLightSources = lightSources
, wViewport = viewport
, wEnemies = enemies
}
gameLoop world
insideLight :: Coord -> LightSource -> Bool
insideLight (x,y) (LightSource (lx,ly) r) = (x-lx)*(x-lx) + (y-ly)*(y-ly) <= r
viewObstructed :: CoordSet -> Coord -> Coord -> Maybe Coord
viewObstructed obstacles from to = find (`Set.member` obstacles) (line from to)
tileToAssoc :: Tile -> (Coord,Tile)
tileToAssoc t = (tPosition t,t)
obstacleTilesAsMap :: World -> Map.HashMap Coord Tile
obstacleTilesAsMap w = Map.fromList $ map (tileToAssoc . renderObstacle) (wObstacles w)
obstaclesAsSet :: World -> CoordSet
obstaclesAsSet w = Set.fromList $ map (tPosition . oTile) (wObstacles w)
litTiles :: World -> CoordSet
litTiles w = let lights = wLightSources w
lit = lightTilesUnion lights
obstacleTiles = obstaclesAsSet w
litFilter lite = any isNothing (map (\l -> viewObstructed obstacleTiles (lsPosition l) lite) lights)
in Set.filter litFilter lit
renderWorld :: World -> Rect -> [Tile]
renderWorld w viewport =
let obstacleTiles = obstacleTilesAsMap w
enemyTiles = Map.fromList $ map tileToAssoc $ concatMap renderEnemy (wEnemies w)
realTiles = obstacleTiles <> enemyTiles <> Map.fromList (map tileToAssoc [renderHero (wHero w),renderExit (wExit w)])
renderedLit = (Map.fromList . map (\c -> (c,renderLit c)) . Set.toList) (litTiles w)
in (filter ((\p -> insideRect viewport p && p /= ((\(x,y) -> (x-1,y-1)) (rBottomRight viewport))) . tPosition) . Map.elems) (realTiles <> renderedLit)
renderLit :: Coord -> Tile
renderLit c = Tile { tCharacter = '.', tPosition = c,tColor = mkColorPair Blue Transparent }
renderEnemy :: Enemy -> [Tile]
renderEnemy e | eVisible e = [eTile e]
| otherwise = []
renderHero :: Coord -> Tile
-- renderHero c = Tile { tCharacter = '@', tSgr = [SetConsoleIntensity BoldIntensity, SetColor Foreground Vivid Blue ], tPosition = c }
renderHero c = Tile { tCharacter = '@', tPosition = c,tColor = mkColorPair White Transparent }
renderExit :: Coord -> Tile
renderExit c = Tile {
tCharacter = '>'
, tPosition = c
, tColor = mkColorPair Blue Transparent
}
renderObstacle :: ObstacleTile -> Tile
renderObstacle = oTile
showMessageAndWait :: String -> TerminalMonad ()
showMessageAndWait s = drawStringCentered s >> getCharEvent >> return ()
gameLoop :: World -> TerminalMonad ()
gameLoop w =
if wHero w == wExit w
then showMessageAndWait "You won!"
else do
render (renderWorld (updateEnemyVisibility w) (wViewport w))
input <- getInput
case input of
Exit -> return ()
_ ->
case updateEnemies w of
Nothing -> showMessageAndWait "Game over!"
Just newWorld -> do
let inputResult = handleDir newWorld input
if isJust (enemyAt inputResult (wHero inputResult))
then showMessageAndWait "Game over!"
else gameLoop inputResult
getInput :: TerminalMonad Input
getInput = do
char <- getCharEvent
case char of
'q' -> return Exit
'k' -> return Up
'j' -> return Down
'h' -> return Left
'.' -> return Stand
'l' -> return Right
'y' -> return UpLeft
'u' -> return UpRight
'b' -> return DownLeft
'n' -> return DownRight
_ -> getInput
obstacleAt :: World -> Coord -> Maybe ObstacleTile
obstacleAt w c = find ((== c) . tPosition . oTile) (wObstacles w)
enemyAt :: World -> Coord -> Maybe Enemy
enemyAt w c = find ((== c) . tPosition . eTile) (wEnemies w)
playerAt :: World -> Coord -> Bool
playerAt w c = wHero w == c
replaceEnemy :: World -> Enemy -> Enemy -> World
replaceEnemy w old new = w { wEnemies = replaceBy (wEnemies w) ((== (tPosition . eTile) old). tPosition . eTile) new }
updateEnemyVisibility :: World -> World
updateEnemyVisibility w = let os = obstaclesAsSet w
in foldr (updateEnemyVisibility' os) w (wEnemies w)
where updateEnemyVisibility' os e w' = replaceEnemy w' e (e { eVisible = isNothing (viewObstructed os (wHero w') (tPosition . eTile $ e))})
updateEnemies :: World -> Maybe World
updateEnemies ow = let lt = litTiles ow
foldEnemy' e w' = w' >>= \w -> let re = updateEnemy w lt e
in if (tPosition . eTile) re == wHero ow
then Nothing
else Just (replaceEnemy w e re)
in foldr foldEnemy' (Just ow) (wEnemies ow)
maxFramesSeen :: Int
maxFramesSeen = 3
updateEnemy :: World -> CoordSet -> Enemy -> Enemy
updateEnemy w lt e | eAggro e = updateEnemyAggro w lt e
| otherwise = updateEnemyCalm w lt e
updateEnemyAggro :: World -> CoordSet -> Enemy -> Enemy
updateEnemyAggro w _ e = case calculateOptimalPath (obstaclesAsSet w) ((tPosition . eTile) e) (wHero w) of
Nothing -> e
Just [] -> e
Just (x:_) -> e { eTile = (eTile e) { tPosition = x } }
updateEnemyCalm :: World -> CoordSet -> Enemy -> Enemy
updateEnemyCalm w lt e =
let oldPosition = tPosition (eTile e)
newPosition' = oldPosition `pairPlus` eWalkingDir e
isAtTurningPoint = eCurrentWalk e + 1 == eWalkingRadius e
isObstructed = isJust (obstacleAt w newPosition') || isJust (enemyAt w newPosition')
isTurning = isAtTurningPoint || isObstructed
newWalkingDir = if isTurning
then pairNegate (eWalkingDir e)
else eWalkingDir e
newPosition = if isObstructed then oldPosition else newPosition'
newVisible = isNothing (viewObstructed (obstaclesAsSet w) (wHero w) newPosition)
newFramesSeen = eFramesSeen e + if wHero w `Set.member` lt && newVisible then 1 else 0
--newCharacter = tCharacter (eTile e)
newAggro = newFramesSeen >= maxFramesSeen
newColor = if newAggro
then Red
else case newFramesSeen of
0 -> White
1 -> Cyan
2 -> Yellow
_ -> Green
newTile = (eTile e) { tPosition = newPosition,tColor = mkColorPair newColor Transparent }
newCurrentWalk = if isAtTurningPoint
then 0
else eCurrentWalk e + 1
in Enemy newTile newAggro newWalkingDir (eWalkingRadius e) newCurrentWalk newFramesSeen newVisible
handleDir :: World -> Input -> World
handleDir w input = w { wHero = newCoord }
where oldCoord@(heroX,heroY) = wHero w
newCoord' = case input of
Up -> (heroX, heroY - 1)
Down -> (heroX, heroY + 1)
Left -> (heroX - 1, heroY)
Right -> (heroX + 1, heroY)
UpLeft -> (heroX - 1, heroY - 1)
UpRight -> (heroX + 1, heroY - 1)
DownLeft -> (heroX - 1, heroY + 1)
DownRight -> (heroX + 1, heroY + 1)
_ -> oldCoord
newCoord = if insideRect (wViewport w) newCoord' then case obstacleAt w newCoord' of
Nothing -> newCoord'
Just _ -> oldCoord
else oldCoord
| pmiddend/sneakybeaky | src/SneakyBeaky/Main.hs | gpl-3.0 | 9,799 | 0 | 21 | 3,006 | 3,143 | 1,655 | 1,488 | 224 | 11 |
-- | Common args and parser.
module OrgStat.CLI
( CommonArgs (..)
, parseCommonArgs
) where
import Universum
import Options.Applicative.Simple (Parser, help, long, metavar, strOption, switch)
-- | Read-only arguments that inner application needs (in contrast to,
-- say, logging severity).
data CommonArgs = CommonArgs
{ caXdgOpen :: !Bool
-- ^ Open report types using xdg-open
, caOutputs :: ![Text]
-- ^ Specific outputs can be selected instead of running all of them.
, caOutputDir :: !(Maybe FilePath)
-- ^ Output directory for all ... outputs.
} deriving Show
parseCommonArgs :: Parser CommonArgs
parseCommonArgs =
CommonArgs <$>
switch (long "xdg-open" <> help "Open each report using xdg-open") <*>
many (
fromString <$>
strOption (long "output" <>
long "select-output" <>
help ("Output name(s) you want to process " <>
"(default: all outputs are processed)"))) <*>
optional (
strOption (long "output-dir" <>
metavar "FILEPATH" <>
help ("Final output directory that overrides one in config. " <>
"No extra subdirectories will be created")))
| volhovM/orgstat | src/OrgStat/CLI.hs | gpl-3.0 | 1,275 | 0 | 15 | 389 | 212 | 117 | 95 | 31 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- file Spec.hs
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
import JobParser
import Data.Text as T
import Data.List as L
import qualified Data.Text.IO as TIO
singleLine = T.pack "E8 Unité de Soins de Longue Durée (USLD) S.J. MANCIAUX"
twoLines = T.pack "E666 CardiologieBidon \n H.C. XXX"
reSort = T.unlines . L.sort . T.lines
main :: IO ()
main = hspec $ do
describe "Small tests" $ do
it "single line" $ do
getAllJobs singleLine `shouldBe` [Job "E8" "Unité de Soins de Longue Durée (USLD)" "S.J." "MANCIAUX"]
it "multi-lines (2)" $ do
getAllJobs twoLines `shouldBe` [Job "E666" "CardiologieBidon" "H.C." "XXX"]
describe "Full tests" $ do
it "A1Trimestre2" $ do
output <- parseData "tests/postesa1trimestre2.txt"
ref <- TIO.readFile "tests/postesa1trimestre2_ref.txt"
ref == output `shouldBe` True
it "A1Trimestre3" $ do
output <- parseData "tests/postesa1trimestre3.txt"
TIO.writeFile "output.csv" output
ref <- TIO.readFile "tests/postesa1trimestre3_ref.txt"
ref == output `shouldBe` True
it "A2Trimestre1" $ do
output <- parseData "tests/postesa2trimestre2.txt"
TIO.writeFile "output.csv" output
ref <- TIO.readFile "tests/postesa2trimestre2_ref.txt"
let ref_sorted = reSort ref
let output_sorted = reSort output
ref_sorted == output_sorted `shouldBe` True
it "A2Trimestre3" $ do
output <- parseData "tests/postesa2trimestre3.txt"
TIO.writeFile "output.csv" output
ref <- TIO.readFile "tests/postesa2trimestre3_ref.txt"
let ref_sorted = reSort ref
let output_sorted = reSort output
ref_sorted == output_sorted `shouldBe` True
| alexDarcy/shelly-examples | stages/tests/Integration.hs | gpl-3.0 | 1,850 | 0 | 17 | 453 | 453 | 215 | 238 | 42 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Estuary.Types.EnsembleRequest where
import Data.Time
import Data.Text
import GHC.Generics
import Data.Aeson
import Data.Sequence
import Estuary.Types.View
import Estuary.Types.Definition
import Estuary.Types.Tempo
import Estuary.Types.ResourceOp
data EnsembleRequest =
WriteTempo Tempo |
WriteZone Int Definition |
WriteView Text View |
WriteChat Text |
WriteStatus Text |
WriteResourceOps (Seq ResourceOp) |
ResetZonesRequest |
ResetViewsRequest |
ResetTempoRequest Tempo |
ResetRequest Tempo
deriving (Generic)
instance ToJSON EnsembleRequest where
toEncoding = genericToEncoding defaultOptions
instance FromJSON EnsembleRequest
| d0kt0r0/estuary | common/src/Estuary/Types/EnsembleRequest.hs | gpl-3.0 | 698 | 0 | 8 | 100 | 147 | 87 | 60 | 26 | 0 |
module AFN where
import Data.List (foldl')
import MonadicSet (Set)
import qualified MonadicSet as S
import Regex
import Transicao
data AFN a = AFN { estados :: Set Int
, estadosAtuais :: Set Int
, aceitacao :: Set Int
, inicio :: Int
, transicoes :: Set (Transicao a Int)
}
deriving (Eq,Show)
buildAFN :: Regex Char -> (Set Char, AFN Char)
buildAFN Vazia = (S.singleton 'E', zero)
buildAFN (Literal l) = (S.singleton l, single l)
buildAFN (Kleene r) = let (a,m) = buildAFN r in (a,estrela m)
buildAFN r = let ((a1,m1),(a2,m2),f) = case r of
r1 :. r2 -> (buildAFN r1,buildAFN r2, concatenar)
r1 :+ r2 -> (buildAFN r1,buildAFN r2, union)
_ -> error "Não vai acontecer =)"
in (a1 `S.union` a2, f m1 m2)
concatenar, union :: Ord a => AFN a -> AFN a -> AFN a
concatenar m1 m2 = m1' { estadosAtuais = fechoVazio m1' }
where m2' = ajustarIndices m1 m2
transVazia i q = TransVazia q i
m1' = m1 { estados = estados m1 `S.union` estados m2'
, aceitacao = aceitacao m2'
, transicoes = S.map (transVazia (inicio m2')) (aceitacao m1)
`S.union` transicoes m1 `S.union` transicoes m2'
}
union m1 m2 = m3{estadosAtuais = fechoVazio m3}
where m1' = atualizarAFN 1 m1
m2' = atualizarAFN 1 $ ajustarIndices m1' m2
m3 = AFN { estados = S.insert 0 $ estados m1' `S.union` estados m2'
, estadosAtuais = fechoVazio m3
, aceitacao = aceitacao m1' `S.union` aceitacao m2'
, inicio = 0
, transicoes = S.fromList [TransVazia 0 $ inicio m1', TransVazia 0 $ inicio m2']
`S.union` transicoes m1'
`S.union` transicoes m2'
}
{- A simple algorithm to implement the Kleene closure (star) is the following:
 1. Add 1 to the identifiers of all states and transitions of the NFA;
 2. Create a new initial state and call it 0;
 3. For each final state of the automaton, create an empty (epsilon) transition to 0;
 4. Make 0 the only final state; -}
estrela :: Ord a => AFN a -> AFN a
estrela m = m'
where m' = m { estados = S.insert 0 . S.map (+1) $ estados m
, estadosAtuais = fechoVazio m'
, aceitacao = S.singleton 0
, transicoes = S.insert (TransVazia 0 1) . S.map (mapT (+1)) $ transicoes m
}
-- Automaton that recognizes the expression 'E'.
zero :: Ord a => AFN a
zero = AFN { estados = es
, aceitacao = es
, estadosAtuais = es
, inicio = 0
, transicoes = S.empty
}
where es = S.singleton 0
-- Automaton for the empty language.
empty :: AFN a
empty = AFN { estados = S.singleton 0
, aceitacao = S.empty
, estadosAtuais = S.singleton 0
, inicio = 0
, transicoes = S.empty
}
-- Creates an NFA that recognizes the language whose only word is the given literal.
single :: Ord a => a -> AFN a
single a = AFN { estados = S.fromAscList [0,1]
, aceitacao = S.singleton 1
, estadosAtuais = S.singleton 0
, inicio = 0
, transicoes = S.singleton $ Trans a 0 1
}
ajustarIndices :: Ord a => AFN a -> AFN a -> AFN a
ajustarIndices m1 = atualizarAFN s1
where s1 = S.size $ estados m1
atualizarAFN :: Ord a => Int -> AFN a -> AFN a
atualizarAFN n m = m' { estadosAtuais = fechoVazio m' }
where es = S.map (+n) $ estados m
fs = S.map (+n) $ aceitacao m
ts = S.map (mapT (+n)) $ transicoes m
m' = m { estados = es
, aceitacao = fs
, transicoes = ts
, inicio = inicio m + n
}
fechoVazio :: Ord a => AFN a -> Set Int
fechoVazio m = fechoVazio' m $ inicio m
fechoVazio' :: Ord a => AFN a -> Int -> Set Int
fechoVazio' m = fv S.empty
where fv visited q
| S.null toVisit = visited
| otherwise = toVisit S.>>= fv (visited `S.union` toVisit)
where toVisit = S.map destino . S.filter filtro $ transicoes m
filtro t = vazia t && origem t == q && destino t `S.notMember` visited
executar :: Ord a => AFN a -> Set a -> [a] -> Maybe (Bool, AFN a)
executar m sigma = foldl' p (Just (False,m))
where p mb i | i `S.notMember` sigma = Nothing
| otherwise = do (_,m) <- mb
let m' = passo m i
return (aceitaEstados m', m')
aceitaEstados :: AFN a -> Bool
aceitaEstados m = not . S.null $ estadosAtuais m `S.intersection` aceitacao m
passo :: Ord a => AFN a -> a -> AFN a
passo m i = m { estadosAtuais = qs S.>>= delta m i }
where qs = estadosAtuais m
delta :: Ord a => AFN a -> a -> Int -> Set Int
delta m i q = qs `S.union` (qs S.>>= fechoVazio' m)
where qs = S.map destino . S.filter filtro $ transicoes m
filtro (Trans i' q' _) = i' == i && q' == q
filtro _ = False
| lordao/lfc-regex | src/AFN.hs | gpl-3.0 | 5,514 | 0 | 15 | 2,163 | 1,842 | 954 | 888 | 101 | 3 |
module Game.Toliman.Graphical.SDL.Core(
sdlCheckRet, sdlCheckRet',
sdlCheckPtr, sdlCheckPtr',
sdlCheckPred, sdlCheckPred',
sdlGetError ) where
import Foreign.C.Types (CInt)
import Foreign.C.String (peekCAString)
import Foreign.Ptr (Ptr, nullPtr)
import Text.Printf (printf)
import Control.Monad.Lift.IO (MonadIO, liftIO)
import Graphics.UI.SDL (getError)
import Monad.Error (throwError)
import Game.Toliman.Graphical.Internal.Errors (
MonadGraphicalError, TolimanGraphicalError(..))
sdlCheckRet :: (MonadGraphicalError m, MonadIO m) => String -> CInt -> m CInt
sdlCheckRet desc ret
| ret == -1 = sdlGetError desc
| otherwise = return ret
sdlCheckRet' :: (MonadGraphicalError m, MonadIO m) => String -> IO CInt -> m CInt
sdlCheckRet' desc m = sdlCheckRet desc =<< (liftIO $ m)
sdlCheckPtr :: (MonadGraphicalError m, MonadIO m) => String -> Ptr a -> m (Ptr a)
sdlCheckPtr desc p
| p == nullPtr = sdlGetError desc
| otherwise = return p
sdlCheckPtr' :: (MonadGraphicalError m, MonadIO m) => String -> IO (Ptr a) -> m (Ptr a)
sdlCheckPtr' desc m = sdlCheckPtr desc =<< (liftIO $ m)
sdlCheckPred :: (MonadGraphicalError m, MonadIO m) => String -> Bool -> m ()
sdlCheckPred desc p
| p = sdlGetError desc
| otherwise = return ()
sdlCheckPred' :: (MonadGraphicalError m, MonadIO m) => String -> m Bool -> m ()
sdlCheckPred' desc m = sdlCheckPred desc =<< m
sdlGetError :: (MonadGraphicalError m, MonadIO m) => String -> m a
sdlGetError desc = do
message <- liftIO (getError >>= peekCAString)
throwError $ SDLError $ printf "%s: %s" desc message
| duncanburke/toliman-graphical | src-lib/Game/Toliman/Graphical/SDL/Core.hs | mpl-2.0 | 1,577 | 0 | 10 | 265 | 593 | 309 | 284 | 36 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Webhooks.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified webhook.
--
-- /See:/ <https://cloud.google.com/dialogflow/ Dialogflow API Reference> for @dialogflow.projects.locations.agents.webhooks.delete@.
module Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Webhooks.Delete
(
-- * REST Resource
ProjectsLocationsAgentsWebhooksDeleteResource
-- * Creating a Request
, projectsLocationsAgentsWebhooksDelete
, ProjectsLocationsAgentsWebhooksDelete
-- * Request Lenses
, plawdXgafv
, plawdUploadProtocol
, plawdForce
, plawdAccessToken
, plawdUploadType
, plawdName
, plawdCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.locations.agents.webhooks.delete@ method which the
-- 'ProjectsLocationsAgentsWebhooksDelete' request conforms to.
type ProjectsLocationsAgentsWebhooksDeleteResource =
"v3" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "force" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Delete '[JSON] GoogleProtobufEmpty
-- | Deletes the specified webhook.
--
-- /See:/ 'projectsLocationsAgentsWebhooksDelete' smart constructor.
data ProjectsLocationsAgentsWebhooksDelete =
ProjectsLocationsAgentsWebhooksDelete'
{ _plawdXgafv :: !(Maybe Xgafv)
, _plawdUploadProtocol :: !(Maybe Text)
, _plawdForce :: !(Maybe Bool)
, _plawdAccessToken :: !(Maybe Text)
, _plawdUploadType :: !(Maybe Text)
, _plawdName :: !Text
, _plawdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsAgentsWebhooksDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plawdXgafv'
--
-- * 'plawdUploadProtocol'
--
-- * 'plawdForce'
--
-- * 'plawdAccessToken'
--
-- * 'plawdUploadType'
--
-- * 'plawdName'
--
-- * 'plawdCallback'
projectsLocationsAgentsWebhooksDelete
:: Text -- ^ 'plawdName'
-> ProjectsLocationsAgentsWebhooksDelete
projectsLocationsAgentsWebhooksDelete pPlawdName_ =
ProjectsLocationsAgentsWebhooksDelete'
{ _plawdXgafv = Nothing
, _plawdUploadProtocol = Nothing
, _plawdForce = Nothing
, _plawdAccessToken = Nothing
, _plawdUploadType = Nothing
, _plawdName = pPlawdName_
, _plawdCallback = Nothing
}
-- | V1 error format.
plawdXgafv :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Xgafv)
plawdXgafv
= lens _plawdXgafv (\ s a -> s{_plawdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plawdUploadProtocol :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Text)
plawdUploadProtocol
= lens _plawdUploadProtocol
(\ s a -> s{_plawdUploadProtocol = a})
-- | This field has no effect for webhook not being used. For webhooks that
-- are used by pages\/flows\/transition route groups: * If \`force\` is set
-- to false, an error will be returned with message indicating the
-- referenced resources. * If \`force\` is set to true, Dialogflow will
-- remove the webhook, as well as any references to the webhook (i.e.
-- Webhook and tagin fulfillments that point to this webhook will be
-- removed).
plawdForce :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Bool)
plawdForce
= lens _plawdForce (\ s a -> s{_plawdForce = a})
-- | OAuth access token.
plawdAccessToken :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Text)
plawdAccessToken
= lens _plawdAccessToken
(\ s a -> s{_plawdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plawdUploadType :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Text)
plawdUploadType
= lens _plawdUploadType
(\ s a -> s{_plawdUploadType = a})
-- | Required. The name of the webhook to delete. Format:
-- \`projects\/\/locations\/\/agents\/\/webhooks\/\`.
plawdName :: Lens' ProjectsLocationsAgentsWebhooksDelete Text
plawdName
= lens _plawdName (\ s a -> s{_plawdName = a})
-- | JSONP
plawdCallback :: Lens' ProjectsLocationsAgentsWebhooksDelete (Maybe Text)
plawdCallback
= lens _plawdCallback
(\ s a -> s{_plawdCallback = a})
instance GoogleRequest
ProjectsLocationsAgentsWebhooksDelete
where
type Rs ProjectsLocationsAgentsWebhooksDelete =
GoogleProtobufEmpty
type Scopes ProjectsLocationsAgentsWebhooksDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient
ProjectsLocationsAgentsWebhooksDelete'{..}
= go _plawdName _plawdXgafv _plawdUploadProtocol
_plawdForce
_plawdAccessToken
_plawdUploadType
_plawdCallback
(Just AltJSON)
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsAgentsWebhooksDeleteResource)
mempty
| brendanhay/gogol | gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Locations/Agents/Webhooks/Delete.hs | mpl-2.0 | 6,096 | 0 | 16 | 1,306 | 787 | 462 | 325 | 119 | 1 |
import System.Plugins
import API
conf = "../Plugin.hs"
apipath = "../api"
main = do
status <- makeWith conf conf ["-i"++apipath]
o <- case status of
MakeFailure e -> mapM_ putStrLn e >> error "compile failed"
MakeSuccess _ o -> return o
m_v <- load o [apipath] [] "resource"
v <- case m_v of
LoadSuccess _ v -> return v
LoadFailure ers -> mapM_ putStrLn ers >> error "load failed"
putStr $ field v
makeCleaner o
| Changaco/haskell-plugins | testsuite/makewith/global_pragma/prog/Main.hs | lgpl-2.1 | 527 | 0 | 12 | 189 | 174 | 80 | 94 | 15 | 3 |
{-# LANGUAGE DeriveDataTypeable, TemplateHaskell, TypeOperators #-}
-- | general GTK FRP functions. These may be split into a separate
-- module in the future...
module Jaek.UI.FrpTypes
where
import Graphics.UI.Gtk
import Reactive.Banana as B
import Data.Data
import Data.Label
type MotionEvent = ([EventModifier], Double, Double)
data ClickType = SingleC | DoubleC | TripleC | ReleaseC
deriving (Eq, Show, Enum, Ord, Data, Typeable)
-- | Encapsulate information about a click
data ClickEvent = ClickE {
_clickType :: !ClickType
,_clickMods :: [EventModifier]
,_xPos :: !Double
,_yPos :: !Double
}
deriving (Eq, Show, Ord, Data, Typeable)
-- | Encapsulate data about drag events
data DragEvent = DragE {
_dragStart :: ClickEvent
,_xDragEnd :: !Double
,_yDragEnd :: !Double
}
deriving (Eq, Show, Data, Typeable)
data EventModifier
= ShiftE
| LockE
| ControlE
| AltE
| Alt2E
| Alt3E
| Alt4E
| Alt5E
| Button1E
| Button2E
| Button3E
| Button4E
| Button5E
| SuperE
| HyperE
| MetaE
| ReleaseE
| ModifierMaskE
deriving (Eq, Ord, Show, Data, Typeable)
fromModifier :: Modifier -> EventModifier
fromModifier Shift = ShiftE
fromModifier Lock = LockE
fromModifier Control = ControlE
fromModifier Alt = AltE
fromModifier Alt2 = Alt2E
fromModifier Alt3 = Alt3E
fromModifier Alt4 = Alt4E
fromModifier Alt5 = Alt5E
fromModifier Button1 = Button1E
fromModifier Button2 = Button2E
fromModifier Button3 = Button3E
fromModifier Button4 = Button4E
fromModifier Button5 = Button5E
fromModifier Super = SuperE
fromModifier Hyper = HyperE
fromModifier Meta = MetaE
fromModifier Release = ReleaseE
fromModifier ModifierMask = ModifierMaskE
-- | Check if a drag is valid, i.e. start and end points differ
checkDrag :: DragEvent -> Bool
checkDrag (DragE (ClickE _ _ cx cy) dx dy) = (cx /= dx) || (cy /= dy)
data DragAcc = None | Start ClickEvent | Full DragEvent
click2ClickType :: Click -> ClickType
click2ClickType SingleClick = SingleC
click2ClickType DoubleClick = DoubleC
click2ClickType TripleClick = TripleC
click2ClickType ReleaseClick = ReleaseC
isClick :: ClickEvent -> Bool
isClick = not . isRelease
isRelease :: ClickEvent -> Bool
isRelease (ClickE ReleaseC _ _ _) = True
isRelease _ = False
$(mkLabels [''ClickEvent, ''DragEvent])
| JohnLato/jaek | src/Jaek/UI/FrpTypes.hs | lgpl-3.0 | 2,340 | 0 | 9 | 456 | 631 | 351 | 280 | 83 | 1 |
module Board
(showBoard
,makeBoard
,move
,check
,flipChains
,hasMovesFor
,hasChain
,slice
,moves
,indexes
,value) where
import Data.Array
import DataTypes
showBoard :: Board -> IO ()
showBoard b = putStrLn $ tablerize (prettify b)
where prettify b = foldr1 (\x acc -> x ++ "\n" ++ acc) board
where line r = foldr (\x acc -> " " ++ (show x) ++ " " ++ acc) "" ( map ((!) b) r )
board = [ line ( range ((i,1),(i,8)) ) | i <- [1..8]]
tablerize b = column 1 $ " " ++ row ++ "\n" ++ b
where row = foldr (\x acc -> " " ++ (show x) ++ " " ++ acc) "" [1..8]
column i [] = []
column i (x:xs)
| x == '\n' = (x : (show i)) ++ (column (i+1) xs)
| otherwise = (x : column i xs)
-- Make a 8-board with 4 centered pieces (othello rules)
makeBoard :: Board
makeBoard = board n // [((4,4), X), ((4,5), O), ((5,4), O), ((5,5), X)]
where board n = array ((1,1), n) [ ((i,j), E) | (i,j) <- range ((1,1), n)]
n = (8,8)
-- Adds or updates a given piece
move :: Board -> Move -> Board
move b (p, s) = b // [(p,s)]
moves :: Board -> [Move] -> Board
moves b mvs = b // mvs
-- Checks if some move is valid
check :: Board -> Move -> Bool
check b m = empty b p && (foldr (||) False (map hasChain (allChains newBoard p)))
where newBoard = move b m
p = fst m
-- Turns out all flankered pieces
flipChains :: Board -> Position -> Board
flipChains b p = foldr (\x bacc -> if hasChain x then moves bacc $ turnChain x else bacc) b chains
where chains = allChains b p
turnChain ps = map turnPiece $ takeWhile (\x -> current /= snd x ) (tail ps)
turnPiece (p,s)
| s == O = (p,X)
| otherwise = (p,O)
current = b!p
-- Finds the all 8 possible chains for a given position
allChains :: Board -> Position -> [[(Position, State)]]
allChains b p = map (slice b p) [(-1,0),(-1,1),(0,1),(1,1),(1,0),(1,-1),(0,-1),(-1,-1)]
-- Check if there are a chain of pieces other than the first one
hasChain :: [(Position, State)] -> Bool
hasChain ((m,s):ms) = hasChainOf s ms 0
where hasChainOf _ [] _ = False
hasChainOf sym (x:xs) pos
| snd x == sym = pos /= 0
| snd x /= E = hasChainOf sym xs (pos+1)
| otherwise = False
slice :: Board -> Position -> (Int,Int) -> [(Position, State)]
slice b (i,j) (di,dj)
| inRange (bounds b) (i,j) = (current : slice b (i+di,j+dj) (di,dj))
| otherwise = []
where current = ((i,j), b!(i,j))
-- Checks if there is at least one valid move for the given state
hasMovesFor :: Board -> State -> Bool
hasMovesFor b s = foldr1 (||) moves
where moves = [ check b ((i,j),s) | (i,j) <- range (bounds b) ]
-- Verifies if a given position is empty
empty :: Board -> Position -> Bool
empty b p = (b!p) == E
-- All possible indexes for the board
indexes :: Board -> [(Int,Int)]
indexes b = range (bounds b)
-- Current state of given position
value :: Board -> Position -> State
value b p = b!p
| guiocavalcanti/haskell-reversi | Board.hs | lgpl-3.0 | 3,004 | 0 | 16 | 807 | 1,479 | 815 | 664 | 68 | 2 |
replicate' :: Int -> a -> [a]
replicate' n x
| n <= 0 = []
| otherwise = x : replicate' (n - 1) x
take' :: Int -> [a] -> [a]
take' n _
| n <= 0 = []
take' _ [] = []
take' n (x:xs) = x : take' (n - 1) xs
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = reverse xs ++ [x]
repeat' :: a -> [a]
repeat' x = x : repeat' x
zip' :: [a] -> [b] -> [(a, b)]
zip' _ [] = []
zip' [] _ = []
zip' (x:xs) (y:ys) = (x, y) : zip' xs ys
elem' :: (Eq a) => a -> [a] -> Bool
elem' a [] = False
elem' a (x:xs)
| a == x = True
| otherwise = elem' a xs
-- pattern matching, case expressions and guards
-- pattern matching only works in function definition
-- guards can be used to evaluate Boolean conditions
-- case expressions can be used anywhere, normal pattern matching is just syntactic sugar for case expressions
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
let smallerOrEqual = [a | a <- xs, a <= x]
largerThan = [a | a <- xs, a > x]
in quicksort smallerOrEqual ++ [x] ++ quicksort largerThan
| alexliew/learn_you_a_haskell | code/recursion.hs | unlicense | 1,045 | 0 | 11 | 261 | 530 | 277 | 253 | 29 | 1 |
{-# LANGUAGE TypeOperators #-}
module Language.Sigil.Stack
( push
, pop
) where
import Control.Monad.Free
import Language.Sigil.Types
push :: a -> SigilProgram a ()
push x = liftF $ Push x ()
pop :: SigilProgram a ()
pop = liftF $ Pop ()
{- swap :: (s :. a :. b) -> (s :. b :. a) -}
{- swap (s :. a :. b) = s :. b :. a -}
{- swapd :: s :. a :. b :. c -> s :. b :. a :. c -}
{- swapd (s :. a :. b :. c) = s :. b :. a :. c -}
{- push :: a -> s -> s :. a -}
{- push a s = s :. a -}
{- pop :: (s :. a) -> s -}
{- pop (s :. _) = s -}
{- nip :: (s :. a :. b) -> (s :. b) -}
{- nip (s :. _ :. b) = s :. b -}
{- nip2 :: (s :. a :. b :. c) -> (s :. c) -}
{- nip2 (s :. _ :. _ :. c) = s :. c -}
{- rotr :: (s :. a :. b :. c) -> (s :. c :. a :. b) -}
{- rotr (s :. a :. b :. c) = s :. c :. a :. b -}
{- rotl :: (s :. a :. b :. c) -> (s :. b :. c :. a) -}
{- rotl (s :. a :. b :. c) = s :. b :. c :. a -}
{- dup :: s :. a -> s :. a :. a -}
{- dup s@(_ :. a) = s :. a -}
{- dup2 :: s :. a :. b -> s :. a :. b :. a :. b -}
{- dup2 s@(_ :. a :. b) = s :. a :. b -}
{- dupd :: s :. a :. b -> s :. a :. a :. b -}
{- dupd (s :. a :. b) = s :. a :. a :. b -}
{- over :: s :. a :. b -> s :. a :. b :. a -}
{- over s@(_ :. a :. _) = s :. a -}
{- over2 :: s :. a :. b :. c -> s :. a :. b :. c :. a :. b -}
{- over2 s@(_ :. a :. b :. _) = s :. a :. b -}
{- pick :: s :. a :. b :. c -> s :. a :. b :. c :. a -}
{- pick s@(_ :. a :. _ :. _) = s :. a -}
| erochest/sigil | Language/Sigil/Stack.hs | apache-2.0 | 1,482 | 0 | 7 | 519 | 117 | 77 | 40 | 10 | 1 |
module Lab4 where
fp :: Eq a => (a -> a) -> a -> a
fp f = \ x -> if x == f x then x else fp f (f x)
| bartolkaruza/software-testing-2014-group-W1 | week4/Lab4.hs | apache-2.0 | 103 | 0 | 9 | 36 | 70 | 37 | 33 | 3 | 2 |
module HEP.Automation.EventAnalysis.Command where
import HEP.Automation.EventAnalysis.ProgType
import HEP.Automation.EventAnalysis.Job
commandLineProcess :: EventAnalysis -> IO ()
commandLineProcess (Single lhefp pdffp) = do
putStrLn "test called"
startSingle lhefp pdffp
commandLineProcess (JsonTest fp) = do
putStrLn "jsontest called"
startJsonTest fp
commandLineProcess (MultiAnalysis fp) = do
putStrLn "jsontest called"
startMultiAnalysis fp
commandLineProcess (Junjie lhefp outfp) = do
putStrLn "test called"
startJunjie lhefp outfp
commandLineProcess (LowMassAnalysis hsfp) = do
startLowMassAnalysis hsfp
| wavewave/EventAnalysis | lib/HEP/Automation/EventAnalysis/Command.hs | bsd-2-clause | 640 | 0 | 7 | 93 | 168 | 80 | 88 | 18 | 1 |
module Distribution.VcsRevision.Svn ( getRevision ) where
import Control.Exception
import System.Process
import System.Exit
import Data.List
tryIO :: IO a -> IO (Either IOException a)
tryIO = try
-- | Nothing if we're not in a svn repo, Just (revision,modified) if we're in a repo.
getRevision :: IO (Maybe (String, Bool))
getRevision = do
res <- tryIO $ readProcessWithExitCode "svn" ["info"] ""
case res of
Left ex -> return Nothing
Right (exit,info,_) -> case exit of
ExitSuccess -> do
let prefix = "Last Changed Rev: "
let rev = drop (length prefix) $ head $ filter (prefix `isPrefixOf`) (lines info)
(_,out,_) <- readProcessWithExitCode "svn" ["st", "-q"] ""
return $ Just (rev, out /= "")
_ -> return Nothing
| jkff/vcs-revision | Distribution/VcsRevision/Svn.hs | bsd-2-clause | 775 | 0 | 22 | 176 | 264 | 138 | 126 | 19 | 3 |
module Module2.Task10 where
avg :: Int -> Int -> Int -> Double
avg a b c = fromIntegral (a + b + c) / 3
| dstarcev/stepic-haskell | src/Module2/Task10.hs | bsd-3-clause | 105 | 0 | 9 | 26 | 53 | 28 | 25 | 3 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
module Database.Dedalus.PrettyPrint where
-- This code is initially based on
-- https://github.com/pchiusano/datalog-refactoring/blob/master/src/PrettyPrint.hs
import qualified Text.PrettyPrint as PP
import Text.PrettyPrint (($$),(<>),(<+>))
import Control.Applicative
import Database.Dedalus.Backend
class Pretty p where
doc :: p -> PP.Doc
instance Pretty Con where
doc = PP.text . conName
instance Pretty Var where
doc = PP.text . varName
instance Pretty Term where
doc = eitherTerm doc doc
instance Pretty a => Pretty (Atom a) where
doc (Atom p b) = doc p <> PP.parens (PP.hsep $ PP.punctuate PP.comma (doc <$> b))
instance Pretty Pat where
doc (Pat p) = doc p
doc (Not p) = PP.text "\\+" <+> doc p
instance Pretty Rule where
doc (Rule h b ts) =
doc h <+> doc ts <+> PP.text ":-" <+> (PP.hsep $ PP.punctuate PP.comma (doc <$> b))
instance Pretty TimeSuffix where
doc TSImplicit = PP.empty
doc TSAsync = PP.text "@async"
doc TSNext = PP.text "@next"
doc (TS v) = PP.text "@" <> PP.text (show v)
instance Pretty Fact where
doc (Fact f ts) = doc f <+> doc ts
instance Pretty [Rule] where
doc [] = PP.empty
doc (a:as) = doc a <> PP.text "." $$ doc as
instance Pretty [Fact] where
doc [] = PP.empty
doc (a:as) = doc a <> PP.text "." $$ doc as
instance Pretty ([Fact],[Rule]) where
doc (x,y) = doc x $$ doc y
-- instance Pretty Subst
instance Pretty [(Var,Con)] where
doc vs = PP.vcat $ map (\(v,n) -> doc v <+> PP.text "=" <+> doc n) vs
instance Pretty [QueryResult] where
doc [] = PP.text "no queries"
doc rs = PP.vcat $ map (\(q,qr) -> doc q <+> PP.text " : " <+> doc qr) rs
| alanz/hdedalus | src/Database/Dedalus/PrettyPrint.hs | bsd-3-clause | 1,773 | 0 | 13 | 403 | 744 | 381 | 363 | 42 | 0 |
module Language.ContextSemantics.CallByNeedLambda where
import Language.ContextSemantics.Expressions
import Language.ContextSemantics.Utilities ()
import Language.ContextSemantics.Output
import Control.Arrow (second)
import Data.List (nub)
import Data.List.Zipper
import Data.Maybe
import Data.Nthable
import Prelude hiding (fst, snd)
--
-- Context semantics
--
data Token = White | Black | LeftT | RightT | Bracket [Token] [Token] | Symbol String
instance Show Token where
show White = "⚪"
show Black = "⚫"
show LeftT = "L"
show RightT = "R"
show (Bracket ts1 ts2) = "<" ++ show ts1 ++ "," ++ show ts2 ++ ">"
show (Symbol s) = s
showList = showCompactList
type Port = Zipper [Token] -> Either String (Output (Zipper [Token]))
popAtCursor :: Zipper [Token] -> Either String (Token, Zipper [Token])
popAtCursor tss = case cursor tss of
(t:ts) -> return (t, replace ts tss)
[] -> Left $ "popAtCursor: malformed incoming context " ++ show tss
pushAtCursor :: Token -> Zipper [Token] -> Zipper [Token]
pushAtCursor t tss = replace (t : cursor tss) tss
app :: Port -> Port -> Port -> (Port, Port, Port)
app princp_out cont_out arg_out = (princp_in, cont_in, arg_in)
where
princp_in tss = popAtCursor tss >>= \tss' -> case tss' of
(White, tss'') -> cont_out tss''
(Black, tss'') -> arg_out tss''
_ -> Left $ "app: principal port got malformed incoming context " ++ show tss
cont_in tss = princp_out (pushAtCursor White tss)
arg_in tss = princp_out (pushAtCursor Black tss)
lam :: Port -> Port -> Port -> (Port, Port, Port)
lam princp_out body_out param_out = (princp_in, body_in, param_in)
where
princp_in tss = popAtCursor tss >>= \tss' -> case tss' of
(White, tss'') -> body_out tss''
(Black, tss'') -> param_out tss''
_ -> Left $ "lam: principal port got malformed incoming context " ++ show tss
body_in tss = princp_out (pushAtCursor White tss)
param_in tss = princp_out (pushAtCursor Black tss)
share :: Port -> Port -> Port -> (Port, Port, Port)
share princp_out left_out right_out = (princp_in, left_in, right_in)
where
princp_in tss = popAtCursor tss >>= \tss' -> case tss' of
(LeftT, tss'') -> left_out tss''
(RightT, tss'') -> right_out tss''
_ -> Left $ "share: principal port got malformed incoming context " ++ show tss
left_in tss = princp_out (pushAtCursor LeftT tss)
right_in tss = princp_out (pushAtCursor RightT tss)
enterBox :: Port -> Port
enterBox entering ts = entering (right ts)
leaveBox :: Port -> Port
leaveBox leaving ts = leaving (left ts)
croissant :: String -> Port -> Port -> (Port, Port)
croissant s forced_out boxed_out = (forced_in, boxed_in)
where
forced_in tss = boxed_out (insert [Symbol s] tss)
boxed_in tss = case cursor tss of
[Symbol s'] | s == s' -> forced_out (delete tss)
_ -> Left $ "croissant: boxed port got malformed incoming context " ++ show tss
bracket :: Port -> Port -> (Port, Port)
bracket merged_out waiting_out = (merged_in, waiting_in)
where
merged_in tss = waiting_out (insert ([Bracket (cursor tss) (cursor (right tss))]) (delete (delete tss)))
waiting_in tss = case cursor tss of
[Bracket shallow deep] -> merged_out $ insert shallow $ insert deep $ delete tss
_ -> Left $ "bracket: waiting port got malformed incoming context " ++ show tss
fv :: String -> Port
fv = (Right .) . Output
--
-- Translation from traditional CBN lambda calculus
--
exprSemantics :: Expr -> (Port, [(String, Port)])
exprSemantics e = exprSemantics' (fv "Input") [(v, fv v) | v <- freeVars e] e
exprSemantics' :: Port -> [(String, Port)] -> Expr -> (Port, [(String, Port)])
exprSemantics' out_port env (V v) = (forced_port, [(v, boxed_port)])
where (forced_port, boxed_port) = croissant v out_port (lookupInEnv env v)
exprSemantics' out_port env (e1 :@ e2) = (c, usg)
where (e1_port, usg1) = exprSemantics' r env1 e1
-- If you send a signal out of e2 then it must leave the box - hence the modifications
-- to the environment and the port we supply
(e2_port, usg2) = exprSemantics' (leaveBox a) (map (second leaveBox) env2') e2
-- Both expressions in the application might refer to the same free variable, and we need
-- to insert share nodes if that happens
(env1, env2, usg) = combineUsages env usg1 usg2'
-- If you send a signal to the usages originating from e2 then you implicitly enter the box.
-- Furthermore, we need to make sure that before you enter the box you go through a bracket
-- node -- inserting these is the job of bracketUsages
(env2', usg2') = bracketUsages env2 (map (second enterBox) usg2)
-- Finally, build the app node. Remember that e2 is boxed, so we need to enterBox on its input port
(r, c, a) = app e1_port out_port (enterBox e2_port)
exprSemantics' out_port env (Lam v e) = (r, filter ((/= v) . fst) usg)
where (e_port, usg) = exprSemantics' b ((v, p) : env) e
v_port = (fv $ "Plug for " ++ v) `fromMaybe` lookup v usg
(r, b, p) = lam out_port e_port v_port
combineUsages :: [(String, Port)] -> [(String, Port)] -> [(String, Port)] -> ([(String, Port)], [(String, Port)], [(String, Port)])
combineUsages env usg1 usg2 = (catMaybes env1_mbs, catMaybes env2_mbs, usg)
where
(usg, env1_mbs, env2_mbs) = unzip3 [combineUsage v (lookup v usg1) (lookup v usg2)
| v <- nub $ map fst (usg1 ++ usg2)]
-- If both sides of the usage refer to the same variable, we need to insert a share node and
-- adjust the usage and environment appropriately to interdict all communication between the
-- use and definition sites
combineUsage v mb_p1 mb_p2 = case (mb_p1, mb_p2) of
(Nothing, Nothing) -> error "combineUsage"
(Just p1, Nothing) -> ((v, p1), Just (v, p), Nothing)
(Nothing, Just p2) -> ((v, p2), Nothing, Just (v, p))
(Just p1, Just p2) -> let (p_in, l_in, r_in) = share p p1 p2
in ((v, p_in), Just (v, l_in), Just (v, r_in))
where p = lookupInEnv env v
bracketUsages :: [(String, Port)] -> [(String, Port)] -> ([(String, Port)], [(String, Port)])
bracketUsages env = unzip . map bracketUsage
where
-- For every usage originating from the expression, add something to the environment that
-- brackets it before we go any further away from the box, adjusting the usage information
-- to now refer to the bracket
bracketUsage (v, p) = ((v, m), (v, w))
where (m, w) = bracket p (lookupInEnv env v)
lookupInEnv :: [(String, Port)] -> String -> Port
lookupInEnv env v = error ("No binding for " ++ v) `fromMaybe` lookup v env
--
-- Examples
--
examples :: IO ()
examples = do
printUTF8 $ identity $ fromList [[White]]
printUTF8 $ identity_app $ fromList [[]]
printUTF8 $ self_app $ fromList [[White]]
printUTF8 $ self_app $ fromList [[Black, LeftT, Symbol "x"], [Black, Symbol "α"]]
printUTF8 $ fst dead_var $ fromList [[Black]]
printUTF8 $ fst dead_var $ fromList [[White]]
printUTF8 $ snd dead_var $ fromList [[Symbol "x"], [Symbol "α"]]
printUTF8 $ fst app_to_fv $ fromList [[]]
printUTF8 $ fst app_to_fv_in_lam $ fromList [[White]]
printUTF8 $ snd app_to_fv_in_lam $ fromList [[Symbol "x"], [Black, Symbol "α"], [White]]
-- (\x.x) @ y
-- Port wired to the input of the lambda
identity :: Port
identity = r1
where
inp = fv "Input"
(r1, b1, p1) = lam inp f2 b2
(f2, b2) = croissant "x" b1 p1
-- (\x.x) @ y
-- Port wired to the input of the application
identity_app :: Port
identity_app = c1
where
inp = fv "Input"
y = fv "y"
(r1, c1, _a1) = app r2 inp (enterBox y)
(r2, b2, p2) = lam r1 f3 b3
(f3, b3) = croissant "x" b2 p2
self_app :: Port
self_app = fst $ exprSemantics $ Lam "x" $ V "x" :@ V "x"
dead_var :: (Port, Port)
dead_var = (p, lookupInEnv fvs "x")
where (p, fvs) = exprSemantics $ Lam "y" $ V "x"
app_to_fv :: (Port, Port, Port)
app_to_fv = (p, lookupInEnv fvs "x", lookupInEnv fvs "y")
where (p, fvs) = exprSemantics $ V "x" :@ V "y"
app_to_fv_in_lam :: (Port, Port)
app_to_fv_in_lam = (p, lookupInEnv fvs "x")
where (p, fvs) = exprSemantics $ Lam "y" $ V "x" :@ V "y" | batterseapower/context-semantics | Language/ContextSemantics/CallByNeedLambda.hs | bsd-3-clause | 8,490 | 0 | 16 | 2,076 | 2,950 | 1,588 | 1,362 | 135 | 4 |
module Chp82
where
{-- Derived instances --}
{--
We explained that a typeclass is a sort of an interface that defines some behavior. A type can be made an instance of a typeclass if it supports that behavior.
--}
{--
We also mentioned that they're often confused with classes in languages like Java, Python, C++ and the like, which then baffles a lot of people. In those languages, classes are a blueprint from which we then create objects that contain state and can do some actions.
Typeclasses are more like interfaces. We don't make data from typeclasses. Instead, we first make our data type and then we think about what it can act like.
If it can act like something that can be equated, we make it an instance of the Eq typeclass.
--}
{--
Haskell can derive the behavior of our types in these contexts if we use the deriving keyword when making our data type.
--}
data Person = Person { firstName :: String
, lastName :: String
, age :: Int
} deriving (Eq)
{--
When we derive the Eq instance for a type and then try to compare two values of that type with == or /=, Haskell will see if the value constructors match (there's only one value constructor here though) and then it will check if all the data contained inside matches by testing each pair of fields with ==.
There's only one catch though, the types of all the fields also have to be part of the Eq typeclass.
But since both String and Int are, we're OK. Let's test our Eq instance.
--}
mikeD = Person {firstName = "Michael", lastName = "Diamond", age = 43}
{--
mikeD == Person {firstName = "Michael", lastName = "Diamond", age = 43}
true
we can use it as the "a" for all functions that have a class constraint of "Eq a" in their type signature, such as "elem".
The Show and Read typeclasses are for things that can be converted to or from strings, respectively. Like with Eq, if a type's constructors have fields, their type has to be a part of Show or Read if we want to make our type an instance of them.
--}
data Person2 = Person2 { firstName2 :: String
, lastName2 :: String
, age2 :: Int
} deriving (Eq, Show, Read)
{--
`Read` is pretty much the inverse typeclass of `Show`. `Show` is for converting values of our a type to a string, `Read` is for converting strings to values of our type.
Remember though, when we use the `read` function, we have to use an explicit type annotation to tell Haskell which type we want to get as a result.
If we don't make the type we want as a result explicit, Haskell doesn't know which type we want.
--}
res3 = read "Person2 {firstName2 =\"Michael\", lastName2 =\"Diamond\", age2 = 43}" :: Person2
{--
If we use the result of our read later on in a way that Haskell can infer that it should read it as a person, we don't have to use type annotation.
read "Person {firstName =\"Michael\", lastName =\"Diamond\", age = 43}" == mikeD
True
We can also read parameterized types, but we have to fill in the type parameters.
So we can't do:
read "Just 't'" :: Maybe a
but we can do:
read "Just 't'" :: Maybe Char
--}
{--
We can derive instances for the `Ord` type class, which is for types that have values that can be ordered.
If we compare two values of the same type that were made using different constructors, the value which was made with a constructor that's defined first is considered smaller.
data Bool = False | True deriving (Ord)
--}
res4 = True `compare` False
res5 = True > False
{--
In the `Maybe a` data type, the `Nothing` value constructor is specified before the Just value constructor, so a value of `Nothing` is always smaller than a value of Just something, even if that something is minus one billion trillion.
But if we compare two Just values, then it goes to compare what's inside them.
But we can't do something like Just (*3) > Just (*2), because (*3) and (*2) are functions, which aren't instances of Ord.
--}
data Day1 = Monday1 | Tuesday1 | Wednesday1 | Thursday1 | Friday1 | Saturday1 | Sunday1
{--
Because all the value constructors are nullary (take no parameters, i.e. fields), we can make it part of the Enum typeclass. The Enum typeclass is for things that have predecessors and successors.
We can also make it part of the Bounded typeclass, which is for things that have a lowest possible value and highest possible value.
--}
data Day2 = Monday | Tuesday | Wednesday | Thursday | Friday | Saturday | Sunday
deriving (Eq, Ord, Show, Read, Bounded, Enum)
res6 = minBound :: Day2
-- Monday
{--
Type synonyms
Type synonyms don't really do anything per se, they're just about giving some types different names so that they make more sense to someone reading our code and documentation.
Here's how the standard library defines String as a synonym for [Char].
--}
type String2 = [Char]
{--
Type synonyms can also be parameterized. If we want a type that represents an association list type but still want it to be general so it can use any type as the keys and values, we can do this:
--}
type AssocList k v = [(k,v)]
{--
Now, a function that gets the value by a key in an association list can have a type of (Eq k) => k -> AssocList k v -> Maybe v.
AssocList is a type constructor that takes two types and produces a concrete type, like AssocList Int String, for instance.
When I talk about concrete types I mean like fully applied types like Map Int String or if we're dealin' with one of them polymorphic functions, [a] or (Ord a) => Maybe a and stuff.
And like, sometimes me and the boys say that Maybe is a type, but we don't mean that, cause every idiot knows Maybe is a type constructor.
When I apply an extra type to Maybe, like "Maybe String", then I have a concrete type. You know, values can only have types that are concrete types!
--}
{--
Partial applied type constructor
Just like we can partially apply functions to get new functions, we can partially apply type parameters and get new type constructors from them.
Just like we call a function with too few parameters to get back a new function, we can specify a type constructor with too few type parameters and get back a partially applied type constructor.
type IntMap v = Map Int v
or we can do this
type IntMap = Map Int
When you do a qualified import, type constructors also have to be preceeded with a module name. So you'd write type IntMap = Map.Map Int.
Make sure that you really understand the distinction between type constructors and value constructors.
Just because we made a type synonym called `IntMap` or `AssocList` doesn't mean that we can do stuff like:
AssocList [(1,2),(4,5),(7,9)]
All it means is that we can refer to its type by using different names.
We can do:
[(1,2),(3,5),(8,9)] :: AssocList Int Int
which will make the numbers inside assume a type of Int.
Type synonyms (and types generally) can only be used in the type portion of Haskell.
We're in Haskell's type portion whenever we're defining new types (so in data and type declarations) or when we're located after a "::". The "::" is in type declarations or in type annotations.
--}
data LockerState = Taken | Free deriving (Show, Eq)
type Code = String
{--
Simple stuff. We introduce a new data type to represent whether a locker is taken or free and we make a type synonym for the locker code.
--}
| jamesyang124/haskell-playground | src/Chp82.hs | bsd-3-clause | 7,414 | 0 | 8 | 1,555 | 291 | 185 | 106 | 21 | 1 |
module Snap.Snaplet.Config.Tests where
------------------------------------------------------------------------------
import Control.Concurrent
import Control.Concurrent.Async
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import qualified Data.Configurator.Types as C
import Data.Function
import qualified Data.Map as Map
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup
import Data.Monoid hiding ((<>))
#else
import Data.Monoid
#endif
import Data.Typeable
import System.Environment
------------------------------------------------------------------------------
import Snap.Core
import Snap.Http.Server.Config
import Snap.Snaplet
import Snap.Snaplet.Config
import Snap.Snaplet.Heist
import Snap.Snaplet.Test.Common.App
import Snap.Snaplet.Internal.Initializer
import qualified Snap.Test as ST
import Snap.Snaplet.Test
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import Test.HUnit hiding (Test)
------------------------------------------------------------------------------
configTests :: Test
configTests = testGroup "Snaplet Config"
[ testProperty "Monoid left identity" monoidLeftIdentity
, testProperty "Monoid right identity" monoidRightIdentity
, testProperty "Monoid associativity" monoidAssociativity
, testCase "Verify Typeable instance" verTypeable
-- , testCase "Config options used" appConfigGetsToConfig
]
newtype ArbAppConfig = ArbAppConfig { unArbAppConfig :: AppConfig }
instance Show ArbAppConfig where
show (ArbAppConfig (AppConfig a)) =
"ArbAppConfig (AppConfig " ++ show a ++ ")"
instance Eq ArbAppConfig where
a == b = ((==) `on` (appEnvironment . unArbAppConfig)) a b
instance Arbitrary ArbAppConfig where
arbitrary = liftM (ArbAppConfig . AppConfig) arbitrary
instance Semigroup ArbAppConfig where
a <> b = ArbAppConfig $ ((<>) `on` unArbAppConfig) a b
instance Monoid ArbAppConfig where
mempty = ArbAppConfig mempty
#if !MIN_VERSION_base(4,11,0)
mappend = (<>)
#endif
monoidLeftIdentity :: ArbAppConfig -> Bool
monoidLeftIdentity a = mempty <> a == a
monoidRightIdentity :: ArbAppConfig -> Bool
monoidRightIdentity a = a <> mempty == a
monoidAssociativity :: ArbAppConfig -> ArbAppConfig -> ArbAppConfig
-> Bool
monoidAssociativity a b c = (a <> b) <> c == a <> (b <> c)
------------------------------------------------------------------------------
verTypeable :: Assertion
verTypeable =
assertEqual "Unexpected Typeable behavior"
#if MIN_VERSION_base(4,7,0)
"AppConfig"
#else
"Snap.Snaplet.Config.AppConfig"
#endif
(show . typeOf $ (undefined :: AppConfig))
------------------------------------------------------------------------------
appConfigGetsToConfig :: Assertion
appConfigGetsToConfig = do
opts <- completeConfig =<<
commandLineAppConfig defaultConfig :: IO (Config Snap AppConfig)
a <- async . withArgs ["-p", "8001","-e","otherEnv"] $
serveSnaplet opts appInit
threadDelay 500000
cancel a
b <- async . withArgs ["--environment","devel"] $ serveSnaplet defaultConfig appInit
threadDelay 500000
cancel b
--TODO - Don't just run the server to touch the config code. Check some values
| snapframework/snap | test/suite/Snap/Snaplet/Config/Tests.hs | bsd-3-clause | 3,308 | 0 | 11 | 519 | 684 | 390 | 294 | 68 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Handlers.TryRuby
( ruby
, handler
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad (mzero)
import Control.Monad.Trans (liftIO)
import Data.Aeson (FromJSON, Value (..), parseJSON, (.:))
import Data.Text (Text)
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Conduit as HC
--------------------------------------------------------------------------------
import NumberSix.Bang
import NumberSix.Irc
import NumberSix.Util
import NumberSix.Util.Error
import NumberSix.Util.Http
--------------------------------------------------------------------------------
data Result
= Success Text
| Error Text
deriving (Show)
--------------------------------------------------------------------------------
instance FromJSON Result where
parseJSON (Object o) = do
success <- o .: "success"
if success then Success <$> o .: "output" else Error <$> o .: "result"
parseJSON _ = mzero
--------------------------------------------------------------------------------
ruby :: Text -> IO Text
ruby cmd = do
bs <- http "http://tryruby.org/levels/1/challenges/0" (setPut . setCmd)
case parseJsonEither bs of
Right (Success x) -> return x
Right (Error x) -> return x
Left _ -> randomError
where
setCmd :: Monad m => HC.Request m -> HC.Request m
setCmd = HC.urlEncodedBody [("cmd", T.encodeUtf8 cmd)]
setPut rq = rq {HC.method = "PUT"}
--------------------------------------------------------------------------------
handler :: UninitializedHandler
handler = makeBangHandler "TryRuby" ["@","!ruby"] $ liftIO . ruby
| itkovian/number-six | src/NumberSix/Handlers/TryRuby.hs | bsd-3-clause | 1,999 | 0 | 12 | 433 | 418 | 234 | 184 | 37 | 3 |
module Main where
import Network
import System.IO (hPutStrLn, hClose, Handle)
import Control.Concurrent (forkIO)
main :: IO ()
main = startServer >>= handleConnections qotdService
startServer :: IO Socket
startServer = listenOn $ PortNumber 17
handleConnections :: (Handle -> IO ()) -> Socket -> IO ()
handleConnections handler socket = do
putStrLn "Received connection..."
(handle, _, _) <- accept socket
_ <- forkIO $ handler handle >> hClose handle
handleConnections handler socket
qotdService :: Handle -> IO ()
qotdService file = hPutStrLn file "Hello world"
| anler/tcp-quotes | app/Main.hs | bsd-3-clause | 581 | 0 | 10 | 100 | 196 | 99 | 97 | 16 | 1 |
--
-- xmonad example config file.
--
-- A template showing all available configuration hooks,
-- and how to override the defaults in your own xmonad.hs conf file.
--
-- Normally, you'd only override those defaults you care about.
--
import XMonad
import Data.Monoid
import System.Exit
import qualified XMonad.StackSet as W
import qualified Data.Map as M
-- The preferred terminal program, which is used in a binding below and by
-- certain contrib modules.
--
myTerminal = "xterm"
-- Whether focus follows the mouse pointer.
myFocusFollowsMouse :: Bool
myFocusFollowsMouse = True
-- Whether clicking on a window to focus also passes the click to the window
myClickJustFocuses :: Bool
myClickJustFocuses = False
-- Width of the window border in pixels.
--
myBorderWidth = 1
-- modMask lets you specify which modkey you want to use. The default
-- is mod1Mask ("left alt"). You may also consider using mod3Mask
-- ("right alt"), which does not conflict with emacs keybindings. The
-- "windows key" is usually mod4Mask.
--
myModMask = mod1Mask
-- The default number of workspaces (virtual screens) and their names.
-- By default we use numeric strings, but any string may be used as a
-- workspace name. The number of workspaces is determined by the length
-- of this list.
--
-- A tagging example:
--
-- > workspaces = ["web", "irc", "code" ] ++ map show [4..9]
--
myWorkspaces = ["1","2","3","4","5","6","7","8","9"]
-- Border colors for unfocused and focused windows, respectively.
--
myNormalBorderColor = "#dddddd"
myFocusedBorderColor = "#ff0000"
------------------------------------------------------------------------
-- Key bindings. Add, modify or remove key bindings here.
--
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $
-- launch a terminal
[ ((modm .|. shiftMask, xK_Return), spawn $ XMonad.terminal conf)
-- launch dmenu
, ((modm, xK_p ), spawn "dmenu_run")
-- launch gmrun
, ((modm .|. shiftMask, xK_p ), spawn "gmrun")
-- close focused window
, ((modm .|. shiftMask, xK_c ), kill)
-- Rotate through the available layout algorithms
, ((modm, xK_space ), sendMessage NextLayout)
-- Reset the layouts on the current workspace to default
, ((modm .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
-- Resize viewed windows to the correct size
, ((modm, xK_n ), refresh)
-- Move focus to the next window
, ((modm, xK_Tab ), windows W.focusDown)
-- Move focus to the next window
, ((modm, xK_j ), windows W.focusDown)
-- Move focus to the previous window
, ((modm, xK_k ), windows W.focusUp )
-- Move focus to the master window
, ((modm, xK_m ), windows W.focusMaster )
-- Swap the focused window and the master window
, ((modm, xK_Return), windows W.swapMaster)
-- Swap the focused window with the next window
, ((modm .|. shiftMask, xK_j ), windows W.swapDown )
-- Swap the focused window with the previous window
, ((modm .|. shiftMask, xK_k ), windows W.swapUp )
-- Shrink the master area
, ((modm, xK_h ), sendMessage Shrink)
-- Expand the master area
, ((modm, xK_l ), sendMessage Expand)
-- Push window back into tiling
, ((modm, xK_t ), withFocused $ windows . W.sink)
-- Increment the number of windows in the master area
, ((modm , xK_comma ), sendMessage (IncMasterN 1))
-- Deincrement the number of windows in the master area
, ((modm , xK_period), sendMessage (IncMasterN (-1)))
-- Toggle the status bar gap
-- Use this binding with avoidStruts from Hooks.ManageDocks.
-- See also the statusBar function from Hooks.DynamicLog.
--
-- , ((modm , xK_b ), sendMessage ToggleStruts)
-- Quit xmonad
, ((modm .|. shiftMask, xK_q ), io (exitWith ExitSuccess))
-- Restart xmonad
, ((modm , xK_q ), spawn "xmonad --recompile; xmonad --restart")
-- Run xmessage with a summary of the default keybindings (useful for beginners)
, ((modMask .|. shiftMask, xK_slash ), spawn ("echo \"" ++ help ++ "\" | xmessage -file -"))
]
++
--
-- mod-[1..9], Switch to workspace N
-- mod-shift-[1..9], Move client to workspace N
--
[((m .|. modm, k), windows $ f i)
| (i, k) <- zip (XMonad.workspaces conf) [xK_1 .. xK_9]
, (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
++
--
-- mod-{w,e,r}, Switch to physical/Xinerama screens 1, 2, or 3
-- mod-shift-{w,e,r}, Move client to screen 1, 2, or 3
--
[((m .|. modm, key), screenWorkspace sc >>= flip whenJust (windows . f))
| (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
, (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
------------------------------------------------------------------------
-- Mouse bindings: default actions bound to mouse events
--
myMouseBindings (XConfig {XMonad.modMask = modm}) = M.fromList $
-- mod-button1, Set the window to floating mode and move by dragging
[ ((modm, button1), (\w -> focus w >> mouseMoveWindow w
>> windows W.shiftMaster))
-- mod-button2, Raise the window to the top of the stack
, ((modm, button2), (\w -> focus w >> windows W.shiftMaster))
-- mod-button3, Set the window to floating mode and resize by dragging
, ((modm, button3), (\w -> focus w >> mouseResizeWindow w
>> windows W.shiftMaster))
-- you may also bind events to the mouse scroll wheel (button4 and button5)
]
------------------------------------------------------------------------
-- Layouts:
-- You can specify and transform your layouts by modifying these values.
-- If you change layout bindings be sure to use 'mod-shift-space' after
-- restarting (with 'mod-q') to reset your layout state to the new
-- defaults, as xmonad preserves your old layout settings by default.
--
-- The available layouts. Note that each layout is separated by |||,
-- which denotes layout choice.
--
myLayout = tiled ||| Mirror tiled ||| Full
where
-- default tiling algorithm partitions the screen into two panes
tiled = Tall nmaster delta ratio
-- The default number of windows in the master pane
nmaster = 1
-- Default proportion of screen occupied by master pane
ratio = 1/2
-- Percent of screen to increment by when resizing panes
delta = 3/100
------------------------------------------------------------------------
-- Window rules:
-- Execute arbitrary actions and WindowSet manipulations when managing
-- a new window. You can use this to, for example, always float a
-- particular program, or have a client always appear on a particular
-- workspace.
--
-- To find the property name associated with a program, use
-- > xprop | grep WM_CLASS
-- and click on the client you're interested in.
--
-- To match on the WM_NAME, you can use 'title' in the same way that
-- 'className' and 'resource' are used below.
--
myManageHook = composeAll
[ className =? "MPlayer" --> doFloat
, className =? "Gimp" --> doFloat
, resource =? "desktop_window" --> doIgnore
, resource =? "kdesktop" --> doIgnore ]
------------------------------------------------------------------------
-- Event handling
-- * EwmhDesktops users should change this to ewmhDesktopsEventHook
--
-- Defines a custom handler function for X Events. The function should
-- return (All True) if the default handler is to be run afterwards. To
-- combine event hooks use mappend or mconcat from Data.Monoid.
--
myEventHook = mempty
------------------------------------------------------------------------
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'XMonad.Hooks.DynamicLog' extension for examples.
--
myLogHook = return ()
------------------------------------------------------------------------
-- Startup hook
-- Perform an arbitrary action each time xmonad starts or is restarted
-- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize
-- per-workspace layout choices.
--
-- By default, do nothing.
myStartupHook = return ()
------------------------------------------------------------------------
-- Now run xmonad with all the defaults we set up.
-- Run xmonad with the settings you specify. No need to modify this.
--
main = xmonad defaults
-- A structure containing your configuration settings, overriding
-- fields in the default config. Any you don't override, will
-- use the defaults defined in xmonad/XMonad/Config.hs
--
-- No need to modify this.
--
defaults = defaultConfig {
-- simple stuff
terminal = myTerminal,
focusFollowsMouse = myFocusFollowsMouse,
clickJustFocuses = myClickJustFocuses,
borderWidth = myBorderWidth,
modMask = myModMask,
workspaces = myWorkspaces,
normalBorderColor = myNormalBorderColor,
focusedBorderColor = myFocusedBorderColor,
-- key bindings
keys = myKeys,
mouseBindings = myMouseBindings,
-- hooks, layouts
layoutHook = myLayout,
manageHook = myManageHook,
handleEventHook = myEventHook,
logHook = myLogHook,
startupHook = myStartupHook
}
| markus1189/xmonad-710 | man/xmonad.hs | bsd-3-clause | 9,677 | 5 | 12 | 2,399 | 1,360 | 851 | 509 | 82 | 1 |
import Distribution.Simple
main = defaultMain
| solidsnack/maccatcher | Setup.hs | bsd-3-clause | 74 | 0 | 4 | 33 | 11 | 6 | 5 | 2 | 1 |
{-|
This module provides the /Remove Weak Suffixes/ processor.
Let @Wl#@ be forward closed, then
@
|- <S# / W# + W, Q, T#> :f
-------------------------------------
|- <S# / W# + Wl# + W, Q, T#> :f
@
-}
module Tct.Trs.Processor.DP.DPGraph.RemoveWeakSuffixes
( removeWeakSuffixesDeclaration
, removeWeakSuffixes
) where
import qualified Data.Set as S
import qualified Data.Rewriting.Rule as R (Rule)
import qualified Tct.Core.Common.Pretty as PP
import qualified Tct.Core.Common.Xml as Xml
import qualified Tct.Core.Data as T
import Tct.Common.ProofCombinators
import Tct.Trs.Data
import qualified Tct.Trs.Data.Rules as RS
import Tct.Trs.Data.DependencyGraph
import qualified Tct.Trs.Data.Problem as Prob
data RemoveWeakSuffixes = RemoveWeakSuffixes deriving Show
data RemoveWeakSuffixesProof
= RemoveWeakSuffixesProof
{ wdg_ :: DG F V
, removable_ :: [(NodeId, R.Rule F V)] }
| RemoveWeakSuffixesFail
deriving Show
instance T.Processor RemoveWeakSuffixes where
type ProofObject RemoveWeakSuffixes = ApplicationProof RemoveWeakSuffixesProof
type In RemoveWeakSuffixes = Trs
type Out RemoveWeakSuffixes = Trs
-- an scc in the congruence graph is considered weak if all rules in the scc are weak
-- compute maximal weak suffix bottom-up
execute RemoveWeakSuffixes prob =
maybe remtail (\s -> T.abortWith (Inapplicable s :: ApplicationProof RemoveWeakSuffixesProof)) (Prob.isDTProblem' prob)
where
remtail
| null initials = T.abortWith (Applicable RemoveWeakSuffixesFail)
| otherwise = T.succeedWith1 (Applicable proof) T.fromId nprob
where
onlyWeaks = not . any (isStrict . snd) . theSCC
computeTails [] lfs = lfs
computeTails (n:ns) lfs
| n `S.member` lfs = computeTails ns lfs
| otherwise = computeTails (ns++preds) lfs'
where
(lpreds, _, cn, lsucs) = context cdg n
sucs = map snd lsucs
preds = map snd lpreds
lfs' = if S.fromList sucs `S.isSubsetOf` lfs && onlyWeaks cn
then S.insert n lfs
else lfs
-- congruence graph
cdg = Prob.congruenceGraph prob
initials = [n | (n,cn) <- withNodeLabels' cdg (leafs cdg), onlyWeaks cn]
cdgTail = S.toList $ computeTails initials S.empty
-- dependency graph
wdg = Prob.dependencyGraph prob
wdgLabTail = fmap theRule `fmap` concatMap (theSCC . lookupNodeLabel' cdg) cdgTail
(wdgTail, rs) = unzip wdgLabTail
nprob = prob
{ Prob.weakDPs = Prob.weakDPs prob `RS.difference` RS.fromList rs
, Prob.dpGraph = DependencyGraph
{ dependencyGraph = wdg `removeNodes` wdgTail
, congruenceGraph = cdg `removeNodes` cdgTail }}
proof = RemoveWeakSuffixesProof { wdg_ = wdg, removable_ = wdgLabTail }
--- * instances ------------------------------------------------------------------------------------------------------
removeWeakSuffixesDeclaration :: T.Declaration ('[] T.:-> TrsStrategy)
removeWeakSuffixesDeclaration = T.declare "removeWeakSuffixes" desc () (T.Apply RemoveWeakSuffixes) where
desc =
[ "Removes trailing paths that do not need to be oriented."
, "Only applicable if the strict component is empty."]
-- | Removes trailing weak paths.
-- A dependency pair is on a trailing weak path if it is from the weak components and all sucessors in the dependency
-- graph are on trailing weak paths.
--
-- Only applicable on DP-problems as obtained by 'dependencyPairs' or 'dependencyTuples'. Also
-- not applicable when @strictTrs prob \= RS.empty@.
removeWeakSuffixes :: TrsStrategy
removeWeakSuffixes = T.declFun removeWeakSuffixesDeclaration
--- * proofdata ------------------------------------------------------------------------------------------------------
instance PP.Pretty RemoveWeakSuffixesProof where
pretty RemoveWeakSuffixesFail = PP.text "The dependency graph contains no sub-graph of weak DPs closed under successors."
pretty p@RemoveWeakSuffixesProof{} = PP.vcat
[ PP.text "Consider the dependency graph"
, PP.indent 2 $ PP.pretty (wdg_ p)
, PP.text "The following weak DPs constitute a sub-graph of the DG that is closed under successors. The DPs are removed."
, PP.indent 2 $ PP.listing' (removable_ p) ]
instance Xml.Xml RemoveWeakSuffixesProof where
toXml RemoveWeakSuffixesFail = Xml.elt "removeWeakSuffixes" []
toXml p@RemoveWeakSuffixesProof{} = Xml.elt "removeWeakSuffixes"
[ Xml.toXml (wdg_ p)
, Xml.elt "removeWeakSuffix" $ map Xml.toXml (removable_ p) ]
| ComputationWithBoundedResources/tct-trs | src/Tct/Trs/Processor/DP/DPGraph/RemoveWeakSuffixes.hs | bsd-3-clause | 4,815 | 0 | 16 | 1,157 | 979 | 540 | 439 | -1 | -1 |
{-# LANGUAGE UnicodeSyntax #-}
import Prelude.Unicode
data Tree a = Empty | Branch a (Tree a) (Tree a)
deriving (Show, Eq)
tree4 = Branch 1
(Branch 2 Empty (Branch 4 Empty Empty))
(Branch 2 Empty Empty)
countLeaves ∷ Tree a → Int
countLeaves Empty = 0
countLeaves (Branch _ Empty Empty) = 1
countLeaves (Branch _ l r) = countLeaves l + countLeaves r
leaves ∷ Tree a → [a]
leaves Empty = []
leaves (Branch x Empty Empty) = [x]
leaves (Branch x l r) = leaves l ++ leaves r
| m00nlight/99-problems | haskell/p-61.hs | bsd-3-clause | 514 | 1 | 9 | 127 | 232 | 116 | 116 | 15 | 1 |
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TemplateHaskell #-}
module Types.Posts
( ClientMessage
, newClientMessage
, cmDate
, cmType
, cmText
, ClientMessageType(..)
, Attachment
, mkAttachment
, attachmentName
, attachmentFileId
, attachmentURL
, ClientPostType(..)
, ClientPost
, toClientPost
, cpUserOverride
, cpMarkdownSource
, cpUser
, cpText
, cpType
, cpReactions
, cpPending
, cpOriginalPost
, cpInReplyToPost
, cpDate
, cpChannelId
, cpAttachments
, cpDeleted
, cpPostId
, unEmote
, postIsLeave
, postIsJoin
, postIsTopicChange
, postIsEmote
, getBlocks
)
where
import Prelude ()
import Prelude.MH
import Cheapskate ( Blocks )
import qualified Cheapskate as C
import qualified Data.Map.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import Data.Time.Clock ( getCurrentTime )
import Lens.Micro.Platform ( makeLenses )
import Network.Mattermost.Lenses
import Network.Mattermost.Types
import Types.Common
-- * Client Messages
-- | A 'ClientMessage' is a message given to us by our client,
-- like help text or an error message.
data ClientMessage = ClientMessage
{ _cmText :: Text
, _cmDate :: ServerTime
, _cmType :: ClientMessageType
} deriving (Eq, Show)
-- | Create a new 'ClientMessage' value. This is a message generated
-- by this Matterhorn client and not by (or visible to) the Server.
-- Such messages should be visible to the user but need no special
-- position in the output stream: they generally appear at the bottom
-- of the message display, with subsequent messages following them.
-- Because of that, this is a special place where we assume local
-- time approximates server time.
newClientMessage :: (MonadIO m) => ClientMessageType -> Text -> m ClientMessage
newClientMessage ty msg = do
now <- liftIO getCurrentTime
return (ClientMessage msg (ServerTime now) ty)
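
-- A usage sketch (illustrative; assumes OverloadedStrings or an explicit
-- 'T.pack' at the call site for the message text):
--
-- > errMsg <- newClientMessage Error "Could not reach the server"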
-- | We format 'ClientMessage' values differently depending on
-- their 'ClientMessageType'.
data ClientMessageType =
Informative
| Error
| DateTransition
| NewMessagesTransition
| UnknownGap -- ^ marks region where server may have messages unknown locally
deriving (Eq, Show)
-- ** 'ClientMessage' Lenses
makeLenses ''ClientMessage
-- * Mattermost Posts
-- | A 'ClientPost' is a temporary internal representation of
-- the Mattermost 'Post' type, with unnecessary information
-- removed and some preprocessing done.
data ClientPost = ClientPost
{ _cpText :: Blocks
, _cpMarkdownSource :: Text
, _cpUser :: Maybe UserId
, _cpUserOverride :: Maybe Text
, _cpDate :: ServerTime
, _cpType :: ClientPostType
, _cpPending :: Bool
, _cpDeleted :: Bool
, _cpAttachments :: Seq Attachment
, _cpInReplyToPost :: Maybe PostId
, _cpPostId :: PostId
, _cpChannelId :: ChannelId
, _cpReactions :: Map.Map Text Int
, _cpOriginalPost :: Post
} deriving (Show)
-- | An attachment has a name and a (typically very long) URL associated,
-- as well as the ID of the actual file
data Attachment = Attachment
{ _attachmentName :: Text
, _attachmentURL :: Text
, _attachmentFileId :: FileId
} deriving (Eq, Show)
mkAttachment :: Text -> Text -> FileId -> Attachment
mkAttachment = Attachment
-- | A Mattermost 'Post' value can represent either a normal
-- chat message or one of several special events.
data ClientPostType =
NormalPost
| Emote
| Join
| Leave
| TopicChange
deriving (Eq, Show)
-- ** Creating 'ClientPost' Values
-- | Parse text as Markdown and extract the AST
getBlocks :: Text -> Blocks
getBlocks s = bs where C.Doc _ bs = C.markdown C.def s
-- | Determine the internal 'PostType' based on a 'Post'
postClientPostType :: Post -> ClientPostType
postClientPostType cp =
if | postIsEmote cp -> Emote
| postIsJoin cp -> Join
| postIsLeave cp -> Leave
| postIsTopicChange cp -> TopicChange
| otherwise -> NormalPost
-- | Find out whether a 'Post' represents a topic change
postIsTopicChange :: Post -> Bool
postIsTopicChange p = postType p == PostTypeHeaderChange
-- | Find out whether a 'Post' is from a @/me@ command
postIsEmote :: Post -> Bool
postIsEmote p =
and [ p^.postPropsL.postPropsOverrideIconUrlL == Just (""::Text)
, ("*" `T.isPrefixOf` (sanitizeUserText $ postMessage p))
, ("*" `T.isSuffixOf` (sanitizeUserText $ postMessage p))
]
-- | Find out whether a 'Post' is a user joining a channel
postIsJoin :: Post -> Bool
postIsJoin p =
p^.postTypeL == PostTypeJoinChannel
-- | Find out whether a 'Post' is a user leaving a channel
postIsLeave :: Post -> Bool
postIsLeave p =
p^.postTypeL == PostTypeLeaveChannel
-- | Undo the automatic formatting of posts generated by @/me@-commands
unEmote :: ClientPostType -> Text -> Text
unEmote Emote t = if "*" `T.isPrefixOf` t && "*" `T.isSuffixOf` t
then T.init $ T.tail t
else t
unEmote _ t = t
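-- Illustrative behaviour (a sketch; assumes OverloadedStrings for the Text
-- literals): emote posts arrive wrapped in asterisks, which this strips,
-- while every other post type passes through unchanged.
--
-- >>> unEmote Emote "*waves*"
-- "waves"
-- >>> unEmote NormalPost "*waves*"
-- "*waves*"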
-- | Convert a Mattermost 'Post' to a 'ClientPost', passing in a
-- 'ParentId' if it has a known one.
toClientPost :: Post -> Maybe PostId -> ClientPost
toClientPost p parentId =
let src = unEmote (postClientPostType p) $ sanitizeUserText $ postMessage p
in ClientPost { _cpText = getBlocks src <> getAttachmentText p
, _cpMarkdownSource = src
, _cpUser = postUserId p
, _cpUserOverride = p^.postPropsL.postPropsOverrideUsernameL
, _cpDate = postCreateAt p
, _cpType = postClientPostType p
, _cpPending = False
, _cpDeleted = False
, _cpAttachments = Seq.empty
, _cpInReplyToPost = parentId
, _cpPostId = p^.postIdL
, _cpChannelId = p^.postChannelIdL
, _cpReactions = Map.empty
, _cpOriginalPost = p
}
-- | Right now, instead of treating 'attachment' properties specially, we're
-- just going to roll them directly into the message text
getAttachmentText :: Post -> Blocks
getAttachmentText p =
case p^.postPropsL.postPropsAttachmentsL of
Nothing -> Seq.empty
Just attachments ->
fmap (C.Blockquote . render) attachments
where render att = getBlocks (att^.ppaTextL) <> getBlocks (att^.ppaFallbackL)
-- ** 'ClientPost' Lenses
makeLenses ''Attachment
makeLenses ''ClientPost
| aisamanra/matterhorn | src/Types/Posts.hs | bsd-3-clause | 6,677 | 0 | 13 | 1,723 | 1,236 | 711 | 525 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.HU.Corpus
( corpus ) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Numeral.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale HU Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (NumeralValue 0)
[ "0"
, "nulla"
, "zéró"
]
, examples (NumeralValue 1)
[ "1"
, "egy"
]
, examples (NumeralValue 2)
[ "kettő"
]
, examples (NumeralValue 3)
[ "három"
]
, examples (NumeralValue 4)
[ "négy"
]
, examples (NumeralValue 5)
[ "öt"
]
, examples (NumeralValue 6)
[ "hat"
]
, examples (NumeralValue 7)
[ "hét"
]
, examples (NumeralValue 8)
[ "nyolc"
]
, examples (NumeralValue 9)
[ "kilenc"
]
, examples (NumeralValue 11)
[ "tizenegy"
]
, examples (NumeralValue 15)
[ "tizenöt"
]
, examples (NumeralValue 17)
[ "tizenhét"
]
, examples (NumeralValue 20)
[ "20"
, "húsz"
]
, examples (NumeralValue 22)
[ "huszonkettő"
]
, examples (NumeralValue 24)
[ "24"
, "huszonnégy"
]
, examples (NumeralValue 26)
[ "huszonhat"
]
, examples (NumeralValue 28)
[ "huszonnyolc"
]
, examples (NumeralValue 10)
[ "tíz"
]
, examples (NumeralValue 20)
[ "húsz"
]
, examples (NumeralValue 50)
[ "ötven"
]
, examples (NumeralValue 34)
[ "harmincnégy"
]
]
| facebookincubator/duckling | Duckling/Numeral/HU/Corpus.hs | bsd-3-clause | 2,236 | 0 | 9 | 933 | 486 | 270 | 216 | 62 | 1 |
module AI
( search
, module AI.Types
) where
import Types
import AI.Types
import Text.Printf
import qualified AI.API.My as My
import qualified AI.API.Tzaar as Tzaar
import qualified AI.API.GameTree as GameTree
search :: Board b => Algorithm -> Implementation -> Evaluation -> Position b -> Depth -> (PV, Score)
search Minimax My = My.minimax
search AlphaBeta My = My.alphabeta
search Negascout My = My.negascout
search Minimax GameTree = GameTree.minimax
search AlphaBeta GameTree = GameTree.alphabeta
search Negascout GameTree = GameTree.negascout
search Minimax Tzaar = Tzaar.minimax
search AlphaBeta Tzaar = Tzaar.alphabeta
search Negascout Tzaar = Tzaar.negascout
search a i = error $ printf "Unsupported algorithm-implementation pair: (%s, %s)"
(show a) (show i)
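-- Illustrative call (a sketch; @eval@, @pos@ and the depth are assumed to be
-- supplied by the caller):
--
-- > let (pv, score) = search AlphaBeta GameTree eval pos 4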
| sphynx/hamisado | AI.hs | bsd-3-clause | 795 | 0 | 11 | 136 | 237 | 130 | 107 | 21 | 1 |
{-# LANGUAGE BangPatterns #-}
-- |
-- Module: Data.Aeson.Encoding.Builder
-- Copyright: (c) 2011 MailRank, Inc.
-- (c) 2013 Simon Meier <iridcode@gmail.com>
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Efficiently serialize a JSON value using the UTF-8 encoding.
module Data.Aeson.Encoding.Builder
(
encodeToBuilder
, null_
, bool
, array
, emptyArray_
, emptyObject_
, object
, text
, string
, unquoted
, quote
, scientific
, day
, localTime
, utcTime
, timeOfDay
, zonedTime
, ascii2
, ascii4
, ascii5
) where
import Prelude ()
import Prelude.Compat
import Data.Aeson.Internal.Time
import Data.Aeson.Types.Internal (Value (..))
import Data.ByteString.Builder as B
import Data.ByteString.Builder.Prim as BP
import Data.ByteString.Builder.Scientific (scientificBuilder)
import Data.Char (chr, ord)
import Data.Monoid ((<>))
import Data.Scientific (Scientific, base10Exponent, coefficient)
import Data.Text.Encoding (encodeUtf8BuilderEscaped)
import Data.Time (UTCTime(..))
import Data.Time.Calendar (Day(..), toGregorian)
import Data.Time.LocalTime
import Data.Word (Word8)
import qualified Data.HashMap.Strict as HMS
import qualified Data.Text as T
import qualified Data.Vector as V
-- | Encode a JSON value to a "Data.ByteString" 'B.Builder'.
--
-- Use this function if you are encoding over the wire, or need to
-- prepend or append further bytes to the encoded JSON value.
encodeToBuilder :: Value -> Builder
encodeToBuilder Null = null_
encodeToBuilder (Bool b) = bool b
encodeToBuilder (Number n) = scientific n
encodeToBuilder (String s) = text s
encodeToBuilder (Array v) = array v
encodeToBuilder (Object m) = object m
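-- A minimal illustration (a sketch, running the 'Builder' with
-- 'B.toLazyByteString'):
--
-- >>> B.toLazyByteString (encodeToBuilder (Bool True))
-- "true"
-- >>> B.toLazyByteString (encodeToBuilder Null)
-- "null"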
-- | Encode a JSON null.
null_ :: Builder
null_ = BP.primBounded (ascii4 ('n',('u',('l','l')))) ()
-- | Encode a JSON boolean.
bool :: Bool -> Builder
bool = BP.primBounded (BP.condB id (ascii4 ('t',('r',('u','e'))))
(ascii5 ('f',('a',('l',('s','e'))))))
-- | Encode a JSON array.
array :: V.Vector Value -> Builder
array v
| V.null v = emptyArray_
| otherwise = B.char8 '[' <>
encodeToBuilder (V.unsafeHead v) <>
V.foldr withComma (B.char8 ']') (V.unsafeTail v)
where
withComma a z = B.char8 ',' <> encodeToBuilder a <> z
-- | Encode a JSON object.
object :: HMS.HashMap T.Text Value -> Builder
object m = case HMS.toList m of
(x:xs) -> B.char8 '{' <> one x <> foldr withComma (B.char8 '}') xs
_ -> emptyObject_
where
withComma a z = B.char8 ',' <> one a <> z
one (k,v) = text k <> B.char8 ':' <> encodeToBuilder v
-- | Encode a JSON string.
text :: T.Text -> Builder
text t = B.char8 '"' <> unquoted t <> B.char8 '"'
-- | Encode a JSON string, without enclosing quotes.
unquoted :: T.Text -> Builder
unquoted = encodeUtf8BuilderEscaped escapeAscii
-- | Add quotes surrounding a builder
quote :: Builder -> Builder
quote b = B.char8 '"' <> b <> B.char8 '"'
-- | Encode a JSON string.
string :: String -> Builder
string t = B.char8 '"' <> BP.primMapListBounded go t <> B.char8 '"'
where go = BP.condB (> '\x7f') BP.charUtf8 (c2w >$< escapeAscii)
escapeAscii :: BP.BoundedPrim Word8
escapeAscii =
BP.condB (== c2w '\\' ) (ascii2 ('\\','\\')) $
BP.condB (== c2w '\"' ) (ascii2 ('\\','"' )) $
BP.condB (>= c2w '\x20') (BP.liftFixedToBounded BP.word8) $
BP.condB (== c2w '\n' ) (ascii2 ('\\','n' )) $
BP.condB (== c2w '\r' ) (ascii2 ('\\','r' )) $
BP.condB (== c2w '\t' ) (ascii2 ('\\','t' )) $
BP.liftFixedToBounded hexEscape -- fallback for chars < 0x20
where
hexEscape :: BP.FixedPrim Word8
hexEscape = (\c -> ('\\', ('u', fromIntegral c))) BP.>$<
BP.char8 >*< BP.char8 >*< BP.word16HexFixed
{-# INLINE escapeAscii #-}
c2w :: Char -> Word8
c2w c = fromIntegral (ord c)
-- | Encode a JSON number.
scientific :: Scientific -> Builder
scientific s
| e < 0 = scientificBuilder s
| otherwise = B.integerDec (coefficient s * 10 ^ e)
where
e = base10Exponent s
emptyArray_ :: Builder
emptyArray_ = BP.primBounded (ascii2 ('[',']')) ()
emptyObject_ :: Builder
emptyObject_ = BP.primBounded (ascii2 ('{','}')) ()
ascii2 :: (Char, Char) -> BP.BoundedPrim a
ascii2 cs = BP.liftFixedToBounded $ const cs BP.>$< BP.char7 >*< BP.char7
{-# INLINE ascii2 #-}
ascii4 :: (Char, (Char, (Char, Char))) -> BP.BoundedPrim a
ascii4 cs = BP.liftFixedToBounded $ const cs >$<
BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7
{-# INLINE ascii4 #-}
ascii5 :: (Char, (Char, (Char, (Char, Char)))) -> BP.BoundedPrim a
ascii5 cs = BP.liftFixedToBounded $ const cs >$<
BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7
{-# INLINE ascii5 #-}
ascii6 :: (Char, (Char, (Char, (Char, (Char, Char))))) -> BP.BoundedPrim a
ascii6 cs = BP.liftFixedToBounded $ const cs >$<
BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7
{-# INLINE ascii6 #-}
ascii8 :: (Char, (Char, (Char, (Char, (Char, (Char, (Char, Char)))))))
-> BP.BoundedPrim a
ascii8 cs = BP.liftFixedToBounded $ const cs >$<
BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7 >*<
BP.char7 >*< BP.char7 >*< BP.char7 >*< BP.char7
{-# INLINE ascii8 #-}
day :: Day -> Builder
day dd = encodeYear yr <>
BP.primBounded (ascii6 ('-',(mh,(ml,('-',(dh,dl)))))) ()
where (yr,m,d) = toGregorian dd
!(T mh ml) = twoDigits m
!(T dh dl) = twoDigits d
encodeYear y
| y >= 1000 = B.integerDec y
| y >= 0 = BP.primBounded (ascii4 (padYear y)) ()
| y >= -999 = BP.primBounded (ascii5 ('-',padYear (- y))) ()
| otherwise = B.integerDec y
padYear y =
let (ab,c) = fromIntegral y `quotRem` 10
(a,b) = ab `quotRem` 10
in ('0',(digit a,(digit b,digit c)))
{-# INLINE day #-}
timeOfDay :: TimeOfDay -> Builder
timeOfDay t = timeOfDay64 (toTimeOfDay64 t)
{-# INLINE timeOfDay #-}
timeOfDay64 :: TimeOfDay64 -> Builder
timeOfDay64 (TOD h m s)
| frac == 0 = hhmmss -- omit subseconds if 0
| otherwise = hhmmss <> BP.primBounded showFrac frac
where
hhmmss = BP.primBounded (ascii8 (hh,(hl,(':',(mh,(ml,(':',(sh,sl)))))))) ()
!(T hh hl) = twoDigits h
!(T mh ml) = twoDigits m
!(T sh sl) = twoDigits (fromIntegral real)
(real,frac) = s `quotRem` pico
showFrac = (\x -> ('.', x)) >$< (BP.liftFixedToBounded BP.char7 >*< trunc12)
trunc12 = (`quotRem` micro) >$<
BP.condB (\(_,y) -> y == 0) (fst >$< trunc6) (digits6 >*< trunc6)
digits6 = ((`quotRem` milli) . fromIntegral) >$< (digits3 >*< digits3)
trunc6 = ((`quotRem` milli) . fromIntegral) >$<
BP.condB (\(_,y) -> y == 0) (fst >$< trunc3) (digits3 >*< trunc3)
digits3 = (`quotRem` 10) >$< (digits2 >*< digits1)
digits2 = (`quotRem` 10) >$< (digits1 >*< digits1)
digits1 = BP.liftFixedToBounded (digit >$< BP.char7)
trunc3 = BP.condB (== 0) BP.emptyB $
(`quotRem` 100) >$< (digits1 >*< trunc2)
trunc2 = BP.condB (== 0) BP.emptyB $
(`quotRem` 10) >$< (digits1 >*< trunc1)
trunc1 = BP.condB (== 0) BP.emptyB digits1
pico = 1000000000000 -- number of picoseconds in 1 second
micro = 1000000 -- number of microseconds in 1 second
milli = 1000 -- number of milliseconds in 1 second
timeZone :: TimeZone -> Builder
timeZone (TimeZone off _ _)
| off == 0 = B.char7 'Z'
| otherwise = BP.primBounded (ascii6 (s,(hh,(hl,(':',(mh,ml)))))) ()
where !s = if off < 0 then '-' else '+'
!(T hh hl) = twoDigits h
!(T mh ml) = twoDigits m
(h,m) = abs off `quotRem` 60
{-# INLINE timeZone #-}
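-- Illustrative behaviour (a sketch; 'utc' is the zero-offset zone from
-- "Data.Time.LocalTime"): UTC renders as the literal @Z@, any other offset
-- as a signed @hh:mm@ value.
--
-- >>> B.toLazyByteString (timeZone utc)
-- "Z"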
dayTime :: Day -> TimeOfDay64 -> Builder
dayTime d t = day d <> B.char7 'T' <> timeOfDay64 t
{-# INLINE dayTime #-}
utcTime :: UTCTime -> B.Builder
utcTime (UTCTime d s) = dayTime d (diffTimeOfDay64 s) <> B.char7 'Z'
{-# INLINE utcTime #-}
localTime :: LocalTime -> Builder
localTime (LocalTime d t) = dayTime d (toTimeOfDay64 t)
{-# INLINE localTime #-}
zonedTime :: ZonedTime -> Builder
zonedTime (ZonedTime t z) = localTime t <> timeZone z
{-# INLINE zonedTime #-}
data T = T {-# UNPACK #-} !Char {-# UNPACK #-} !Char
twoDigits :: Int -> T
twoDigits a = T (digit hi) (digit lo)
where (hi,lo) = a `quotRem` 10
digit :: Int -> Char
digit x = chr (x + 48)
| sol/aeson | Data/Aeson/Encoding/Builder.hs | bsd-3-clause | 8,539 | 0 | 16 | 1,989 | 3,122 | 1,697 | 1,425 | 192 | 2 |
module Expression.AST where
data Variable = Variable String deriving Eq
data Expression = Const Int
| Var Variable
| BinExpr Expression BinOp Expression
| MultiExpr MultiOp [Expression]
| TimeDerivative Variable
| TimeDerivative2 Variable
data BinOp = Plus
| Minus
| Mult
| Quotient
| Exp
data MultiOp = Sum
| Product
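-- Illustrative encoding (a sketch; the variable names are made up here):
-- the expression @x + 2*y@ could be represented as
--
-- > BinExpr (Var (Variable "x"))
-- >         Plus
-- >         (BinExpr (Const 2) Mult (Var (Variable "y")))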
| Zomega/thesis | Wurm/CAS/Expression/AST.hs | mit | 354 | 0 | 7 | 87 | 90 | 54 | 36 | 15 | 0 |
-- Compiler Toolkit: basic error management
--
-- Author : Manuel M. T. Chakravarty
-- Created: 20 February 95
--
-- Copyright (c) [1995..2000] Manuel M. T. Chakravarty
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Library General Public
-- License as published by the Free Software Foundation; either
-- version 2 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Library General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
--  This module exports some auxiliary routines for error handling.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--  * the single lines of error messages shouldn't be too long, as file name
-- and position are prepended at each line
--
--- TODO ----------------------------------------------------------------------
--
module Data.Errors (
-- handling of internal error
--
interr, todo,
--
-- errors in the compiled program (wrapper to Language.C Error type)
--
ErrorLevel(..), Error, makeError, errorLevel, showError, errorAtPos
) where
import Language.C.Data.Error hiding (Error)
import Language.C.Data.Position
type Error = CError
-- internal errors
-- ---------------
-- | raise a fatal internal error; message may have multiple lines
--
interr :: String -> a
interr msg = error ("INTERNAL COMPILER ERROR:\n"
++ indentMultilineString 2 msg
++ "\n")
-- | raise an error due to an implementation restriction; message may have multiple
-- lines
--
todo :: String -> a
todo msg = error ("Feature not yet implemented:\n"
++ indentMultilineString 2 msg
++ "\n")
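-- For instance (illustrative; the message text is made up here):
--
-- > todo "variadic functions"
--
-- aborts the program with
--
-- > Feature not yet implemented:
-- >   variadic functions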
-- | produce an 'Error', given its level, position, and a list of lines of
-- the error message that must not be empty
--
makeError :: ErrorLevel -> Position -> [String] -> Error
makeError lvl pos msgs = CError $ ErrorInfo lvl pos msgs
errorAtPos :: Position -> [String] -> a
errorAtPos pos = error
--FIXME: should be using show here, but Show instance
    --       for CError from language-c is weird
. showErrorInfo "" . errorInfo
. makeError LevelError pos
-- | indent the given multiline text by the given number of spaces
--
indentMultilineString :: Int -> String -> String
indentMultilineString n = unlines . (map (spaces++)) . lines
where
spaces = take n (repeat ' ')
| jrockway/c2hs | src/Data/Errors.hs | gpl-2.0 | 2,860 | 0 | 9 | 702 | 327 | 201 | 126 | 23 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.KMS.ListGrants
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List the grants for a specified key.
--
-- /See:/ <http://docs.aws.amazon.com/kms/latest/APIReference/API_ListGrants.html AWS API Reference> for ListGrants.
module Network.AWS.KMS.ListGrants
(
-- * Creating a Request
listGrants
, ListGrants
-- * Request Lenses
, lgMarker
, lgLimit
, lgKeyId
-- * Destructuring the Response
, listGrantsResponse
, ListGrantsResponse
-- * Response Lenses
, lgrsTruncated
, lgrsGrants
, lgrsNextMarker
, lgrsResponseStatus
) where
import Network.AWS.KMS.Types
import Network.AWS.KMS.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'listGrants' smart constructor.
data ListGrants = ListGrants'
{ _lgMarker :: !(Maybe Text)
, _lgLimit :: !(Maybe Nat)
, _lgKeyId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListGrants' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lgMarker'
--
-- * 'lgLimit'
--
-- * 'lgKeyId'
listGrants
:: Text -- ^ 'lgKeyId'
-> ListGrants
listGrants pKeyId_ =
ListGrants'
{ _lgMarker = Nothing
, _lgLimit = Nothing
, _lgKeyId = pKeyId_
}
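-- Illustrative usage (a sketch; the key ID is a placeholder and '&' / '?~'
-- come from the lens package): request up to 50 grants for one key.
--
-- > listGrants "12345678-1234-1234-1234-123456789012" & lgLimit ?~ 50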
-- | Use this parameter only when paginating results, and only in a
-- subsequent request after you\'ve received a response where the results
-- are truncated. Set it to the value of the 'NextMarker' in the response
-- you just received.
lgMarker :: Lens' ListGrants (Maybe Text)
lgMarker = lens _lgMarker (\ s a -> s{_lgMarker = a});
-- | Specify this parameter only when paginating results to indicate the
-- maximum number of grants you want listed in the response. If there are
-- additional grants beyond the maximum you specify, the 'Truncated'
-- response element will be set to 'true.'
lgLimit :: Lens' ListGrants (Maybe Natural)
lgLimit = lens _lgLimit (\ s a -> s{_lgLimit = a}) . mapping _Nat;
-- | A unique identifier for the customer master key. This value can be a
-- globally unique identifier or the fully specified ARN to a key.
--
-- - Key ARN Example -
-- arn:aws:kms:us-east-1:123456789012:key\/12345678-1234-1234-1234-123456789012
-- - Globally Unique Key ID Example -
-- 12345678-1234-1234-1234-123456789012
lgKeyId :: Lens' ListGrants Text
lgKeyId = lens _lgKeyId (\ s a -> s{_lgKeyId = a});
instance AWSRequest ListGrants where
type Rs ListGrants = ListGrantsResponse
request = postJSON kMS
response
= receiveJSON
(\ s h x ->
ListGrantsResponse' <$>
(x .?> "Truncated") <*> (x .?> "Grants" .!@ mempty)
<*> (x .?> "NextMarker")
<*> (pure (fromEnum s)))
instance ToHeaders ListGrants where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("TrentService.ListGrants" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON ListGrants where
toJSON ListGrants'{..}
= object
(catMaybes
[("Marker" .=) <$> _lgMarker,
("Limit" .=) <$> _lgLimit,
Just ("KeyId" .= _lgKeyId)])
instance ToPath ListGrants where
toPath = const "/"
instance ToQuery ListGrants where
toQuery = const mempty
-- | /See:/ 'listGrantsResponse' smart constructor.
data ListGrantsResponse = ListGrantsResponse'
{ _lgrsTruncated :: !(Maybe Bool)
, _lgrsGrants :: !(Maybe [GrantListEntry])
, _lgrsNextMarker :: !(Maybe Text)
, _lgrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListGrantsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lgrsTruncated'
--
-- * 'lgrsGrants'
--
-- * 'lgrsNextMarker'
--
-- * 'lgrsResponseStatus'
listGrantsResponse
:: Int -- ^ 'lgrsResponseStatus'
-> ListGrantsResponse
listGrantsResponse pResponseStatus_ =
ListGrantsResponse'
{ _lgrsTruncated = Nothing
, _lgrsGrants = Nothing
, _lgrsNextMarker = Nothing
, _lgrsResponseStatus = pResponseStatus_
}
-- | A flag that indicates whether there are more items in the list. If your
-- results were truncated, you can make a subsequent pagination request
-- using the 'Marker' request parameter to retrieve more grants in the
-- list.
lgrsTruncated :: Lens' ListGrantsResponse (Maybe Bool)
lgrsTruncated = lens _lgrsTruncated (\ s a -> s{_lgrsTruncated = a});
-- | A list of grants.
lgrsGrants :: Lens' ListGrantsResponse [GrantListEntry]
lgrsGrants = lens _lgrsGrants (\ s a -> s{_lgrsGrants = a}) . _Default . _Coerce;
-- | If 'Truncated' is true, this value is present and contains the value to
-- use for the 'Marker' request parameter in a subsequent pagination
-- request.
lgrsNextMarker :: Lens' ListGrantsResponse (Maybe Text)
lgrsNextMarker = lens _lgrsNextMarker (\ s a -> s{_lgrsNextMarker = a});
-- | The response status code.
lgrsResponseStatus :: Lens' ListGrantsResponse Int
lgrsResponseStatus = lens _lgrsResponseStatus (\ s a -> s{_lgrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-kms/gen/Network/AWS/KMS/ListGrants.hs | mpl-2.0 | 6,056 | 0 | 14 | 1,416 | 941 | 561 | 380 | 111 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DetachInternetGateway
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Detaches an Internet gateway from a VPC, disabling connectivity between
-- the Internet and the VPC. The VPC must not contain any running instances
-- with Elastic IP addresses.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DetachInternetGateway.html AWS API Reference> for DetachInternetGateway.
module Network.AWS.EC2.DetachInternetGateway
(
-- * Creating a Request
detachInternetGateway
, DetachInternetGateway
-- * Request Lenses
, digDryRun
, digInternetGatewayId
, digVPCId
-- * Destructuring the Response
, detachInternetGatewayResponse
, DetachInternetGatewayResponse
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'detachInternetGateway' smart constructor.
data DetachInternetGateway = DetachInternetGateway'
{ _digDryRun :: !(Maybe Bool)
, _digInternetGatewayId :: !Text
, _digVPCId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DetachInternetGateway' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'digDryRun'
--
-- * 'digInternetGatewayId'
--
-- * 'digVPCId'
detachInternetGateway
:: Text -- ^ 'digInternetGatewayId'
-> Text -- ^ 'digVPCId'
-> DetachInternetGateway
detachInternetGateway pInternetGatewayId_ pVPCId_ =
DetachInternetGateway'
{ _digDryRun = Nothing
, _digInternetGatewayId = pInternetGatewayId_
, _digVPCId = pVPCId_
}
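-- Illustrative usage (a sketch; both IDs are placeholders and '&' / '?~'
-- come from the lens package): dry-run the detachment first.
--
-- > detachInternetGateway "igw-1a2b3c4d" "vpc-1a2b3c4d" & digDryRun ?~ True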
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
digDryRun :: Lens' DetachInternetGateway (Maybe Bool)
digDryRun = lens _digDryRun (\ s a -> s{_digDryRun = a});
-- | The ID of the Internet gateway.
digInternetGatewayId :: Lens' DetachInternetGateway Text
digInternetGatewayId = lens _digInternetGatewayId (\ s a -> s{_digInternetGatewayId = a});
-- | The ID of the VPC.
digVPCId :: Lens' DetachInternetGateway Text
digVPCId = lens _digVPCId (\ s a -> s{_digVPCId = a});
instance AWSRequest DetachInternetGateway where
type Rs DetachInternetGateway =
DetachInternetGatewayResponse
request = postQuery eC2
response = receiveNull DetachInternetGatewayResponse'
instance ToHeaders DetachInternetGateway where
toHeaders = const mempty
instance ToPath DetachInternetGateway where
toPath = const "/"
instance ToQuery DetachInternetGateway where
toQuery DetachInternetGateway'{..}
= mconcat
["Action" =: ("DetachInternetGateway" :: ByteString),
"Version" =: ("2015-04-15" :: ByteString),
"DryRun" =: _digDryRun,
"InternetGatewayId" =: _digInternetGatewayId,
"VpcId" =: _digVPCId]
-- | /See:/ 'detachInternetGatewayResponse' smart constructor.
data DetachInternetGatewayResponse =
DetachInternetGatewayResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DetachInternetGatewayResponse' with the minimum fields required to make a request.
--
detachInternetGatewayResponse
:: DetachInternetGatewayResponse
detachInternetGatewayResponse = DetachInternetGatewayResponse'
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DetachInternetGateway.hs | mpl-2.0 | 4,256 | 0 | 11 | 834 | 520 | 315 | 205 | 71 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Test.AWS.Gen.WorkSpaces
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Test.AWS.Gen.WorkSpaces where
import Data.Proxy
import Test.AWS.Fixture
import Test.AWS.Prelude
import Test.Tasty
import Network.AWS.WorkSpaces
import Test.AWS.WorkSpaces.Internal
-- Auto-generated: the actual test selection needs to be manually placed into
-- the top-level so that real test data can be incrementally added.
--
-- This commented snippet is what the entire set should look like:
-- fixtures :: TestTree
-- fixtures =
-- [ testGroup "request"
-- [ testDescribeWorkspaceDirectories $
-- describeWorkspaceDirectories
--
-- , testDescribeWorkspaceBundles $
-- describeWorkspaceBundles
--
-- , testRebuildWorkspaces $
-- rebuildWorkspaces
--
-- , testRebootWorkspaces $
-- rebootWorkspaces
--
-- , testTerminateWorkspaces $
-- terminateWorkspaces
--
-- , testCreateWorkspaces $
-- createWorkspaces
--
-- , testDescribeWorkspaces $
-- describeWorkspaces
--
-- ]
-- , testGroup "response"
-- [ testDescribeWorkspaceDirectoriesResponse $
-- describeWorkspaceDirectoriesResponse
--
-- , testDescribeWorkspaceBundlesResponse $
-- describeWorkspaceBundlesResponse
--
-- , testRebuildWorkspacesResponse $
-- rebuildWorkspacesResponse
--
-- , testRebootWorkspacesResponse $
-- rebootWorkspacesResponse
--
-- , testTerminateWorkspacesResponse $
-- terminateWorkspacesResponse
--
-- , testCreateWorkspacesResponse $
-- createWorkspacesResponse
--
-- , testDescribeWorkspacesResponse $
-- describeWorkspacesResponse
--
-- ]
-- ]
-- Requests
testDescribeWorkspaceDirectories :: DescribeWorkspaceDirectories -> TestTree
testDescribeWorkspaceDirectories = req
"DescribeWorkspaceDirectories"
"fixture/DescribeWorkspaceDirectories.yaml"
testDescribeWorkspaceBundles :: DescribeWorkspaceBundles -> TestTree
testDescribeWorkspaceBundles = req
"DescribeWorkspaceBundles"
"fixture/DescribeWorkspaceBundles.yaml"
testRebuildWorkspaces :: RebuildWorkspaces -> TestTree
testRebuildWorkspaces = req
"RebuildWorkspaces"
"fixture/RebuildWorkspaces.yaml"
testRebootWorkspaces :: RebootWorkspaces -> TestTree
testRebootWorkspaces = req
"RebootWorkspaces"
"fixture/RebootWorkspaces.yaml"
testTerminateWorkspaces :: TerminateWorkspaces -> TestTree
testTerminateWorkspaces = req
"TerminateWorkspaces"
"fixture/TerminateWorkspaces.yaml"
testCreateWorkspaces :: CreateWorkspaces -> TestTree
testCreateWorkspaces = req
"CreateWorkspaces"
"fixture/CreateWorkspaces.yaml"
testDescribeWorkspaces :: DescribeWorkspaces -> TestTree
testDescribeWorkspaces = req
"DescribeWorkspaces"
"fixture/DescribeWorkspaces.yaml"
-- Responses
testDescribeWorkspaceDirectoriesResponse :: DescribeWorkspaceDirectoriesResponse -> TestTree
testDescribeWorkspaceDirectoriesResponse = res
"DescribeWorkspaceDirectoriesResponse"
"fixture/DescribeWorkspaceDirectoriesResponse.proto"
workSpaces
(Proxy :: Proxy DescribeWorkspaceDirectories)
testDescribeWorkspaceBundlesResponse :: DescribeWorkspaceBundlesResponse -> TestTree
testDescribeWorkspaceBundlesResponse = res
"DescribeWorkspaceBundlesResponse"
"fixture/DescribeWorkspaceBundlesResponse.proto"
workSpaces
(Proxy :: Proxy DescribeWorkspaceBundles)
testRebuildWorkspacesResponse :: RebuildWorkspacesResponse -> TestTree
testRebuildWorkspacesResponse = res
"RebuildWorkspacesResponse"
"fixture/RebuildWorkspacesResponse.proto"
workSpaces
(Proxy :: Proxy RebuildWorkspaces)
testRebootWorkspacesResponse :: RebootWorkspacesResponse -> TestTree
testRebootWorkspacesResponse = res
"RebootWorkspacesResponse"
"fixture/RebootWorkspacesResponse.proto"
workSpaces
(Proxy :: Proxy RebootWorkspaces)
testTerminateWorkspacesResponse :: TerminateWorkspacesResponse -> TestTree
testTerminateWorkspacesResponse = res
"TerminateWorkspacesResponse"
"fixture/TerminateWorkspacesResponse.proto"
workSpaces
(Proxy :: Proxy TerminateWorkspaces)
testCreateWorkspacesResponse :: CreateWorkspacesResponse -> TestTree
testCreateWorkspacesResponse = res
"CreateWorkspacesResponse"
"fixture/CreateWorkspacesResponse.proto"
workSpaces
(Proxy :: Proxy CreateWorkspaces)
testDescribeWorkspacesResponse :: DescribeWorkspacesResponse -> TestTree
testDescribeWorkspacesResponse = res
"DescribeWorkspacesResponse"
"fixture/DescribeWorkspacesResponse.proto"
workSpaces
(Proxy :: Proxy DescribeWorkspaces)
| fmapfmapfmap/amazonka | amazonka-workspaces/test/Test/AWS/Gen/WorkSpaces.hs | mpl-2.0 | 5,120 | 0 | 7 | 929 | 466 | 284 | 182 | 79 | 1 |
module Main where
--import Test.Framework (defaultMain, Test, testGroup)
import qualified Data.ByteString.Lazy.Builder.BasicEncoding.Tests
import qualified Data.ByteString.Lazy.Builder.Tests
import TestFramework
main :: IO ()
main = defaultMain tests
tests :: [Test]
tests =
[ testGroup "Builder"
Data.ByteString.Lazy.Builder.Tests.tests
, testGroup "BasicEncoding"
Data.ByteString.Lazy.Builder.BasicEncoding.Tests.tests
]
| meiersi/bytestring-builder | tests/builder/TestSuite.hs | bsd-3-clause | 470 | 0 | 7 | 85 | 87 | 57 | 30 | 12 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Types where
import Debug.Trace
import Test.QuickCheck
type Longitude = Double -- -180 .. 180
type Latitude = Double -- -90 .. 90
newtype Geographic = Geographic (Longitude,Latitude)
deriving Show
type ScreenX = Double -- -1 .. 1, + on right
type ScreenY = Double -- -1 .. 1, + on top
type Distance = Double -- nominally meters
type Inclination = Double -- "latitude", In Radians, 0 is ahead, + is up, normalized to +/- pi/2
type Azimuth = Double -- "longitude", In Radians, 0 is ahead, + is right, normalized to +/- pi
newtype Spherical = Spherical (Distance,Inclination,Azimuth)
deriving Show
-- x == in/out, in+
-- y == left/right, right+
-- z == up/down, up+
newtype Cartesian = Cartesian (Double,Double,Double) -- in nominal meters
deriving Show
newtype ScreenCoord = ScreenCoord (Double,Double)
deriving Show
type CanvasCoord = (Double,Double)
radian2degree :: Double -> Double
radian2degree = (* (180 / pi))
degree2radian :: Double -> Double
degree2radian = (/ (180 / pi))
-- From http://stackoverflow.com/questions/24234609/standard-way-to-normalize-an-angle-to-%CF%80-radians-in-java
-- Normalize a radian
radian :: Double -> Double
radian t = t - pi * 2 * fromIntegral (floor((t + pi) / (pi * 2)))
-- We define that the Spherical is a sphere 5 units (meters) radius
geographicToSpherical :: Geographic -> Spherical
geographicToSpherical (Geographic (long,lat)) = Spherical (5, degree2radian lat, degree2radian long)
cartesian2Spherical :: Cartesian -> Spherical
cartesian2Spherical (Cartesian (0,0,0)) = Spherical (0,0,0) -- choice
cartesian2Spherical (Cartesian (x,y,z)) = mkSpherical (r,t,u)
  where
    r = sqrt (x^2 + y^2 + z^2)
    t = asin (z / r)  -- inclination angle, "latitude", -pi/2 .. pi/2
    u = atan2 y x     -- azimuth angle, "longitude", -pi .. pi
mkSpherical :: (Double,Double,Double) -> Spherical
mkSpherical (r,t,u) = mkSpherical' (r,radian t, radian u)
where
mkSpherical' (r,t,u)
| traceShow ("mkSpherical",(r,t,u)) False = undefined
-- | t < -pi / 2 = mkSpherical (r,-t,u' + pi)
-- | t > pi / 2 = mkSpherical (r,-t,u' - pi)
| otherwise = Spherical (r,t,u) -- t is +/- pi/2 (+/-90), u is +/- pi (+/-180)
spherical2Cartesian :: Spherical -> Cartesian
spherical2Cartesian (Spherical (r,t,u)) = Cartesian (x,y,z)
where
x = r * cos t * cos u
y = r * cos t * sin u
z = r * sin t
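-- Illustrative sanity check (a sketch; the values are chosen here): a point
-- straight ahead at distance 5 lies on the positive x axis.
--
-- >>> spherical2Cartesian (Spherical (5, 0, 0))
-- Cartesian (5.0,0.0,0.0)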
class Coordinate c where
toSpherical :: c -> Spherical
toCartesian :: c -> Cartesian
instance Coordinate Cartesian where
toSpherical = cartesian2Spherical
toCartesian = id
instance Coordinate Spherical where
toSpherical = id
toCartesian = spherical2Cartesian
instance Coordinate Geographic where
toSpherical = toSpherical . geographicToSpherical
toCartesian = toCartesian . geographicToSpherical
class Lerp a where
lerp2 :: a -> a -> Double -> a
instance Lerp Double where
lerp2 a b s = b * s + a * (1 - s)
instance Lerp Geographic where
lerp2 (Geographic a) (Geographic b) s = Geographic (lerp2 a b s)
instance (Lerp a, Lerp b) => Lerp (a,b) where
lerp2 (a1,a2) (b1,b2) s = (lerp2 a1 b1 s,lerp2 a2 b2 s)
instance (Lerp a, Lerp b, Lerp c) => Lerp (a,b,c) where
lerp2 (a1,a2,a3) (b1,b2,b3) s = (lerp2 a1 b1 s,lerp2 a2 b2 s,lerp2 a3 b3 s)
interpolate :: (Monad m, Lerp a) => Int -> a -> a -> (a -> a -> m ()) -> m ()
interpolate n a b f = sequence_ [ f j j' | (j,j') <- js `zip` tail js ]
where js = joints n a b
joints :: Lerp a => Int -> a -> a -> [a]
joints n a b = [ lerp2 a b (fromIntegral s/fromIntegral n) | s <- [0..n]]
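-- Illustrative: two segments give three evenly spaced joints, endpoints
-- included (a sketch with plain Doubles).
--
-- >>> joints 2 (0 :: Double) 1
-- [0.0,0.5,1.0]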
-- wrap around if line is too long
distance :: CanvasCoord -> CanvasCoord -> Double
distance (x,y) (x',y') = sqrt (xd * xd + yd * yd)
where
xd = x - x'
yd = y - y'
------------------------------------------------------------------------
-- QC
------------------------------------------------------------------------
instance Arbitrary Cartesian where
arbitrary = fmap Cartesian arbitrary
-- shrink (Cartesian a) = [ Cartesian c | c <- shrink a ]
prop_c2s_then_s2c (c@(Cartesian (x,y,z))) =
abs (x*x + y*y + z*z) > 1e-10 ==> -- make sure you are not *too* close to the origin
-- label (show (x,y,z)) $
c `eqish` spherical2Cartesian (cartesian2Spherical c)
class Eqish a where
eqish :: a -> a -> Bool
instance Eqish Double where
eqish a b = abs (a - b) < 1e-6
instance (Eqish a, Eqish b, Eqish c) => Eqish (a,b,c) where
eqish (a,b,c) (a',b',c') = eqish a a' && eqish b b' && eqish c c'
instance Eqish Cartesian where
eqish (Cartesian c1) (Cartesian c2) = c1 `eqish` c2
| andygill/willowbrae | projections/Types.hs | bsd-3-clause | 4,672 | 0 | 14 | 1,013 | 1,590 | 879 | 711 | 88 | 1 |
{-# LANGUAGE CPP, NoMonomorphismRestriction #-}
#include "fusion-phases.h"
module Data.Array.Parallel.Unlifted.Stream.Segments
( streamSegsFromNestedUSSegd
, streamSegsFromVectorsUSSegd
, streamSegsFromVectorsUVSegd
, streamSegsFromVectorsUSSegdSegmap
, streamSegsFromVectorsUSSegd_split)
where
import Data.Vector.Fusion.Bundle.Monadic (Bundle(..), fromStream)
import Data.Vector.Fusion.Bundle.Size
import Data.Vector.Fusion.Stream.Monadic (Stream(..), Step(..))
import Data.Array.Parallel.Unlifted.Sequential.Vector (Unbox, Vector, index)
import Data.Array.Parallel.Unlifted.Vectors (Unboxes, Vectors)
import Data.Array.Parallel.Unlifted.Sequential.USegd (USegd(..))
import Data.Array.Parallel.Unlifted.Sequential.USSegd (USSegd(..))
import Data.Array.Parallel.Unlifted.Sequential.UVSegd (UVSegd(..))
import qualified Data.Array.Parallel.Unlifted.Vectors as US
import qualified Data.Array.Parallel.Unlifted.Sequential.USegd as USegd
import qualified Data.Array.Parallel.Unlifted.Sequential.USSegd as USSegd
import qualified Data.Array.Parallel.Unlifted.Sequential.Vector as U
import qualified Data.Vector as V
import qualified Data.Primitive.ByteArray as P
import System.IO.Unsafe
-- Nested -----------------------------------------------------------------------------------------
-- | Stream some physical segments from many data arrays.
---
-- * TODO: make this more efficient, and fix fusion.
-- We should be able to eliminate a lot of the indexing happening in the
-- inner loop by being cleverer about the loop state.
--
-- * TODO: If this is contiguous then we can stream the lot without worrying
-- about jumping between segments. EXCEPT that this information must be
-- statically visible else streamSegs won't fuse, so we can't have an
-- ifThenElse checking the manifest flag.
streamSegsFromNestedUSSegd
:: (Unbox a, Monad m)
=> V.Vector (Vector a) -- ^ Source arrays.
-> USSegd -- ^ Segment descriptor defining segments base on source vectors.
-> Bundle m v a
streamSegsFromNestedUSSegd
pdatas
ussegd@(USSegd _ starts sources usegd)
= let
here = "streamSegsFromNestedUSSegd"
-- length of each segment
pseglens = USegd.takeLengths usegd
-- We've finished streaming this pseg
{-# INLINE_INNER fn #-}
fn (pseg, ix)
-- All psegs are done.
| pseg >= USSegd.length ussegd
= return $ Done
-- Current pseg is done
| ix >= U.index here pseglens pseg
= return $ Skip (pseg + 1, 0)
-- Stream an element from this pseg
| otherwise
= let !srcid = index here sources pseg
!pdata = pdatas `V.unsafeIndex` srcid
!start = index here starts pseg
!result = index here pdata (start + ix)
in return $ Yield result (pseg, ix + 1)
in fromStream (Stream fn (0, 0)) Unknown
{-# INLINE_STREAM streamSegsFromNestedUSSegd #-}
-- Vectors ----------------------------------------------------------------------------------------
-- | Stream segments from a `Vectors`.
--
-- * There must be at least one segment in the `USSegd`, but this is not checked.
--
-- * No bounds checking is done for the `USSegd`.
--
streamSegsFromVectorsUSSegd
:: (Unboxes a, Monad m)
=> Vectors a -- ^ Vectors holding source data.
-> USSegd -- ^ Scattered segment descriptor
-> Bundle m v a
streamSegsFromVectorsUSSegd
vectors
ussegd@(USSegd _ segStarts segSources usegd)
= segStarts `seq` segSources `seq` usegd `seq` vectors `seq`
let here = "stremSegsFromVectorsUSSegd"
-- Length of each segment
!segLens = USegd.takeLengths usegd
-- Total number of segments.
!segsTotal = USSegd.length ussegd
-- Total number of elements to stream.
!elements = USegd.takeElements usegd
-- seg, ix of that seg in usegd, length of seg, elem in seg
{-# INLINE_INNER fnSeg #-}
fnSeg (ixSeg, baSeg, ixEnd, ixElem)
= ixSeg `seq` baSeg `seq`
if ixElem >= ixEnd -- Was that the last elem in the current seg?
then if ixSeg + 1 >= segsTotal -- Was that last seg?
-- That was the last seg, we're done.
then return $ Done
-- Move to the next seg.
else let ixSeg' = ixSeg + 1
sourceSeg = index here segSources ixSeg'
startSeg = index here segStarts ixSeg'
lenSeg = index here segLens ixSeg'
(arr, startArr, _)
= US.unsafeIndexUnpack vectors sourceSeg
in return $ Skip
( ixSeg'
, arr
, startArr + startSeg + lenSeg
, startArr + startSeg)
-- Stream the next element from the segment.
else let !result = P.indexByteArray baSeg ixElem
in return $ Yield result (ixSeg, baSeg, ixEnd, ixElem + 1)
-- Starting state of the stream.
-- CAREFUL:
-- The ussegd might not contain any segments, so we can't initialise the state
-- just by taking the first segment length etc from the ussegd.
-- On the other hand, we don't want to use an extra case expression to test for
        -- this situation, as that could break fusion.
-- Instead, start with a dummy state which forces the loop to grab the first
-- segment, if there are any.
!dummy = unsafePerformIO
$ P.newByteArray 0 >>= P.unsafeFreezeByteArray
!initState
= ( -1 -- force fnSeg loop to load first seg
, dummy -- dummy array data to start with
, 0 -- force fnSeg loop to load first seg
, 0)
-- It's important that we set the result stream size, so Data.Vector
-- doesn't need to add code to grow the result when it overflows.
in fromStream (Stream fnSeg initState) (Exact elements)
{-# INLINE_STREAM streamSegsFromVectorsUSSegd #-}
-- Vectors ----------------------------------------------------------------------------------------
-- | Stream segments from a `Vectors`.
--
-- * There must be at least one segment in the `USSegd`, but this is not checked.
--
-- * No bounds checking is done for the `USSegd`.
--
streamSegsFromVectorsUVSegd
:: (Unboxes a, Monad m)
=> Vectors a -- ^ Vectors holding source data.
-> UVSegd -- ^ Scattered segment descriptor
-> Bundle m v a
streamSegsFromVectorsUVSegd
vectors
(UVSegd _ _ segmap _ ussegd)
= streamSegsFromVectorsUSSegdSegmap vectors ussegd segmap
{-# INLINE_STREAM streamSegsFromVectorsUVSegd #-}
streamSegsFromVectorsUSSegdSegmap
:: (Unboxes a, Monad m)
=> Vectors a -- ^ Vectors holding source data.
-> USSegd -- ^ Scattered segment descriptor
-> Vector Int -- ^ Segmap
-> Bundle m v a
streamSegsFromVectorsUSSegdSegmap
vectors ussegd@(USSegd _ segStarts segSources usegd) segmap
= segStarts `seq` segSources `seq` usegd `seq` segmap `seq`
let here = "stremSegsFromVectorsUVSegd"
-- Total number of elements to be streamed
!lengths = USSegd.takeLengths ussegd
!elemsTotal = U.sum $ U.map (U.index here lengths) segmap
-- Total number of segments.
!segsTotal = U.length segmap
-- Length of each physical segment.
!segLens = USegd.takeLengths usegd
-- seg, ix of that seg in usegd, length of seg, elem in seg
{-# INLINE_INNER fnSeg #-}
fnSeg (ixSeg, baSeg, ixEnd, ixElem)
= ixSeg `seq` baSeg `seq`
if ixElem >= ixEnd -- Was that the last elem in the current seg?
then if ixSeg + 1 >= segsTotal -- Was that last seg?
-- That was the last seg, we're done.
then return $ Done
-- Move to the next seg.
else let ixSeg' = ixSeg + 1
ixPSeg = index here segmap ixSeg'
sourceSeg = index here segSources ixPSeg
startSeg = index here segStarts ixPSeg
lenSeg = index here segLens ixPSeg
(arr, startArr, _)
= US.unsafeIndexUnpack vectors sourceSeg
in return $ Skip
( ixSeg'
, arr
, startArr + startSeg + lenSeg
, startArr + startSeg)
-- Stream the next element from the segment.
else let !result = P.indexByteArray baSeg ixElem
in return $ Yield result (ixSeg, baSeg, ixEnd, ixElem + 1)
-- Starting state of the stream.
!dummy = unsafePerformIO
$ P.newByteArray 0 >>= P.unsafeFreezeByteArray
!initState
= ( -1 -- force fnSeg loop to load first seg
, dummy -- dummy array data to start with
, 0 -- force fnSeg loop to load first seg
, 0)
-- It's important that we set the result stream size, so Data.Vector
-- doesn't need to add code to grow the result when it overflows.
in fromStream (Stream fnSeg initState) (Exact elemsTotal)
{-# INLINE_STREAM streamSegsFromVectorsUSSegdSegmap #-}
streamSegsFromVectorsUSSegd_split
:: (Unboxes a, Monad m)
=> Vectors a -- ^ Vectors holding source data.
-> USSegd -- ^ Scattered segment descriptor
-> Vector Int -- ^ Virtual segment ids
-> ((USegd,Int),Int) -- ^ Segmap
-> Bundle m v a
streamSegsFromVectorsUSSegd_split
!vectors !ussegd
!vsegids ((!segd,!seg_off),!el_off)
= let here = "streamSegsFromVectorsUSSegd_split"
-- Total number of elements to be streamed
!lengths = USegd.takeLengths segd
!elemsTotal = U.sum lengths
-- Total number of segments.
!segsTotal = U.length lengths
!segStarts = USSegd.takeStarts ussegd
!segSources = USSegd.takeSources ussegd
vsegid seg = index here vsegids (seg + seg_off)
{-# INLINE vsegid #-}
source pseg = index here segSources pseg
{-# INLINE source #-}
start pseg = index here segStarts pseg
{-# INLINE start #-}
len seg = index here lengths seg
{-# INLINE len #-}
-- seg, ix of that seg in usegd, length of seg, elem in seg
{-# INLINE_INNER fnSeg #-}
fnSeg (!ixSeg, !baSeg, !ixEnd, !ixElem)
= if ixElem >= ixEnd -- Was that the last elem in the current seg?
then if ixSeg + 1 >= segsTotal -- Was that last seg?
-- That was the last seg, we're done.
then return $ Done
-- Move to the next seg.
else let ixSeg' = ixSeg + 1
ixPSeg = vsegid ixSeg'
sourceSeg = source ixPSeg
startSeg = start ixPSeg
lenSeg = len ixSeg'
el_off' = if ixSeg' == 0 then el_off else 0
(arr, startArr, _)
= US.unsafeIndexUnpack vectors sourceSeg
in return $ Skip
( ixSeg'
, arr
, startArr + startSeg + el_off' + lenSeg
, startArr + startSeg + el_off')
-- Stream the next element from the segment.
else let !result = P.indexByteArray baSeg ixElem
in return $ Yield result (ixSeg, baSeg, ixEnd, ixElem + 1)
-- Starting state of the stream.
!dummy = unsafePerformIO
$ P.newByteArray 0 >>= P.unsafeFreezeByteArray
!initState
= ( -1 -- force fnSeg loop to load first seg
, dummy -- dummy array data to start with
, 0 -- force fnSeg loop to load first seg
, 0)
-- It's important that we set the result stream size, so Data.Vector
-- doesn't need to add code to grow the result when it overflows.
in fromStream (Stream fnSeg initState) (Exact elemsTotal)
{-# INLINE_STREAM streamSegsFromVectorsUSSegd_split #-}
| mainland/dph | dph-prim-seq/Data/Array/Parallel/Unlifted/Stream/Segments.hs | bsd-3-clause | 13,609 | 0 | 19 | 5,181 | 2,086 | 1,164 | 922 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- | Resolving a build plan for a set of packages in a given Stackage
-- snapshot.
module Stack.BuildPlan
( BuildPlanException (..)
, BuildPlanCheck (..)
, checkSnapBuildPlan
, DepError(..)
, DepErrors
, gpdPackageDeps
, gpdPackages
, gpdPackageName
, MiniBuildPlan(..)
, MiniPackageInfo(..)
, loadResolver
, loadMiniBuildPlan
, removeSrcPkgDefaultFlags
, resolveBuildPlan
, selectBestSnapshot
, getToolMap
, shadowMiniBuildPlan
, showItems
, showPackageFlags
, parseCustomMiniBuildPlan
) where
import Control.Applicative
import Control.Exception (assert)
import Control.Monad (liftM, forM, unless)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (asks)
import Control.Monad.State.Strict (State, execState, get, modify,
put)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Crypto.Hash.SHA256 as SHA256
import Data.Aeson.Extended (WithJSONWarnings(..), logJSONWarnings)
import Data.Store.VersionTagged
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64.URL as B64URL
import qualified Data.ByteString.Char8 as S8
import Data.Either (partitionEithers)
import qualified Data.Foldable as F
import qualified Data.HashSet as HashSet
import Data.List (intercalate)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe, mapMaybe, isNothing)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Traversable as Tr
import Data.Typeable (Typeable)
import Data.Yaml.Extra (decodeEither', decodeFileEither)
import qualified Distribution.Package as C
import Distribution.PackageDescription (GenericPackageDescription,
flagDefault, flagManual,
flagName, genPackageFlags,
executables, exeName, library, libBuildInfo, buildable)
import qualified Distribution.PackageDescription as C
import Distribution.System (Platform)
import Distribution.Text (display)
import qualified Distribution.Version as C
import Network.HTTP.Download
import Path
import Path.IO
import Prelude -- Fix AMP warning
import Stack.Constants
import Stack.Fetch
import Stack.Package
import Stack.PackageIndex
import Stack.Types.BuildPlan
import Stack.Types.FlagName
import Stack.Types.PackageIdentifier
import Stack.Types.PackageIndex
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Urls
import Stack.Types.Compiler
import Stack.Types.StackT
data BuildPlanException
= UnknownPackages
(Path Abs File) -- stack.yaml file
(Map PackageName (Maybe Version, Set PackageName)) -- truly unknown
(Map PackageName (Set PackageIdentifier)) -- shadowed
| SnapshotNotFound SnapName
| FilepathInDownloadedSnapshot T.Text
| NeitherCompilerOrResolverSpecified T.Text
deriving (Typeable)
instance Exception BuildPlanException
instance Show BuildPlanException where
show (SnapshotNotFound snapName) = unlines
[ "SnapshotNotFound " ++ snapName'
, "Non existing resolver: " ++ snapName' ++ "."
, "For a complete list of available snapshots see https://www.stackage.org/snapshots"
]
where snapName' = show $ renderSnapName snapName
show (UnknownPackages stackYaml unknown shadowed) =
unlines $ unknown' ++ shadowed'
where
unknown' :: [String]
unknown'
| Map.null unknown = []
| otherwise = concat
[ ["The following packages do not exist in the build plan:"]
, map go (Map.toList unknown)
, case mapMaybe goRecommend $ Map.toList unknown of
[] -> []
rec ->
("Recommended action: modify the extra-deps field of " ++
toFilePath stackYaml ++
" to include the following:")
: (rec
++ ["Note: further dependencies may need to be added"])
, case mapMaybe getNoKnown $ Map.toList unknown of
[] -> []
noKnown ->
[ "There are no known versions of the following packages:"
, intercalate ", " $ map packageNameString noKnown
]
]
where
go (dep, (_, users)) | Set.null users = packageNameString dep
go (dep, (_, users)) = concat
[ packageNameString dep
, " (used by "
, intercalate ", " $ map packageNameString $ Set.toList users
, ")"
]
goRecommend (name, (Just version, _)) =
Just $ "- " ++ packageIdentifierString (PackageIdentifier name version)
goRecommend (_, (Nothing, _)) = Nothing
getNoKnown (name, (Nothing, _)) = Just name
getNoKnown (_, (Just _, _)) = Nothing
shadowed' :: [String]
shadowed'
| Map.null shadowed = []
| otherwise = concat
[ ["The following packages are shadowed by local packages:"]
, map go (Map.toList shadowed)
, ["Recommended action: modify the extra-deps field of " ++
toFilePath stackYaml ++
" to include the following:"]
, extraDeps
, ["Note: further dependencies may need to be added"]
]
where
go (dep, users) | Set.null users = concat
[ packageNameString dep
, " (internal stack error: this should never be null)"
]
go (dep, users) = concat
[ packageNameString dep
, " (used by "
, intercalate ", "
$ map (packageNameString . packageIdentifierName)
$ Set.toList users
, ")"
]
extraDeps = map (\ident -> "- " ++ packageIdentifierString ident)
$ Set.toList
$ Set.unions
$ Map.elems shadowed
show (FilepathInDownloadedSnapshot url) = unlines
[ "Downloaded snapshot specified a 'resolver: { location: filepath }' "
, "field, but filepaths are not allowed in downloaded snapshots.\n"
, "Filepath specified: " ++ T.unpack url
]
show (NeitherCompilerOrResolverSpecified url) =
"Failed to load custom snapshot at " ++
T.unpack url ++
", because no 'compiler' or 'resolver' is specified."
-- | Determine the necessary packages to install to have the given set of
-- packages available.
--
-- This function will not provide test suite and benchmark dependencies.
--
-- This may fail if a target package is not present in the @BuildPlan@.
resolveBuildPlan :: (MonadThrow m, MonadIO m, MonadReader env m, HasBuildConfig env, MonadLogger m, HasHttpManager env, MonadBaseControl IO m,MonadCatch m)
=> MiniBuildPlan
-> (PackageName -> Bool) -- ^ is it shadowed by a local package?
-> Map PackageName (Set PackageName) -- ^ required packages, and users of it
-> m ( Map PackageName (Version, Map FlagName Bool)
, Map PackageName (Set PackageName)
)
resolveBuildPlan mbp isShadowed packages
| Map.null (rsUnknown rs) && Map.null (rsShadowed rs) = return (rsToInstall rs, rsUsedBy rs)
| otherwise = do
bconfig <- asks getBuildConfig
caches <- getPackageCaches
let maxVer =
Map.fromListWith max $
map toTuple $
Map.keys caches
unknown = flip Map.mapWithKey (rsUnknown rs) $ \ident x ->
(Map.lookup ident maxVer, x)
throwM $ UnknownPackages
(bcStackYaml bconfig)
unknown
(rsShadowed rs)
where
rs = getDeps mbp isShadowed packages
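-- A rough usage sketch (everything apart from 'resolveBuildPlan' itself is
-- assumed to be supplied by the caller): given a loaded 'MiniBuildPlan', a
-- predicate for locally shadowed packages and the requested targets,
--
-- > (toInstall, usedBy) <- resolveBuildPlan mbp isLocal targets
--
-- yields the versions and flags to install together with the map of which
-- packages require each dependency.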
data ResolveState = ResolveState
{ rsVisited :: Map PackageName (Set PackageName) -- ^ set of shadowed dependencies
, rsUnknown :: Map PackageName (Set PackageName)
, rsShadowed :: Map PackageName (Set PackageIdentifier)
, rsToInstall :: Map PackageName (Version, Map FlagName Bool)
, rsUsedBy :: Map PackageName (Set PackageName)
}
toMiniBuildPlan :: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, MonadMask m, HasConfig env, MonadBaseControl IO m)
=> CompilerVersion -- ^ Compiler version
-> Map PackageName Version -- ^ cores
-> Map PackageName (Version, Map FlagName Bool, [Text], Maybe GitSHA1) -- ^ non-core packages
-> m MiniBuildPlan
toMiniBuildPlan compilerVersion corePackages packages = do
-- Determine the dependencies of all of the packages in the build plan. We
-- handle core packages specially, because some of them will not be in the
-- package index. For those, we allow missing packages to exist, and then
-- remove those from the list of dependencies, since there's no way we'll
-- ever reinstall them anyway.
(cores, missingCores) <- addDeps True compilerVersion
$ fmap (, Map.empty, [], Nothing) corePackages
(extras, missing) <- addDeps False compilerVersion packages
assert (Set.null missing) $ return MiniBuildPlan
{ mbpCompilerVersion = compilerVersion
, mbpPackages = Map.unions
[ fmap (removeMissingDeps (Map.keysSet cores)) cores
, extras
, Map.fromList $ map goCore $ Set.toList missingCores
]
}
where
goCore (PackageIdentifier name version) = (name, MiniPackageInfo
{ mpiVersion = version
, mpiFlags = Map.empty
, mpiGhcOptions = []
, mpiPackageDeps = Set.empty
, mpiToolDeps = Set.empty
, mpiExes = Set.empty
, mpiHasLibrary = True
, mpiGitSHA1 = Nothing
})
removeMissingDeps cores mpi = mpi
{ mpiPackageDeps = Set.intersection cores (mpiPackageDeps mpi)
}
-- | Add in the resolved dependencies from the package index
addDeps :: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, MonadMask m, HasConfig env, MonadBaseControl IO m)
=> Bool -- ^ allow missing
-> CompilerVersion -- ^ Compiler version
-> Map PackageName (Version, Map FlagName Bool, [Text], Maybe GitSHA1)
-> m (Map PackageName MiniPackageInfo, Set PackageIdentifier)
addDeps allowMissing compilerVersion toCalc = do
menv <- getMinimalEnvOverride
platform <- asks $ configPlatform . getConfig
(resolvedMap, missingIdents) <-
if allowMissing
then do
(missingNames, missingIdents, m) <-
resolvePackagesAllowMissing shaMap Set.empty
assert (Set.null missingNames)
$ return (m, missingIdents)
else do
m <- resolvePackages menv shaMap Set.empty
return (m, Set.empty)
let byIndex = Map.fromListWith (++) $ flip map (Map.toList resolvedMap)
$ \(ident, rp) ->
let (cache, ghcOptions, sha) =
case Map.lookup (packageIdentifierName ident) toCalc of
Nothing -> (Map.empty, [], Nothing)
Just (_, x, y, z) -> (x, y, z)
in (indexName $ rpIndex rp,
[( ident
, rpCache rp
, sha
, (cache, ghcOptions, sha)
)])
res <- forM (Map.toList byIndex) $ \(indexName', pkgs) -> withCabalFiles indexName' pkgs
$ \ident (flags, ghcOptions, mgitSha) cabalBS -> do
(_warnings,gpd) <- readPackageUnresolvedBS Nothing cabalBS
let packageConfig = PackageConfig
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = False
, packageConfigFlags = flags
, packageConfigGhcOptions = ghcOptions
, packageConfigCompilerVersion = compilerVersion
, packageConfigPlatform = platform
}
name = packageIdentifierName ident
pd = resolvePackageDescription packageConfig gpd
exes = Set.fromList $ map (ExeName . T.pack . exeName) $ executables pd
notMe = Set.filter (/= name) . Map.keysSet
return (name, MiniPackageInfo
{ mpiVersion = packageIdentifierVersion ident
, mpiFlags = flags
, mpiGhcOptions = ghcOptions
, mpiPackageDeps = notMe $ packageDependencies pd
, mpiToolDeps = Map.keysSet $ packageToolDependencies pd
, mpiExes = exes
, mpiHasLibrary = maybe
False
(buildable . libBuildInfo)
(library pd)
, mpiGitSHA1 = mgitSha
})
return (Map.fromList $ concat res, missingIdents)
where
shaMap = Map.fromList
$ map (\(n, (v, _f, _ghcOptions, gitsha)) -> (PackageIdentifier n v, gitsha))
$ Map.toList toCalc
-- | Resolve all packages necessary to install for the needed packages.
getDeps :: MiniBuildPlan
-> (PackageName -> Bool) -- ^ is it shadowed by a local package?
-> Map PackageName (Set PackageName)
-> ResolveState
getDeps mbp isShadowed packages =
execState (mapM_ (uncurry goName) $ Map.toList packages) ResolveState
{ rsVisited = Map.empty
, rsUnknown = Map.empty
, rsShadowed = Map.empty
, rsToInstall = Map.empty
, rsUsedBy = Map.empty
}
where
toolMap = getToolMap mbp
-- | Returns a set of shadowed packages we depend on.
goName :: PackageName -> Set PackageName -> State ResolveState (Set PackageName)
goName name users = do
        -- Even though we could check rsVisited first and short-circuit
        -- earlier, we look up in mbpPackages first so that we can produce
        -- more useful error information on missing dependencies.
rs <- get
put rs
{ rsUsedBy = Map.insertWith Set.union name users $ rsUsedBy rs
}
case Map.lookup name $ mbpPackages mbp of
Nothing -> do
modify $ \rs' -> rs'
{ rsUnknown = Map.insertWith Set.union name users $ rsUnknown rs'
}
return Set.empty
Just mpi -> case Map.lookup name (rsVisited rs) of
Just shadowed -> return shadowed
Nothing -> do
put rs { rsVisited = Map.insert name Set.empty $ rsVisited rs }
let depsForTools = Set.unions $ mapMaybe (flip Map.lookup toolMap) (Set.toList $ mpiToolDeps mpi)
let deps = Set.filter (/= name) (mpiPackageDeps mpi <> depsForTools)
shadowed <- fmap F.fold $ Tr.forM (Set.toList deps) $ \dep ->
if isShadowed dep
then do
modify $ \rs' -> rs'
{ rsShadowed = Map.insertWith
Set.union
dep
(Set.singleton $ PackageIdentifier name (mpiVersion mpi))
(rsShadowed rs')
}
return $ Set.singleton dep
else do
shadowed <- goName dep (Set.singleton name)
let m = Map.fromSet (\_ -> Set.singleton $ PackageIdentifier name (mpiVersion mpi)) shadowed
modify $ \rs' -> rs'
{ rsShadowed = Map.unionWith Set.union m $ rsShadowed rs'
}
return shadowed
modify $ \rs' -> rs'
{ rsToInstall = Map.insert name (mpiVersion mpi, mpiFlags mpi) $ rsToInstall rs'
, rsVisited = Map.insert name shadowed $ rsVisited rs'
}
return shadowed
-- | Map from tool name to package providing it
getToolMap :: MiniBuildPlan -> Map Text (Set PackageName)
getToolMap mbp =
Map.unionsWith Set.union
{- We no longer do this, following discussion at:
https://github.com/commercialhaskell/stack/issues/308#issuecomment-112076704
-- First grab all of the package names, for times where a build tool is
-- identified by package name
$ Map.fromList (map (packageNameByteString &&& Set.singleton) (Map.keys ps))
-}
-- And then get all of the explicit executable names
$ concatMap goPair (Map.toList ps)
where
ps = mbpPackages mbp
goPair (pname, mpi) =
map (flip Map.singleton (Set.singleton pname) . unExeName)
$ Set.toList
$ mpiExes mpi
loadResolver
:: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, HasGHCVariant env, MonadBaseControl IO m, MonadMask m)
=> Maybe (Path Abs File)
-> Resolver
-> m (MiniBuildPlan, LoadedResolver)
loadResolver mconfigPath resolver =
case resolver of
ResolverSnapshot snap ->
liftM (, ResolverSnapshot snap) $ loadMiniBuildPlan snap
-- TODO(mgsloan): Not sure what this FIXME means
-- FIXME instead of passing the stackYaml dir we should maintain
-- the file URL in the custom resolver always relative to stackYaml.
ResolverCustom name url -> do
(mbp, hash) <- parseCustomMiniBuildPlan mconfigPath url
return (mbp, ResolverCustomLoaded name url hash)
ResolverCompiler compiler -> return
( MiniBuildPlan
{ mbpCompilerVersion = compiler
, mbpPackages = mempty
}
, ResolverCompiler compiler
)
-- | Load up a 'MiniBuildPlan', preferably from cache
loadMiniBuildPlan
:: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, HasGHCVariant env, MonadBaseControl IO m, MonadMask m)
=> SnapName
-> m MiniBuildPlan
loadMiniBuildPlan name = do
path <- configMiniBuildPlanCache name
$(versionedDecodeOrLoad miniBuildPlanVC) path $ liftM buildPlanFixes $ do
bp <- loadBuildPlan name
toMiniBuildPlan
(siCompilerVersion $ bpSystemInfo bp)
(siCorePackages $ bpSystemInfo bp)
(fmap goPP $ bpPackages bp)
where
goPP pp =
( ppVersion pp
, pcFlagOverrides $ ppConstraints pp
-- TODO: store ghc options in BuildPlan?
, []
, ppCabalFileInfo pp
>>= fmap (GitSHA1 . encodeUtf8)
. Map.lookup "GitSHA1"
. cfiHashes
)
-- | Some hard-coded fixes for build plans, hopefully to be irrelevant over
-- time.
buildPlanFixes :: MiniBuildPlan -> MiniBuildPlan
buildPlanFixes mbp = mbp
{ mbpPackages = Map.fromList $ map go $ Map.toList $ mbpPackages mbp
}
where
go (name, mpi) =
(name, mpi
{ mpiFlags = goF (packageNameString name) (mpiFlags mpi)
})
goF "persistent-sqlite" = Map.insert $(mkFlagName "systemlib") False
goF "yaml" = Map.insert $(mkFlagName "system-libyaml") False
goF _ = id
-- | Load the 'BuildPlan' for the given snapshot. Will load from a local copy
-- if available, otherwise downloading from Github.
loadBuildPlan :: (MonadIO m, MonadThrow m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env)
=> SnapName
-> m BuildPlan
loadBuildPlan name = do
env <- ask
let stackage = getStackRoot env
file' <- parseRelFile $ T.unpack file
let fp = buildPlanDir stackage </> file'
$logDebug $ "Decoding build plan from: " <> T.pack (toFilePath fp)
eres <- liftIO $ decodeFileEither $ toFilePath fp
case eres of
Right bp -> return bp
Left e -> do
$logDebug $ "Decoding build plan from file failed: " <> T.pack (show e)
ensureDir (parent fp)
url <- buildBuildPlanUrl name file
req <- parseRequest $ T.unpack url
$logSticky $ "Downloading " <> renderSnapName name <> " build plan ..."
$logDebug $ "Downloading build plan from: " <> url
_ <- redownload req fp
$logStickyDone $ "Downloaded " <> renderSnapName name <> " build plan."
liftIO (decodeFileEither $ toFilePath fp) >>= either throwM return
where
file = renderSnapName name <> ".yaml"
buildBuildPlanUrl :: (MonadReader env m, HasConfig env) => SnapName -> Text -> m Text
buildBuildPlanUrl name file = do
urls <- asks (configUrls . getConfig)
return $
case name of
LTS _ _ -> urlsLtsBuildPlans urls <> "/" <> file
Nightly _ -> urlsNightlyBuildPlans urls <> "/" <> file
gpdPackages :: [GenericPackageDescription] -> Map PackageName Version
gpdPackages gpds = Map.fromList $
map (fromCabalIdent . C.package . C.packageDescription) gpds
where
fromCabalIdent (C.PackageIdentifier name version) =
(fromCabalPackageName name, fromCabalVersion version)
gpdPackageName :: GenericPackageDescription -> PackageName
gpdPackageName = fromCabalPackageName
. C.pkgName
. C.package
. C.packageDescription
gpdPackageDeps
:: GenericPackageDescription
-> CompilerVersion
-> Platform
-> Map FlagName Bool
-> Map PackageName VersionRange
gpdPackageDeps gpd cv platform flags =
Map.filterWithKey (const . (/= name)) (packageDependencies pkgDesc)
where
name = gpdPackageName gpd
pkgDesc = resolvePackageDescription pkgConfig gpd
pkgConfig = PackageConfig
{ packageConfigEnableTests = True
, packageConfigEnableBenchmarks = True
, packageConfigFlags = flags
, packageConfigGhcOptions = []
, packageConfigCompilerVersion = cv
, packageConfigPlatform = platform
}
-- Remove any src package flags having default values
-- Remove any package entries with no flags set
removeSrcPkgDefaultFlags :: [C.GenericPackageDescription]
-> Map PackageName (Map FlagName Bool)
-> Map PackageName (Map FlagName Bool)
removeSrcPkgDefaultFlags gpds flags =
let defaults = Map.unions (map gpdDefaultFlags gpds)
flags' = Map.differenceWith removeSame flags defaults
in Map.filter (not . Map.null) flags'
where
removeSame f1 f2 =
let diff v v' = if v == v' then Nothing else Just v
in Just $ Map.differenceWith diff f1 f2
gpdDefaultFlags gpd =
let tuples = map getDefault (C.genPackageFlags gpd)
in Map.singleton (gpdPackageName gpd) (Map.fromList tuples)
flagName' = fromCabalFlagName . C.flagName
getDefault f
| C.flagDefault f = (flagName' f, True)
| otherwise = (flagName' f, False)
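-- A small worked example (illustrative only, not taken from the original
-- source): suppose the user sets flags
--
-- >   fromList [(foo, fromList [(f1, True), (f2, True)])]
--
-- and the 'GenericPackageDescription' for @foo@ declares defaults
-- @f1 = True@ and @f2 = False@. The @f1@ entry is dropped as redundant by
-- 'Map.differenceWith', leaving only @f2 = True@; had every flag matched
-- its default, the whole @foo@ entry would have been removed by the final
-- 'Map.filter (not . Map.null)'.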
-- | Find the set of @FlagName@s necessary to get the given
-- @GenericPackageDescription@ to compile against the given @BuildPlan@. Will
-- only modify non-manual flags, and will prefer default values for flags.
-- Returns the plan which produces the fewest dependency errors.
selectPackageBuildPlan
:: Platform
-> CompilerVersion
-> Map PackageName Version
-> GenericPackageDescription
-> (Map PackageName (Map FlagName Bool), DepErrors)
selectPackageBuildPlan platform compiler pool gpd =
(selectPlan . limitSearchSpace . NonEmpty.map makePlan) flagCombinations
where
selectPlan :: NonEmpty (a, DepErrors) -> (a, DepErrors)
selectPlan = F.foldr1 fewerErrors
where
fewerErrors p1 p2
| nErrors p1 == 0 = p1
| nErrors p1 <= nErrors p2 = p1
| otherwise = p2
where nErrors = Map.size . snd
-- Avoid exponential complexity in flag combinations making us sad pandas.
-- See: https://github.com/commercialhaskell/stack/issues/543
limitSearchSpace :: NonEmpty a -> NonEmpty a
limitSearchSpace (x :| xs) = x :| take (maxFlagCombinations - 1) xs
where maxFlagCombinations = 128
makePlan :: [(FlagName, Bool)] -> (Map PackageName (Map FlagName Bool), DepErrors)
makePlan flags = checkPackageBuildPlan platform compiler pool (Map.fromList flags) gpd
flagCombinations :: NonEmpty [(FlagName, Bool)]
flagCombinations = mapM getOptions (genPackageFlags gpd)
where
getOptions :: C.Flag -> NonEmpty (FlagName, Bool)
getOptions f
| flagManual f = (fname, flagDefault f) :| []
| flagDefault f = (fname, True) :| [(fname, False)]
| otherwise = (fname, False) :| [(fname, True)]
where fname = (fromCabalFlagName . flagName) f
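-- An illustrative note (not part of the original code) on how
-- 'flagCombinations' enumerates candidate plans: 'mapM' over 'NonEmpty'
-- takes the cartesian product of the per-flag option lists, preferred
-- choice first. Assuming a package with a manual flag @m@ defaulting to
-- True and an automatic flag @a@ defaulting to False:
--
-- >   getOptions m     == (m, True)  :| []
-- >   getOptions a     == (a, False) :| [(a, True)]
-- >   flagCombinations == [(m, True), (a, False)] :| [[(m, True), (a, True)]]
--
-- 'limitSearchSpace' then caps the list at 128 combinations before
-- 'selectPlan' keeps the plan with the fewest dependency errors.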
-- | Check whether with the given set of flags a package's dependency
-- constraints can be satisfied against a given build plan or pool of packages.
checkPackageBuildPlan
:: Platform
-> CompilerVersion
-> Map PackageName Version
-> Map FlagName Bool
-> GenericPackageDescription
-> (Map PackageName (Map FlagName Bool), DepErrors)
checkPackageBuildPlan platform compiler pool flags gpd =
(Map.singleton pkg flags, errs)
where
pkg = gpdPackageName gpd
errs = checkPackageDeps pkg constraints pool
constraints = gpdPackageDeps gpd compiler platform flags
-- | Checks if the given package dependencies can be satisfied by the given set
-- of packages. Will fail if a package is either missing or has a version
-- outside of the version range.
checkPackageDeps :: PackageName -- ^ package using dependencies, for constructing DepErrors
-> Map PackageName VersionRange -- ^ dependency constraints
-> Map PackageName Version -- ^ Available package pool or index
-> DepErrors
checkPackageDeps myName deps packages =
Map.unionsWith combineDepError $ map go $ Map.toList deps
where
go :: (PackageName, VersionRange) -> DepErrors
go (name, range) =
case Map.lookup name packages of
Nothing -> Map.singleton name DepError
{ deVersion = Nothing
, deNeededBy = Map.singleton myName range
}
Just v
| withinRange v range -> Map.empty
| otherwise -> Map.singleton name DepError
{ deVersion = Just v
, deNeededBy = Map.singleton myName range
}
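-- For illustration (hypothetical values, shown informally): if @myName@
-- requires @text >= 1.2@ but the pool only provides @text-1.1.1.3@, the
-- result is roughly
--
-- >   fromList [(text, DepError { deVersion = Just 1.1.1.3
-- >                             , deNeededBy = fromList [(myName, ">=1.2")] })]
--
-- while a dependency missing from the pool entirely is reported with
-- @deVersion = Nothing@.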
type DepErrors = Map PackageName DepError
data DepError = DepError
{ deVersion :: !(Maybe Version)
, deNeededBy :: !(Map PackageName VersionRange)
} deriving Show
-- | Combine two 'DepError's for the same 'Version'.
combineDepError :: DepError -> DepError -> DepError
combineDepError (DepError a x) (DepError b y) =
assert (a == b) $ DepError a (Map.unionWith C.intersectVersionRanges x y)
-- | Given a bundle of packages (a list of @GenericPackageDescriptions@'s) to
-- build and an available package pool (snapshot) check whether the bundle's
-- dependencies can be satisfied. If flags is passed as Nothing flag settings
-- will be chosen automatically.
checkBundleBuildPlan
:: Platform
-> CompilerVersion
-> Map PackageName Version
-> Maybe (Map PackageName (Map FlagName Bool))
-> [GenericPackageDescription]
-> (Map PackageName (Map FlagName Bool), DepErrors)
checkBundleBuildPlan platform compiler pool flags gpds =
(Map.unionsWith dupError (map fst plans)
, Map.unionsWith combineDepError (map snd plans))
where
plans = map (pkgPlan flags) gpds
pkgPlan Nothing gpd =
selectPackageBuildPlan platform compiler pool' gpd
pkgPlan (Just f) gpd =
checkPackageBuildPlan platform compiler pool' (flags' f gpd) gpd
flags' f gpd = maybe Map.empty id (Map.lookup (gpdPackageName gpd) f)
pool' = Map.union (gpdPackages gpds) pool
dupError _ _ = error "Bug: Duplicate packages are not expected here"
data BuildPlanCheck =
BuildPlanCheckOk (Map PackageName (Map FlagName Bool))
| BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors
| BuildPlanCheckFail (Map PackageName (Map FlagName Bool)) DepErrors
CompilerVersion
-- | Compare 'BuildPlanCheck', where GT means a better plan.
compareBuildPlanCheck :: BuildPlanCheck -> BuildPlanCheck -> Ordering
compareBuildPlanCheck (BuildPlanCheckPartial _ e1) (BuildPlanCheckPartial _ e2) =
-- Note: order of comparison flipped, since it's better to have fewer errors.
compare (Map.size e2) (Map.size e1)
compareBuildPlanCheck (BuildPlanCheckFail _ e1 _) (BuildPlanCheckFail _ e2 _) =
let numUserPkgs e = Map.size $ Map.unions (Map.elems (fmap deNeededBy e))
in compare (numUserPkgs e2) (numUserPkgs e1)
compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckOk{} = EQ
compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckPartial{} = GT
compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckFail{} = GT
compareBuildPlanCheck BuildPlanCheckPartial{} BuildPlanCheckFail{} = GT
compareBuildPlanCheck _ _ = LT
instance Show BuildPlanCheck where
show BuildPlanCheckOk {} = ""
show (BuildPlanCheckPartial f e) = T.unpack $ showDepErrors f e
show (BuildPlanCheckFail f e c) = T.unpack $ showCompilerErrors f e c
-- | Check a set of 'GenericPackageDescription's and a set of flags against a
-- given snapshot. Returns how well the snapshot satisfies the dependencies of
-- the packages.
checkSnapBuildPlan
:: ( MonadIO m, MonadMask m, MonadLogger m, MonadReader env m
, HasHttpManager env, HasConfig env, HasGHCVariant env
, MonadBaseControl IO m)
=> [GenericPackageDescription]
-> Maybe (Map PackageName (Map FlagName Bool))
-> SnapName
-> m BuildPlanCheck
checkSnapBuildPlan gpds flags snap = do
platform <- asks (configPlatform . getConfig)
mbp <- loadMiniBuildPlan snap
let
compiler = mbpCompilerVersion mbp
snapPkgs = fmap mpiVersion $ mbpPackages mbp
(f, errs) = checkBundleBuildPlan platform compiler snapPkgs flags gpds
cerrs = compilerErrors compiler errs
if Map.null errs then
return $ BuildPlanCheckOk f
else if Map.null cerrs then do
return $ BuildPlanCheckPartial f errs
else
return $ BuildPlanCheckFail f cerrs compiler
where
compilerErrors compiler errs
| whichCompiler compiler == Ghc = ghcErrors errs
-- FIXME not sure how to handle ghcjs boot packages
| otherwise = Map.empty
isGhcWiredIn p _ = p `HashSet.member` wiredInPackages
ghcErrors = Map.filterWithKey isGhcWiredIn
-- | Find a snapshot and set of flags that is compatible with, and matches as
-- closely as possible, the given 'GenericPackageDescription's.
selectBestSnapshot
:: ( MonadIO m, MonadMask m, MonadLogger m, MonadReader env m
, HasHttpManager env, HasConfig env, HasGHCVariant env
, MonadBaseControl IO m)
=> [GenericPackageDescription]
-> NonEmpty SnapName
-> m (SnapName, BuildPlanCheck)
selectBestSnapshot gpds snaps = do
$logInfo $ "Selecting the best among "
<> T.pack (show (NonEmpty.length snaps))
<> " snapshots...\n"
F.foldr1 go (NonEmpty.map getResult snaps)
where
go mold mnew = do
old@(_snap, bpc) <- mold
case bpc of
BuildPlanCheckOk {} -> return old
_ -> fmap (betterSnap old) mnew
getResult snap = do
result <- checkSnapBuildPlan gpds Nothing snap
reportResult result snap
return (snap, result)
betterSnap (s1, r1) (s2, r2)
| compareBuildPlanCheck r1 r2 /= LT = (s1, r1)
| otherwise = (s2, r2)
reportResult BuildPlanCheckOk {} snap = do
$logInfo $ "* Matches " <> renderSnapName snap
$logInfo ""
reportResult r@BuildPlanCheckPartial {} snap = do
$logWarn $ "* Partially matches " <> renderSnapName snap
$logWarn $ indent $ T.pack $ show r
reportResult r@BuildPlanCheckFail {} snap = do
$logWarn $ "* Rejected " <> renderSnapName snap
$logWarn $ indent $ T.pack $ show r
indent t = T.unlines $ fmap (" " <>) (T.lines t)
showItems :: Show a => [a] -> Text
showItems items = T.concat (map formatItem items)
where
formatItem item = T.concat
[ " - "
, T.pack $ show item
, "\n"
]
showPackageFlags :: PackageName -> Map FlagName Bool -> Text
showPackageFlags pkg fl =
if (not $ Map.null fl) then
T.concat
[ " - "
, T.pack $ packageNameString pkg
, ": "
, T.pack $ intercalate ", "
$ map formatFlags (Map.toList fl)
, "\n"
]
else ""
where
formatFlags (f, v) = (show f) ++ " = " ++ (show v)
showMapPackages :: Map PackageName a -> Text
showMapPackages mp = showItems $ Map.keys mp
showCompilerErrors
:: Map PackageName (Map FlagName Bool)
-> DepErrors
-> CompilerVersion
-> Text
showCompilerErrors flags errs compiler =
T.concat
[ compilerVersionText compiler
, " cannot be used for these packages:\n"
, showMapPackages $ Map.unions (Map.elems (fmap deNeededBy errs))
, showDepErrors flags errs -- TODO only in debug mode
]
showDepErrors :: Map PackageName (Map FlagName Bool) -> DepErrors -> Text
showDepErrors flags errs =
T.concat
[ T.concat $ map formatError (Map.toList errs)
, if T.null flagVals then ""
else ("Using package flags:\n" <> flagVals)
]
where
formatError (depName, DepError mversion neededBy) = T.concat
[ showDepVersion depName mversion
, T.concat (map showRequirement (Map.toList neededBy))
]
showDepVersion depName mversion = T.concat
[ T.pack $ packageNameString depName
, case mversion of
Nothing -> " not found"
Just version -> T.concat
[ " version "
, T.pack $ versionString version
, " found"
]
, "\n"
]
showRequirement (user, range) = T.concat
[ " - "
, T.pack $ packageNameString user
, " requires "
, T.pack $ display range
, "\n"
]
flagVals = T.concat (map showFlags userPkgs)
userPkgs = Map.keys $ Map.unions (Map.elems (fmap deNeededBy errs))
showFlags pkg = maybe "" (showPackageFlags pkg) (Map.lookup pkg flags)
shadowMiniBuildPlan :: MiniBuildPlan
-> Set PackageName
-> (MiniBuildPlan, Map PackageName MiniPackageInfo)
shadowMiniBuildPlan (MiniBuildPlan cv pkgs0) shadowed =
(MiniBuildPlan cv (Map.fromList met), Map.fromList unmet)
where
pkgs1 = Map.difference pkgs0 $ Map.fromSet (\_ -> ()) shadowed
depsMet = flip execState Map.empty $ mapM_ (check Set.empty) (Map.keys pkgs1)
check visited name
| name `Set.member` visited =
error $ "shadowMiniBuildPlan: cycle detected, your MiniBuildPlan is broken: " ++ show (visited, name)
| otherwise = do
m <- get
case Map.lookup name m of
Just x -> return x
Nothing ->
case Map.lookup name pkgs1 of
Nothing
| name `Set.member` shadowed -> return False
-- In this case, we have to assume that we're
-- constructing a build plan on a different OS or
-- architecture, and therefore different packages
-- are being chosen. The common example of this is
-- the Win32 package.
| otherwise -> return True
Just mpi -> do
let visited' = Set.insert name visited
ress <- mapM (check visited') (Set.toList $ mpiPackageDeps mpi)
let res = and ress
modify $ \m' -> Map.insert name res m'
return res
(met, unmet) = partitionEithers $ map toEither $ Map.toList pkgs1
toEither pair@(name, _) =
wrapper pair
where
wrapper =
case Map.lookup name depsMet of
Just True -> Left
Just False -> Right
Nothing -> assert False Right
-- This works differently for snapshots fetched from URL and those
-- fetched from file:
--
-- 1) If downloading the snapshot from a URL, assume the fetched data is
-- immutable. Hash the URL in order to determine the location of the
-- cached download. The file contents of the snapshot determine the
-- hash used to look up the cached MBP.
--
-- 2) If loading the snapshot from a file, load all of the involved
-- snapshot files. The hash used to determine the cached MBP is the hash
-- of the concatenation of the parent's hash with the snapshot contents.
--
-- Why this difference? We want to make it easy to simply edit snapshots
-- in the filesystem, but we want caching for remote snapshots. In order
-- to avoid reparsing / reloading all the yaml for remote snapshots, we
-- need a different hash system.
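--
-- As a rough sketch of the two numbered cases above (illustrative
-- pseudocode, not part of the original implementation), with 'doHash'
-- below standing in for SHA256 plus base64url encoding:
--
-- >   -- 1) remote snapshot fetched from a URL
-- >   downloadCacheKey = doHash urlBytes         -- names the cached .yaml
-- >   mbpCacheKey      = doHash snapshotContents -- names the cached MBP
-- >
-- >   -- 2) snapshot loaded from a local file
-- >   mbpCacheKey      = doHash (parentHash <> snapshotContents)
--
-- which corresponds to the uses of 'doHash' in 'downloadCustom' and
-- 'readCustom' below.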
-- TODO: This could probably be more efficient if it first merged the
-- custom snapshots, and then applied them to the MBP. It is nice to
-- apply directly, because then we have the guarantee that it's
-- semantically identical to snapshot extension. If this optimization is
-- implemented, note that the direct Monoid for CustomSnapshot is not
-- correct. Crucially, if a package is present in the snapshot, its
-- flags and ghc-options are not based on settings from prior snapshots.
-- TODO: This semantics should be discussed / documented more.
-- TODO: allow a hash check in the resolver. This adds safety /
-- correctness, allowing you to ensure that you are indeed getting the
-- right custom snapshot.
-- TODO: Allow custom plan to specify a name.
parseCustomMiniBuildPlan
:: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, HasGHCVariant env, MonadBaseControl IO m)
=> Maybe (Path Abs File) -- ^ Root directory for when url is a filepath
-> T.Text
-> m (MiniBuildPlan, SnapshotHash)
parseCustomMiniBuildPlan mconfigPath0 url0 = do
$logDebug $ "Loading " <> url0 <> " build plan"
case parseUrlThrow $ T.unpack url0 of
Just req -> downloadCustom url0 req
Nothing ->
case mconfigPath0 of
Nothing -> throwM $ FilepathInDownloadedSnapshot url0
Just configPath -> do
(getMbp, hash) <- readCustom configPath url0
mbp <- getMbp
-- NOTE: We make the choice of only writing a cache
-- file for the full MBP, not the intermediate ones.
                -- This isn't necessarily the best choice if we want
                -- to share work between extended snapshots. I think only
-- writing this one is more efficient for common
-- cases.
binaryPath <- getBinaryPath hash
alreadyCached <- doesFileExist binaryPath
unless alreadyCached $ $(versionedEncodeFile miniBuildPlanVC) binaryPath mbp
return (mbp, hash)
where
downloadCustom url req = do
let urlHash = S8.unpack $ trimmedSnapshotHash $ doHash $ encodeUtf8 url
hashFP <- parseRelFile $ urlHash ++ ".yaml"
customPlanDir <- getCustomPlanDir
let cacheFP = customPlanDir </> $(mkRelDir "yaml") </> hashFP
_ <- download req cacheFP
yamlBS <- liftIO $ S.readFile $ toFilePath cacheFP
let yamlHash = doHash yamlBS
binaryPath <- getBinaryPath yamlHash
liftM (, yamlHash) $ $(versionedDecodeOrLoad miniBuildPlanVC) binaryPath $ do
(cs, mresolver) <- decodeYaml yamlBS
parentMbp <- case (csCompilerVersion cs, mresolver) of
(Nothing, Nothing) -> throwM (NeitherCompilerOrResolverSpecified url)
(Just cv, Nothing) -> return (compilerBuildPlan cv)
-- NOTE: ignoring the parent's hash, even though
                -- there could be one. A URL snapshot's hash is
                -- determined just from its contents.
(_, Just resolver) -> liftM fst (loadResolver Nothing resolver)
applyCustomSnapshot cs parentMbp
readCustom configPath path = do
yamlFP <- resolveFile (parent configPath) (T.unpack $ fromMaybe path $
T.stripPrefix "file://" path <|> T.stripPrefix "file:" path)
yamlBS <- liftIO $ S.readFile $ toFilePath yamlFP
(cs, mresolver) <- decodeYaml yamlBS
(getMbp, hash) <- case mresolver of
Just (ResolverCustom _ url ) ->
case parseUrlThrow $ T.unpack url of
Just req -> do
let getMbp = do
-- Ignore custom hash, under the
-- assumption that the URL is sufficient
-- for identity.
(mbp, _) <- downloadCustom url req
return mbp
return (getMbp, doHash yamlBS)
Nothing -> do
(getMbp0, SnapshotHash hash0) <- readCustom yamlFP url
let hash = doHash (hash0 <> yamlBS)
getMbp = do
binaryPath <- getBinaryPath hash
                                -- The idea here is to not waste time
                                -- writing out intermediate cache files,
                                -- but to check for them.
exists <- doesFileExist binaryPath
if exists
then do
eres <- $(versionedDecodeFile miniBuildPlanVC) binaryPath
case eres of
Just mbp -> return mbp
-- Invalid format cache file, remove.
Nothing -> do
removeFile binaryPath
getMbp0
else getMbp0
return (getMbp, hash)
Just resolver -> do
-- NOTE: in the cases where we don't have a hash, the
-- normal resolver name is enough. Since this name is
-- part of the yaml file, it ends up in our hash.
let hash = doHash yamlBS
getMbp = do
(mbp, resolver') <- loadResolver (Just configPath) resolver
let mhash = customResolverHash resolver'
assert (isNothing mhash) (return mbp)
return (getMbp, hash)
Nothing -> do
case csCompilerVersion cs of
Nothing -> throwM (NeitherCompilerOrResolverSpecified path)
Just cv -> do
let hash = doHash yamlBS
getMbp = return (compilerBuildPlan cv)
return (getMbp, hash)
return (applyCustomSnapshot cs =<< getMbp, hash)
getBinaryPath hash = do
binaryFilename <- parseRelFile $ S8.unpack (trimmedSnapshotHash hash) ++ ".bin"
customPlanDir <- getCustomPlanDir
return $ customPlanDir </> $(mkRelDir "bin") </> binaryFilename
decodeYaml yamlBS = do
WithJSONWarnings res warnings <-
either (throwM . ParseCustomSnapshotException url0) return $
decodeEither' yamlBS
logJSONWarnings (T.unpack url0) warnings
return res
compilerBuildPlan cv = MiniBuildPlan
{ mbpCompilerVersion = cv
, mbpPackages = mempty
}
getCustomPlanDir = do
root <- asks $ configStackRoot . getConfig
return $ root </> $(mkRelDir "custom-plan")
doHash = SnapshotHash . B64URL.encode . SHA256.hash
applyCustomSnapshot
:: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, MonadBaseControl IO m, MonadMask m)
=> CustomSnapshot
-> MiniBuildPlan
-> m MiniBuildPlan
applyCustomSnapshot cs mbp0 = do
let CustomSnapshot mcompilerVersion
packages
dropPackages
(PackageFlags flags)
ghcOptions
= cs
addFlagsAndOpts :: PackageIdentifier -> (PackageName, (Version, Map FlagName Bool, [Text], Maybe GitSHA1))
addFlagsAndOpts (PackageIdentifier name ver) =
( name
, ( ver
, Map.findWithDefault Map.empty name flags
-- NOTE: similar to 'allGhcOptions' in Stack.Types.Build
, ghcOptionsFor name ghcOptions
-- we add a Nothing since we don't yet collect Git SHAs for custom snapshots
, Nothing
)
)
packageMap = Map.fromList $ map addFlagsAndOpts $ Set.toList packages
cv = fromMaybe (mbpCompilerVersion mbp0) mcompilerVersion
packages0 =
mbpPackages mbp0 `Map.difference` (Map.fromSet (\_ -> ()) dropPackages)
mbp1 <- toMiniBuildPlan cv mempty packageMap
return $ MiniBuildPlan
{ mbpCompilerVersion = cv
, mbpPackages = Map.union (mbpPackages mbp1) packages0
}
| AndrewRademacher/stack | src/Stack/BuildPlan.hs | bsd-3-clause | 47,672 | 0 | 32 | 15,870 | 10,666 | 5,479 | 5,187 | 842 | 11 |
{-# LANGUAGE CPP, OverloadedStrings, RecordWildCards, ScopedTypeVariables #-}
import Control.Monad (forM)
import Data.Aeson (eitherDecode)
import Data.Aeson.Encode
import Data.Aeson.Parser (value)
import Data.Aeson.Types
import Data.Char (toUpper)
import Test.Framework (Test, defaultMain, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, assertFailure, assertEqual)
import Test.QuickCheck (Arbitrary(..))
import qualified Data.Vector as V
import qualified Data.Attoparsec.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Text as T
import qualified Data.Text.Lazy.Builder as TLB
import qualified Data.Text.Lazy.Encoding as TLE
import qualified Data.HashMap.Strict as H
import Data.Time.Clock (UTCTime(..))
import Data.Time (ZonedTime(..))
import Instances ()
import Types
import Encoders
import Properties.Deprecated (deprecatedTests)
#ifdef GHC_GENERICS
import Data.Int
import qualified Data.Map as Map
#endif
roundTripCamel :: String -> Assertion
roundTripCamel name = assertEqual "" name (camelFrom '_' $ camelTo '_' name)
where
camelFrom c s = let (p:ps) = split c s
in concat $ p : map capitalize ps
split c s = map L.unpack $ L.split c $ L.pack s
capitalize t = toUpper (head t) : tail t
encodeDouble :: Double -> Double -> Bool
encodeDouble num denom
| isInfinite d || isNaN d = encode d == "null"
| otherwise = (read . L.unpack . encode) d == d
where d = num / denom
encodeInteger :: Integer -> Bool
encodeInteger i = encode i == L.pack (show i)
toParseJSON :: (Arbitrary a, Eq a) => (Value -> Parser a) -> (a -> Value) -> a -> Bool
toParseJSON parsejson tojson x =
case parse parsejson . tojson $ x of
Error _ -> False
Success x' -> x == x'
roundTrip :: (FromJSON a, ToJSON a) => (a -> a -> Bool) -> a -> a -> Bool
roundTrip eq _ i =
case fmap fromJSON . L.parse value . encode . toJSON $ i of
L.Done _ (Success v) -> v `eq` i
_ -> False
roundTripEq :: (Eq a, FromJSON a, ToJSON a) => a -> a -> Bool
roundTripEq x y = roundTrip (==) x y
toFromJSON :: (Arbitrary a, Eq a, FromJSON a, ToJSON a) => a -> Bool
toFromJSON x = case fromJSON . toJSON $ x of
Error _ -> False
Success x' -> x == x'
modifyFailureProp :: String -> String -> Bool
modifyFailureProp orig added =
result == Error (added ++ orig)
where
parser = const $ modifyFailure (added ++) $ fail orig
result :: Result ()
result = parse parser ()
main :: IO ()
main = do
comparisonTest <- encoderComparisonTests
defaultMain (comparisonTest : tests)
#ifdef GHC_GENERICS
type P6 = Product6 Int Bool String (Approx Double) (Int, Approx Double) ()
type S4 = Sum4 Int8 ZonedTime T.Text (Map.Map String Int)
#endif
--------------------------------------------------------------------------------
-- Value properties
--------------------------------------------------------------------------------
isString :: Value -> Bool
isString (String _) = True
isString _ = False
is2ElemArray :: Value -> Bool
is2ElemArray (Array v) = V.length v == 2 && isString (V.head v)
is2ElemArray _ = False
isTaggedObjectValue :: Value -> Bool
isTaggedObjectValue (Object obj) = "tag" `H.member` obj &&
"contents" `H.member` obj
isTaggedObjectValue _ = False
isTaggedObject :: Value -> Bool
isTaggedObject (Object obj) = "tag" `H.member` obj
isTaggedObject _ = False
isObjectWithSingleField :: Value -> Bool
isObjectWithSingleField (Object obj) = H.size obj == 1
isObjectWithSingleField _ = False
--------------------------------------------------------------------------------
tests :: [Test]
tests = [
testGroup "encode" [
testProperty "encodeDouble" encodeDouble
, testProperty "encodeInteger" encodeInteger
],
testGroup "camelCase" [
testCase "camelTo" $ roundTripCamel "aName"
, testCase "camelTo" $ roundTripCamel "another"
, testCase "camelTo" $ roundTripCamel "someOtherName"
],
testGroup "roundTrip" [
testProperty "Bool" $ roundTripEq True
, testProperty "Double" $ roundTripEq (1 :: Approx Double)
, testProperty "Int" $ roundTripEq (1::Int)
, testProperty "Integer" $ roundTripEq (1::Integer)
, testProperty "String" $ roundTripEq (""::String)
, testProperty "Text" $ roundTripEq T.empty
, testProperty "Foo" $ roundTripEq (undefined::Foo)
, testProperty "DotNetTime" $ roundTripEq (undefined :: DotNetTime)
, testProperty "UTCTime" $ roundTripEq (undefined :: UTCTime)
, testProperty "ZonedTime" $ roundTripEq (undefined::ZonedTime)
#ifdef GHC_GENERICS
, testGroup "ghcGenerics" [
testProperty "OneConstructor" $ roundTripEq OneConstructor
, testProperty "Product2" $ roundTripEq (undefined :: Product2 Int Bool)
, testProperty "Product6" $ roundTripEq (undefined :: P6)
, testProperty "Sum4" $ roundTripEq (undefined :: S4)
]
#endif
],
testGroup "toFromJSON" [
testProperty "Integer" (toFromJSON :: Integer -> Bool)
, testProperty "Double" (toFromJSON :: Double -> Bool)
, testProperty "Maybe Integer" (toFromJSON :: Maybe Integer -> Bool)
, testProperty "Either Integer Double" (toFromJSON :: Either Integer Double -> Bool)
, testProperty "Either Integer Integer" (toFromJSON :: Either Integer Integer -> Bool)
],
testGroup "deprecated" deprecatedTests,
testGroup "failure messages" [
testProperty "modify failure" modifyFailureProp
],
testGroup "template-haskell" [
testGroup "Nullary" [
testProperty "string" (isString . thNullaryToJSONString)
, testProperty "2ElemArray" (is2ElemArray . thNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (isTaggedObjectValue . thNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . thNullaryToJSONObjectWithSingleField)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON thNullaryParseJSONString thNullaryToJSONString)
, testProperty "2ElemArray" (toParseJSON thNullaryParseJSON2ElemArray thNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON thNullaryParseJSONTaggedObject thNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON thNullaryParseJSONObjectWithSingleField thNullaryToJSONObjectWithSingleField)
]
]
, testGroup "SomeType" [
testProperty "2ElemArray" (is2ElemArray . (thSomeTypeToJSON2ElemArray :: SomeTypeToJSON))
, testProperty "TaggedObject" (isTaggedObject . (thSomeTypeToJSONTaggedObject :: SomeTypeToJSON))
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . (thSomeTypeToJSONObjectWithSingleField :: SomeTypeToJSON))
, testGroup "roundTrip" [
testProperty "2ElemArray" (toParseJSON thSomeTypeParseJSON2ElemArray (thSomeTypeToJSON2ElemArray :: SomeTypeToJSON))
, testProperty "TaggedObject" (toParseJSON thSomeTypeParseJSONTaggedObject (thSomeTypeToJSONTaggedObject :: SomeTypeToJSON))
, testProperty "ObjectWithSingleField" (toParseJSON thSomeTypeParseJSONObjectWithSingleField (thSomeTypeToJSONObjectWithSingleField :: SomeTypeToJSON))
]
]
]
#ifdef GHC_GENERICS
, testGroup "GHC-generics" [
testGroup "Nullary" [
testProperty "string" (isString . gNullaryToJSONString)
, testProperty "2ElemArray" (is2ElemArray . gNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (isTaggedObjectValue . gNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . gNullaryToJSONObjectWithSingleField)
, testGroup "eq" [
testProperty "string" (\n -> gNullaryToJSONString n == thNullaryToJSONString n)
, testProperty "2ElemArray" (\n -> gNullaryToJSON2ElemArray n == thNullaryToJSON2ElemArray n)
, testProperty "TaggedObject" (\n -> gNullaryToJSONTaggedObject n == thNullaryToJSONTaggedObject n)
, testProperty "ObjectWithSingleField" (\n -> gNullaryToJSONObjectWithSingleField n == thNullaryToJSONObjectWithSingleField n)
]
, testGroup "roundTrip" [
testProperty "string" (toParseJSON gNullaryParseJSONString gNullaryToJSONString)
, testProperty "2ElemArray" (toParseJSON gNullaryParseJSON2ElemArray gNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON gNullaryParseJSONTaggedObject gNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON gNullaryParseJSONObjectWithSingleField gNullaryToJSONObjectWithSingleField)
]
]
, testGroup "SomeType" [
testProperty "2ElemArray" (is2ElemArray . (gSomeTypeToJSON2ElemArray :: SomeTypeToJSON))
, testProperty "TaggedObject" (isTaggedObject . (gSomeTypeToJSONTaggedObject :: SomeTypeToJSON))
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . (gSomeTypeToJSONObjectWithSingleField :: SomeTypeToJSON))
, testGroup "eq" [
testProperty "2ElemArray" (\n -> (gSomeTypeToJSON2ElemArray :: SomeTypeToJSON) n == thSomeTypeToJSON2ElemArray n)
, testProperty "TaggedObject" (\n -> (gSomeTypeToJSONTaggedObject :: SomeTypeToJSON) n == thSomeTypeToJSONTaggedObject n)
, testProperty "ObjectWithSingleField" (\n -> (gSomeTypeToJSONObjectWithSingleField :: SomeTypeToJSON) n == thSomeTypeToJSONObjectWithSingleField n)
]
, testGroup "roundTrip" [
testProperty "2ElemArray" (toParseJSON gSomeTypeParseJSON2ElemArray (gSomeTypeToJSON2ElemArray :: SomeTypeToJSON))
, testProperty "TaggedObject" (toParseJSON gSomeTypeParseJSONTaggedObject (gSomeTypeToJSONTaggedObject :: SomeTypeToJSON))
, testProperty "ObjectWithSingleField" (toParseJSON gSomeTypeParseJSONObjectWithSingleField (gSomeTypeToJSONObjectWithSingleField :: SomeTypeToJSON))
]
]
]
#endif
]
------------------------------------------------------------------------------
-- Comparison between bytestring and text encoders
------------------------------------------------------------------------------
encoderComparisonTests :: IO Test
encoderComparisonTests = do
encoderTests <- forM testFiles $ \file0 -> do
let file = "benchmarks/json-data/" ++ file0
return $ testCase file $ do
inp <- L.readFile file
case eitherDecode inp of
Left err -> assertFailure $ "Decoding failure: " ++ err
Right val -> assertEqual "" (encode val) (encodeViaText val)
return $ testGroup "Compare bytestring and text encoders" encoderTests
where
encodeViaText :: Value -> L.ByteString
encodeViaText =
TLE.encodeUtf8 . TLB.toLazyText . encodeToTextBuilder . toJSON
testFiles =
[ "example.json"
, "integers.json"
, "jp100.json"
, "numbers.json"
, "twitter10.json"
, "twitter20.json"
, "geometry.json"
, "jp10.json"
, "jp50.json"
, "twitter1.json"
, "twitter100.json"
, "twitter50.json"
]
| maximkulkin/aeson | tests/Properties.hs | bsd-3-clause | 12,070 | 9 | 20 | 3,068 | 2,760 | 1,464 | 1,296 | 154 | 2 |
module Shared.Image where
import qualified Graphics.UI.SDL as SDL
import Shared.Lifecycle
import Foreign.Ptr
import Foreign.C.String
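-- | Load an image file from disk and return the raw SDL surface pointer,
-- throwing (via 'throwSDLError') if SDL_image reports a failure.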
getSurfaceFrom' :: FilePath -> IO (Ptr SDL.Surface)
getSurfaceFrom' path = imgLoadSurface path >>= either throwSDLError return
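-- | Thin wrapper over SDL_image's @IMG_Load@: returns 'Left' with a generic
-- error message when the C call hands back a null pointer.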
imgLoadSurface :: String -> IO (Either String (Ptr SDL.Surface))
imgLoadSurface file
= withCString file $ \cFile -> do
tex <- _imgLoad cFile
if tex == nullPtr
then return (Left "IMG_LoadSurface(): Unknown error!")
else return (Right tex)
foreign import ccall unsafe "IMG_Load" _imgLoad :: CString -> IO (Ptr SDL.Surface)
| oldmanmike/haskellSDL2Examples | src/shared/image.hs | gpl-2.0 | 631 | 0 | 12 | 128 | 191 | 99 | 92 | 15 | 2 |
{-# LANGUAGE ScopedTypeVariables, CPP, ForeignFunctionInterface,
MagicHash, UnboxedTuples #-}
{-# OPTIONS_HADDOCK hide #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-- | Copyright : (c) 2010 - 2011 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC
--
-- Constructing 'Builder's using ASCII-based encodings.
--
module Data.ByteString.Builder.ASCII
(
-- ** ASCII text
-- *** Decimal numbers
-- | Decimal encoding of numbers using ASCII encoded characters.
int8Dec
, int16Dec
, int32Dec
, int64Dec
, intDec
, integerDec
, word8Dec
, word16Dec
, word32Dec
, word64Dec
, wordDec
, floatDec
, doubleDec
-- *** Hexadecimal numbers
-- | Encoding positive integers as hexadecimal numbers using lower-case
-- ASCII characters. The shortest
-- possible representation is used. For example,
--
-- >>> toLazyByteString (word16Hex 0x0a10)
-- Chunk "a10" Empty
--
-- Note that there is no support for using upper-case characters. Please
-- contact the maintainer, if your application cannot work without
-- hexadecimal encodings that use upper-case characters.
--
, word8Hex
, word16Hex
, word32Hex
, word64Hex
, wordHex
-- *** Fixed-width hexadecimal numbers
--
, int8HexFixed
, int16HexFixed
, int32HexFixed
, int64HexFixed
, word8HexFixed
, word16HexFixed
, word32HexFixed
, word64HexFixed
, floatHexFixed
, doubleHexFixed
, byteStringHex
, lazyByteStringHex
) where
import Data.ByteString as S
import Data.ByteString.Lazy as L
import Data.ByteString.Builder.Internal (Builder)
import qualified Data.ByteString.Builder.Prim as P
import Foreign
#if defined(__GLASGOW_HASKELL__) && defined(INTEGER_GMP)
import Data.Monoid (mappend)
import Foreign.C.Types
import qualified Data.ByteString.Builder.Prim.Internal as P
import Data.ByteString.Builder.Prim.Internal.UncheckedShifts
( caseWordSize_32_64 )
import GHC.Num (quotRemInteger)
import GHC.Types (Int(..))
# if __GLASGOW_HASKELL__ < 611
import GHC.Integer.Internals
# else
import GHC.Integer.GMP.Internals
# endif
#endif
------------------------------------------------------------------------------
-- Decimal Encoding
------------------------------------------------------------------------------
-- | Encode a 'String' using 'P.char7'.
{-# INLINE string7 #-}
string7 :: String -> Builder
string7 = P.primMapListFixed P.char7
------------------------------------------------------------------------------
-- Decimal Encoding
------------------------------------------------------------------------------
-- Signed integers
------------------
-- | Decimal encoding of an 'Int8' using the ASCII digits.
--
-- e.g.
--
-- > toLazyByteString (int8Dec 42) = "42"
-- > toLazyByteString (int8Dec (-1)) = "-1"
--
{-# INLINE int8Dec #-}
int8Dec :: Int8 -> Builder
int8Dec = P.primBounded P.int8Dec
-- | Decimal encoding of an 'Int16' using the ASCII digits.
{-# INLINE int16Dec #-}
int16Dec :: Int16 -> Builder
int16Dec = P.primBounded P.int16Dec
-- | Decimal encoding of an 'Int32' using the ASCII digits.
{-# INLINE int32Dec #-}
int32Dec :: Int32 -> Builder
int32Dec = P.primBounded P.int32Dec
-- | Decimal encoding of an 'Int64' using the ASCII digits.
{-# INLINE int64Dec #-}
int64Dec :: Int64 -> Builder
int64Dec = P.primBounded P.int64Dec
-- | Decimal encoding of an 'Int' using the ASCII digits.
{-# INLINE intDec #-}
intDec :: Int -> Builder
intDec = P.primBounded P.intDec
-- Unsigned integers
--------------------
-- | Decimal encoding of a 'Word8' using the ASCII digits.
{-# INLINE word8Dec #-}
word8Dec :: Word8 -> Builder
word8Dec = P.primBounded P.word8Dec
-- | Decimal encoding of a 'Word16' using the ASCII digits.
{-# INLINE word16Dec #-}
word16Dec :: Word16 -> Builder
word16Dec = P.primBounded P.word16Dec
-- | Decimal encoding of a 'Word32' using the ASCII digits.
{-# INLINE word32Dec #-}
word32Dec :: Word32 -> Builder
word32Dec = P.primBounded P.word32Dec
-- | Decimal encoding of a 'Word64' using the ASCII digits.
{-# INLINE word64Dec #-}
word64Dec :: Word64 -> Builder
word64Dec = P.primBounded P.word64Dec
-- | Decimal encoding of a 'Word' using the ASCII digits.
{-# INLINE wordDec #-}
wordDec :: Word -> Builder
wordDec = P.primBounded P.wordDec
-- Floating point numbers
-------------------------
-- TODO: Use Bryan O'Sullivan's double-conversion package to speed it up.
-- | /Currently slow./ Decimal encoding of an IEEE 'Float'.
{-# INLINE floatDec #-}
floatDec :: Float -> Builder
floatDec = string7 . show
-- | /Currently slow./ Decimal encoding of an IEEE 'Double'.
{-# INLINE doubleDec #-}
doubleDec :: Double -> Builder
doubleDec = string7 . show
------------------------------------------------------------------------------
-- Hexadecimal Encoding
------------------------------------------------------------------------------
-- without lead
---------------
-- | Shortest hexadecimal encoding of a 'Word8' using lower-case characters.
{-# INLINE word8Hex #-}
word8Hex :: Word8 -> Builder
word8Hex = P.primBounded P.word8Hex
-- | Shortest hexadecimal encoding of a 'Word16' using lower-case characters.
{-# INLINE word16Hex #-}
word16Hex :: Word16 -> Builder
word16Hex = P.primBounded P.word16Hex
-- | Shortest hexadecimal encoding of a 'Word32' using lower-case characters.
{-# INLINE word32Hex #-}
word32Hex :: Word32 -> Builder
word32Hex = P.primBounded P.word32Hex
-- | Shortest hexadecimal encoding of a 'Word64' using lower-case characters.
{-# INLINE word64Hex #-}
word64Hex :: Word64 -> Builder
word64Hex = P.primBounded P.word64Hex
-- | Shortest hexadecimal encoding of a 'Word' using lower-case characters.
{-# INLINE wordHex #-}
wordHex :: Word -> Builder
wordHex = P.primBounded P.wordHex
-- fixed width; leading zeroes
------------------------------
-- | Encode a 'Int8' using 2 nibbles (hexadecimal digits).
{-# INLINE int8HexFixed #-}
int8HexFixed :: Int8 -> Builder
int8HexFixed = P.primFixed P.int8HexFixed
-- | Encode a 'Int16' using 4 nibbles.
{-# INLINE int16HexFixed #-}
int16HexFixed :: Int16 -> Builder
int16HexFixed = P.primFixed P.int16HexFixed
-- | Encode a 'Int32' using 8 nibbles.
{-# INLINE int32HexFixed #-}
int32HexFixed :: Int32 -> Builder
int32HexFixed = P.primFixed P.int32HexFixed
-- | Encode a 'Int64' using 16 nibbles.
{-# INLINE int64HexFixed #-}
int64HexFixed :: Int64 -> Builder
int64HexFixed = P.primFixed P.int64HexFixed
-- | Encode a 'Word8' using 2 nibbles (hexadecimal digits).
{-# INLINE word8HexFixed #-}
word8HexFixed :: Word8 -> Builder
word8HexFixed = P.primFixed P.word8HexFixed
-- | Encode a 'Word16' using 4 nibbles.
{-# INLINE word16HexFixed #-}
word16HexFixed :: Word16 -> Builder
word16HexFixed = P.primFixed P.word16HexFixed
-- | Encode a 'Word32' using 8 nibbles.
{-# INLINE word32HexFixed #-}
word32HexFixed :: Word32 -> Builder
word32HexFixed = P.primFixed P.word32HexFixed
-- | Encode a 'Word64' using 16 nibbles.
{-# INLINE word64HexFixed #-}
word64HexFixed :: Word64 -> Builder
word64HexFixed = P.primFixed P.word64HexFixed
-- | Encode an IEEE 'Float' using 8 nibbles.
{-# INLINE floatHexFixed #-}
floatHexFixed :: Float -> Builder
floatHexFixed = P.primFixed P.floatHexFixed
-- | Encode an IEEE 'Double' using 16 nibbles.
{-# INLINE doubleHexFixed #-}
doubleHexFixed :: Double -> Builder
doubleHexFixed = P.primFixed P.doubleHexFixed
-- | Encode each byte of a 'S.ByteString' using its fixed-width hex encoding.
{-# NOINLINE byteStringHex #-} -- share code
byteStringHex :: S.ByteString -> Builder
byteStringHex = P.primMapByteStringFixed P.word8HexFixed
-- | Encode each byte of a lazy 'L.ByteString' using its fixed-width hex encoding.
{-# NOINLINE lazyByteStringHex #-} -- share code
lazyByteStringHex :: L.ByteString -> Builder
lazyByteStringHex = P.primMapLazyByteStringFixed P.word8HexFixed
------------------------------------------------------------------------------
-- Fast decimal 'Integer' encoding.
------------------------------------------------------------------------------
#if defined(__GLASGOW_HASKELL__) && defined(INTEGER_GMP)
-- An optimized version of the integer serialization code
-- in blaze-textual (c) 2011 MailRank, Inc. Bryan O'Sullivan
-- <bos@mailrank.com>. It is 2.5x faster on Int-sized integers and 4.5x faster
-- on larger integers.
# define PAIR(a,b) (# a,b #)
-- | Maximal power of 10 fitting into an 'Int' without using the MSB.
-- 10 ^ 9 for 32 bit ints (31 * log 2 / log 10 = 9.33)
-- 10 ^ 18 for 64 bit ints (63 * log 2 / log 10 = 18.96)
--
-- FIXME: Think about also using the MSB. For 64 bit 'Int's this makes a
-- difference.
maxPow10 :: Integer
maxPow10 = toInteger $ (10 :: Int) ^ caseWordSize_32_64 (9 :: Int) 18
-- | Decimal encoding of an 'Integer' using the ASCII digits.
integerDec :: Integer -> Builder
integerDec (S# i#) = intDec (I# i#)
integerDec i
| i < 0 = P.primFixed P.char8 '-' `mappend` go (-i)
| otherwise = go ( i)
where
errImpossible fun =
error $ "integerDec: " ++ fun ++ ": the impossible happened."
go :: Integer -> Builder
go n | n < maxPow10 = intDec (fromInteger n)
| otherwise =
case putH (splitf (maxPow10 * maxPow10) n) of
(x:xs) -> intDec x `mappend` P.primMapListBounded intDecPadded xs
[] -> errImpossible "integerDec: go"
splitf :: Integer -> Integer -> [Integer]
splitf pow10 n0
| pow10 > n0 = [n0]
| otherwise = splith (splitf (pow10 * pow10) n0)
where
splith [] = errImpossible "splith"
splith (n:ns) =
case n `quotRemInteger` pow10 of
PAIR(q,r) | q > 0 -> q : r : splitb ns
| otherwise -> r : splitb ns
splitb [] = []
splitb (n:ns) = case n `quotRemInteger` pow10 of
PAIR(q,r) -> q : r : splitb ns
putH :: [Integer] -> [Int]
putH [] = errImpossible "putH"
putH (n:ns) = case n `quotRemInteger` maxPow10 of
PAIR(x,y)
| q > 0 -> q : r : putB ns
| otherwise -> r : putB ns
where q = fromInteger x
r = fromInteger y
putB :: [Integer] -> [Int]
putB [] = []
putB (n:ns) = case n `quotRemInteger` maxPow10 of
PAIR(q,r) -> fromInteger q : fromInteger r : putB ns
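-- An illustrative trace (not part of the original source), assuming a
-- 64-bit 'Int' so that maxPow10 = 10^18:
--
-- >   integerDec (10 ^ 19 + 7)
--
-- Here 'splitf' returns its argument unchanged (10^36 already exceeds it),
-- 'putH' cuts it into the base-10^18 pieces [10, 7], renders the head with
-- 'intDec' and every later piece with the zero-padded 'intDecPadded',
-- giving "10" <> "000000000000000007" == "10000000000000000007". Without
-- the padding the tail would collapse to "7" and the result would be the
-- incorrect "107".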
foreign import ccall unsafe "static _hs_bytestring_int_dec_padded9"
c_int_dec_padded9 :: CInt -> Ptr Word8 -> IO ()
foreign import ccall unsafe "static _hs_bytestring_long_long_int_dec_padded18"
c_long_long_int_dec_padded18 :: CLLong -> Ptr Word8 -> IO ()
{-# INLINE intDecPadded #-}
intDecPadded :: P.BoundedPrim Int
intDecPadded = P.liftFixedToBounded $ caseWordSize_32_64
(P.fixedPrim 9 $ c_int_dec_padded9 . fromIntegral)
(P.fixedPrim 18 $ c_long_long_int_dec_padded18 . fromIntegral)
#else
-- compilers other than GHC
-- | Decimal encoding of an 'Integer' using the ASCII digits. Implemented
-- via the 'Show' instance of 'Integer'.
integerDec :: Integer -> Builder
integerDec = string7 . show
#endif
| jwiegley/ghc-release | libraries/bytestring/Data/ByteString/Builder/ASCII.hs | gpl-3.0 | 11,618 | 0 | 15 | 2,547 | 1,786 | 1,023 | 763 | 132 | 1 |
module Network.Haskoin.Wallet.Settings
( SPVMode(..)
, OutputFormat(..)
, Config(..)
) where
import Control.Monad (forM, mzero)
import Control.Exception (throw)
import Control.Monad.Logger (LogLevel(..))
import Data.Default (Default, def)
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Data.Word (Word32, Word64)
import Data.HashMap.Strict (HashMap)
import qualified Data.Traversable as V (mapM)
import qualified Data.ByteString as BS (ByteString)
import qualified Data.Text as T (Text)
import Data.Aeson
( Value(..)
, FromJSON
, parseJSON
, withObject
, (.:), (.:?), (.!=)
)
import Network.Haskoin.Wallet.Database
import Network.Haskoin.Wallet.Types
data SPVMode = SPVOnline | SPVOffline
deriving (Eq, Show, Read)
data OutputFormat
= OutputNormal
| OutputJSON
| OutputYAML
data Config = Config
{ configKeyRing :: !T.Text
-- ^ Keyring to use in commands
, configCount :: !Word32
-- ^ Output size of commands
, configMinConf :: !Word32
-- ^ Minimum number of confirmations
, configSignTx :: !Bool
-- ^ Sign transactions
, configFee :: !Word64
-- ^ Fee to pay per 1000 bytes when creating new transactions
, configRcptFee :: !Bool
-- ^ Recipient pays fee (dangerous, no config file setting)
, configAddrType :: !AddressType
-- ^ Return internal instead of external addresses
, configOffline :: !Bool
-- ^ Display the balance including offline transactions
, configReversePaging :: !Bool
-- ^ Use reverse paging for displaying addresses and transactions
, configPass :: !(Maybe T.Text)
-- ^ Passphrase to use when creating new keyrings (bip39 mnemonic)
, configFormat :: !OutputFormat
-- ^ How to format the command-line results
, configConnect :: !String
-- ^ ZeroMQ socket to connect to (location of the server)
, configDetach :: !Bool
-- ^ Detach server when launched from command-line
, configFile :: !FilePath
-- ^ Configuration file
, configTestnet :: !Bool
-- ^ Use Testnet3 network
, configDir :: !FilePath
-- ^ Working directory
, configBind :: !String
-- ^ Bind address for the zeromq socket
, configBTCNodes :: !(HashMap T.Text [(String, Int)])
-- ^ Trusted Bitcoin full nodes to connect to
, configMode :: !SPVMode
-- ^ Operation mode of the SPV node.
, configBloomFP :: !Double
-- ^ False positive rate for the bloom filter.
, configDatabase :: !(HashMap T.Text DatabaseConfType)
-- ^ Database configuration
, configLogFile :: !FilePath
-- ^ Log file
, configPidFile :: !FilePath
-- ^ PID File
, configLogLevel :: !LogLevel
-- ^ Log level
, configVerbose :: !Bool
-- ^ Verbose
}
configBS :: BS.ByteString
configBS = $(embedFile "config/config.yml")
instance Default Config where
def = either throw id $ decodeEither' configBS
instance FromJSON Config where
parseJSON = withObject "Config" $ \o -> do
let configRcptFee = False
configFile = "config.yml"
configPass = Nothing
configKeyRing <- o .:? "keyring-name"
.!= configKeyRing def
configCount <- o .:? "output-size"
.!= configCount def
configMinConf <- o .:? "minimum-confirmations"
.!= configMinConf def
configSignTx <- o .:? "sign-transactions"
.!= configSignTx def
configFee <- o .:? "transaction-fee"
.!= configFee def
configAddrType <- k =<< o .:? "address-type"
configOffline <- o .:? "offline"
.!= configOffline def
configReversePaging <- o .:? "reverse-paging"
.!= configReversePaging def
configFormat <- f =<< o .:? "display-format"
configConnect <- o .:? "connect-uri"
.!= configConnect def
configDetach <- o .:? "detach-server"
.!= configDetach def
configTestnet <- o .:? "use-testnet"
.!= configTestnet def
configDir <- o .:? "work-dir"
.!= configDir def
configBind <- o .:? "bind-socket"
.!= configBind def
configBTCNodes <- g =<< o .:? "bitcoin-full-nodes"
configMode <- h =<< o .:? "server-mode"
configBloomFP <- o .:? "bloom-false-positive"
.!= configBloomFP def
configDatabase <- i =<< o .:? "database"
configLogFile <- o .:? "log-file"
.!= configLogFile def
configPidFile <- o .:? "pid-file"
.!= configPidFile def
configLogLevel <- j =<< o .:? "log-level"
configVerbose <- o .:? "verbose"
.!= configVerbose def
return Config {..}
where
f format = case format of
Just (String "normal") -> return OutputNormal
Just (String "json") -> return OutputJSON
Just (String "yaml") -> return OutputYAML
Just _ -> mzero
Nothing -> return $ configFormat def
g (Just x) = flip (withObject "btcnodesobj") x $ V.mapM $ \a -> do
ls <- parseJSON a
forM ls $ withObject "bitcoinnode" $ \o ->
(,) <$> (o .: "host") <*> (o .: "port")
g Nothing = return $ configBTCNodes def
h mode = case mode of
Just (String "online") -> return SPVOnline
Just (String "offline") -> return SPVOffline
Just _ -> mzero
Nothing -> return $ configMode def
i (Just x) = flip (withObject "databases") x $ V.mapM .
withObject "database" $ \v -> v .: databaseEngine
i Nothing = return $ configDatabase def
j level = case level of
Just (String "debug") -> return LevelDebug
Just (String "info") -> return LevelInfo
Just (String "warn") -> return LevelWarn
Just (String "error") -> return LevelError
Just _ -> mzero
Nothing -> return $ configLogLevel def
k addrtype = case addrtype of
Just (String "internal") -> return AddressInternal
Just (String "external") -> return AddressExternal
Just _ -> mzero
Nothing -> return $ configAddrType def
| tphyahoo/haskoin | haskoin-wallet/Network/Haskoin/Wallet/Settings.hs | unlicense | 7,015 | 0 | 17 | 2,605 | 1,494 | 778 | 716 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
module Test.System.GPIO.TypesSpec (spec) where
import Protolude
import System.GPIO.Types
import Data.Bits (unsafeShiftL, unsafeShiftR)
import Test.Hspec
import Test.QuickCheck (property)
-- Test all our hand-derived instances and functions.
-- PinValue mimics Bool with respect to Bits and FiniteBits.
--
ib2v :: (Int -> Bool) -> Int -> PinValue
ib2v f n = boolToValue $ f n
bi2v :: (Bool -> Int) -> PinValue -> Int
bi2v f a = f (valueToBool a)
bb2v :: (Bool -> Bool) -> PinValue -> PinValue
bb2v f a = boolToValue $ f (valueToBool a)
bbb2v :: (Bool -> Bool -> Bool) -> PinValue -> PinValue -> PinValue
bbb2v f a b = boolToValue $ f (valueToBool a) (valueToBool b)
bib2v :: (Bool -> Int -> Bool) -> PinValue -> Int -> PinValue
bib2v f a n = boolToValue $ f (valueToBool a) n
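-- For instance (illustrative, mirroring the properties below), the 'Bits'
-- instance for 'PinValue' is expected to agree with 'Bool' under these
-- conversions:
--
-- >   High .&. Low    == Low
-- >   complement Low  == High
-- >   High `xor` High == Low
--
-- which is exactly what the @bb2v@/@bbb2v@/@bib2v@ helpers let each
-- property check.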
#if MIN_VERSION_base(4,8,0)
newBase :: Spec
newBase =
do context "implements the new base-4.8.0.0 FiniteBits typeclass methods" $
do it "countLeadingZeros" $ property $
\a -> countLeadingZeros a == bi2v countLeadingZeros a
it "countTrailingZeros" $ property $
\a -> countTrailingZeros a == bi2v countTrailingZeros a
#else
newBase :: Spec
newBase = return ()
#endif
spec :: Spec
spec =
do describe "Pin" $
do it "pinNumber" $ property $
\p@(Pin n) -> n == pinNumber p
describe "PinDirection" $
do it "invertDirection" $
invertDirection In == Out
&& invertDirection Out == In
describe "PinValue" $
do it "invertValue" $ property $
\a -> invertValue a == complement a
it "valueToBool" $
valueToBool Low == False
&& valueToBool High == True
it "boolToValue" $
boolToValue False == Low
&& boolToValue True == High
context "implements the Bits typeclass" $
do it "(.&.)" $ property $
\a b -> a .&. b == bbb2v (.&.) a b
it "(.|.)" $ property $
\a b -> a .|. b == bbb2v (.|.) a b
it "xor" $ property $
\a b -> a `xor` b == bbb2v xor a b
it "complement" $ property $
\a -> complement a == bb2v complement a
it "shift" $ property $
\a n -> a `shift` n == bib2v shift a n
it "rotate" $ property $
\a n -> a `rotate` n == bib2v rotate a n
it "zeroBits" $ property $
(zeroBits :: PinValue) == boolToValue (zeroBits:: Bool)
it "bit" $ property $
\n -> bit n == ib2v bit n
it "setBit" $ property $
\a n -> a `setBit` n == bib2v setBit a n
it "clearBit" $ property $
\a n -> a `clearBit` n == bib2v clearBit a n
it "complementBit" $ property $
\a n -> a `complementBit` n == bib2v complementBit a n
it "testBit" $ property $
\a n -> testBit a n == testBit (valueToBool a) n
it "bitSizeMaybe" $ property $
\a -> bitSizeMaybe a == bitSizeMaybe (valueToBool a)
it "bitSize" $ property $
\a -> bitSize a == bitSize (valueToBool a)
it "isSigned" $ property $
\a -> isSigned a == isSigned (valueToBool a)
it "shiftL" $ property $
\a n -> a `shiftL` n == bib2v shiftL a n
it "unsafeShiftL" $ property $
\a n -> a `unsafeShiftL` n == bib2v unsafeShiftL a n
it "shiftR" $ property $
\a n -> a `shiftR` n == bib2v shiftR a n
it "unsafeShiftR" $ property $
\a n -> a `unsafeShiftR` n == bib2v unsafeShiftR a n
it "rotateL" $ property $
\a n -> a `rotateL` n == bib2v rotateL a n
it "rotateR" $ property $
\a n -> a `rotateR` n == bib2v rotateR a n
it "popCount" $ property $
\a -> popCount a == popCount (valueToBool a)
context "implements the FiniteBits typeclass" $
do it "finiteBitSize" $ property $
\a -> finiteBitSize a == bi2v finiteBitSize a
newBase
| dhess/gpio | test/Test/System/GPIO/TypesSpec.hs | bsd-3-clause | 4,438 | 0 | 18 | 1,602 | 1,429 | 703 | 726 | 93 | 1 |
{-|
Module : System.GPIO.Linux.Sysfs.IO
Description : Linux @sysfs@ GPIO operations in IO
Copyright : (c) 2019, Drew Hess
License : BSD3
Maintainer : Drew Hess <src@drewhess.com>
Stability : experimental
Portability : non-portable
The actual Linux @sysfs@ implementation. This implementation will only
function properly on Linux systems with a @sysfs@ subsystem,
obviously.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE InterruptibleFFI #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module System.GPIO.Linux.Sysfs.IO
( -- * SysfsIOT transformer
SysfsIOT(..)
) where
import Protolude hiding (bracket)
import Control.Monad.Base (MonadBase)
import Control.Monad.Catch (MonadCatch, MonadMask, MonadThrow, bracket)
import Control.Monad.Cont (MonadCont)
import Control.Monad.Fix (MonadFix)
import Control.Monad.Logger (MonadLogger, MonadLoggerIO)
import Control.Monad.RWS (MonadRWS)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Control
(ComposeSt, MonadBaseControl(..), MonadTransControl(..),
defaultLiftBaseWith, defaultRestoreM)
import Control.Monad.Writer (MonadWriter)
import qualified Data.ByteString as BS (readFile, writeFile)
import Foreign.C.Error (throwErrnoIfMinus1Retry)
import Foreign.C.Types (CInt(..))
import qualified System.Directory as D (doesDirectoryExist, doesFileExist, getDirectoryContents)
import "unix" System.Posix.IO (OpenMode(ReadOnly, WriteOnly), closeFd, defaultFileFlags, openFd)
import "unix-bytestring" System.Posix.IO.ByteString (fdWrite)
import System.GPIO.Linux.Sysfs.Monad (MonadSysfs(..))
-- | An instance of 'MonadSysfs' which runs 'MonadSysfs' operations in
-- IO. This instance must be run on an actual Linux @sysfs@ GPIO
-- filesystem and will fail in any other environment.
--
-- == Interactions with threads
--
-- Some parts of this implementation use the Haskell C FFI, and may
-- block on C I/O operations. (Specifically, 'pollFile' will block in
-- the C FFI until its event is triggered.) When using this
-- implementation with GHC, you should compile your program with the
-- @-threaded@ option, so that threads performing these blocking
-- operations do not block other Haskell threads in the system.
--
-- Note that the C FFI bits in this implementation are marked as
-- 'interruptible', so that, on versions of GHC later than 7.8.1,
-- functions such as 'Control.Concurrent.throwTo' will work properly
-- when targeting a Haskell thread that uses this implementation.
--
-- (On Haskell implementations other than GHC, the threading
-- implications are unknown; see the implementation's notes on how its
-- threading system interacts with the C FFI.)
newtype SysfsIOT m a = SysfsIOT
{ runSysfsIOT :: m a
} deriving ( Functor
, Alternative
, Applicative
, Monad
, MonadBase b
, MonadFix
, MonadPlus
, MonadThrow
, MonadCatch
, MonadMask
, MonadCont
, MonadIO
, MonadReader r
, MonadError e
, MonadWriter w
, MonadState s
, MonadRWS r w s
, MonadLogger
, MonadLoggerIO
)
instance MonadTrans SysfsIOT where
lift = SysfsIOT
instance MonadBaseControl b m => MonadBaseControl b (SysfsIOT m) where
type StM (SysfsIOT m) a = ComposeSt SysfsIOT m a
liftBaseWith = defaultLiftBaseWith
restoreM = defaultRestoreM
{-# INLINABLE liftBaseWith #-}
{-# INLINABLE restoreM #-}
instance MonadTransControl SysfsIOT where
type StT SysfsIOT a = a
liftWith f = SysfsIOT $ f runSysfsIOT
restoreT = SysfsIOT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance (MonadIO m, MonadThrow m) => MonadSysfs (SysfsIOT m) where
doesDirectoryExist = liftIO . D.doesDirectoryExist
doesFileExist = liftIO . D.doesFileExist
getDirectoryContents = liftIO . D.getDirectoryContents
readFile = liftIO . BS.readFile
writeFile fn bs = liftIO $ BS.writeFile fn bs
unlockedWriteFile fn bs = liftIO $ unlockedWriteFileIO fn bs
pollFile fn timeout = liftIO $ pollFileIO fn timeout
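-- A minimal usage sketch (the path below is illustrative and assumes an
-- actual Linux @sysfs@ GPIO tree is mounted):
--
-- > listGpio :: IO [FilePath]
-- > listGpio = runSysfsIOT $ getDirectoryContents "/sys/class/gpio"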
unlockedWriteFileIO :: FilePath -> ByteString -> IO ()
unlockedWriteFileIO fn bs =
bracket
(openFd fn WriteOnly Nothing defaultFileFlags)
closeFd
(\fd -> void $ fdWrite fd bs)
foreign import ccall interruptible "pollSysfs" pollSysfs :: CInt -> CInt -> IO CInt
pollFileIO :: FilePath -> Int -> IO CInt
pollFileIO fn timeout =
bracket
(openFd fn ReadOnly Nothing defaultFileFlags)
closeFd
(\fd -> throwErrnoIfMinus1Retry "pollSysfs" $ pollSysfs (fromIntegral fd) (fromIntegral timeout))
| dhess/gpio | src/System/GPIO/Linux/Sysfs/IO.hs | bsd-3-clause | 4,945 | 0 | 11 | 997 | 841 | 494 | 347 | 84 | 1 |
module Data.Graph.Inductive.Internal.Heap
(Heap(..), empty, unit, insert, merge, mergeAll, isEmpty, findMin,
deleteMin, splitMin, build, toList, heapsort)
where
{
data (Ord a) => Heap a b = Empty
| Node a b [Heap a b]
deriving Eq;
showsHeap :: (Show a, Ord a, Show b) => Heap a b -> ShowS;
showsHeap (Empty) = id;
showsHeap (Node key val []) = shows key . (": " ++) . shows val;
showsHeap (Node key val hs)
= shows key . (": " ++) . shows val . (' ' :) . shows hs;
instance (Show a, Ord a, Show b) => Show (Heap a b) where
{ showsPrec _ d = showsHeap d};
empty :: (Ord a) => Heap a b;
empty = Empty;
unit :: (Ord a) => a -> b -> Heap a b;
unit key val = Node key val [];
insert :: (Ord a) => (a, b) -> Heap a b -> Heap a b;
insert (key, val) h = merge (unit key val) h;
merge :: (Ord a) => Heap a b -> Heap a b -> Heap a b;
merge h (Empty) = h;
merge (Empty) h = h;
merge h@(Node key1 val1 hs) h'@(Node key2 val2 hs')
| key1 < key2 = Node key1 val1 (h' : hs)
| otherwise = Node key2 val2 (h : hs');
mergeAll :: (Ord a) => [Heap a b] -> Heap a b;
mergeAll [] = Empty;
mergeAll [h] = h;
mergeAll (h : h' : hs) = merge (merge h h') (mergeAll hs);
isEmpty :: (Ord a) => Heap a b -> Bool;
isEmpty (Empty) = True;
isEmpty _ = False;
findMin :: (Ord a) => Heap a b -> (a, b);
findMin (Empty) = error "Heap.findMin: empty heap";
findMin (Node key val _) = (key, val);
deleteMin :: (Ord a) => Heap a b -> Heap a b;
deleteMin (Empty) = Empty;
deleteMin (Node _ _ hs) = mergeAll hs;
splitMin :: (Ord a) => Heap a b -> (a, b, Heap a b);
splitMin (Empty) = error "Heap.splitMin: empty heap";
splitMin (Node key val hs) = (key, val, mergeAll hs);
build :: (Ord a) => [(a, b)] -> Heap a b;
build = foldr insert Empty;
toList :: (Ord a) => Heap a b -> [(a, b)];
toList (Empty) = [];
toList h = x : toList r
where { (x, r) = (findMin h, deleteMin h)};
heapsort :: (Ord a) => [a] -> [a];
heapsort = (map fst) . toList . build . map (\ x -> (x, x))}
| ckaestne/CIDE | other/CaseStudies/fgl/CIDEfgl/Data/Graph/Inductive/Internal/Heap.hs | gpl-3.0 | 2,169 | 0 | 9 | 661 | 1,136 | 623 | 513 | 49 | 1 |
{-# LANGUAGE CPP #-}
#ifndef MIN_VERSION_profunctors
#define MIN_VERSION_profunctors(x,y,z) 0
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Machine.Mealy
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : portable
--
-- <http://en.wikipedia.org/wiki/Mealy_machine>
----------------------------------------------------------------------------
module Data.Machine.Mealy
( Mealy(..)
, unfoldMealy
, logMealy
) where
import Control.Applicative
import Control.Arrow
import Control.Category
import Data.Machine.Plan
import Data.Machine.Type
import Data.Machine.Process
import Data.Profunctor
import Data.Pointed
import Data.Semigroup
import Data.Sequence as Seq
import Prelude hiding ((.),id)
-- | 'Mealy' machines
newtype Mealy a b = Mealy { runMealy :: a -> (b, Mealy a b) }
instance Functor (Mealy a) where
fmap f (Mealy m) = Mealy $ \a -> case m a of
(b, n) -> (f b, fmap f n)
{-# INLINE fmap #-}
b <$ _ = pure b
{-# INLINE (<$) #-}
instance Applicative (Mealy a) where
pure b = r where r = Mealy (const (b, r))
{-# INLINE pure #-}
Mealy m <*> Mealy n = Mealy $ \a -> case m a of
(f, m') -> case n a of
(b, n') -> (f b, m' <*> n')
m <* _ = m
{-# INLINE (<*) #-}
_ *> n = n
{-# INLINE (*>) #-}
instance Pointed (Mealy a) where
point b = r where r = Mealy (const (b, r))
{-# INLINE point #-}
-- | A 'Mealy' machine modeled with explicit state.
unfoldMealy :: (s -> a -> (b, s)) -> s -> Mealy a b
unfoldMealy f = go where
go s = Mealy $ \a -> case f s a of
(b, t) -> (b, go t)
{-# INLINE unfoldMealy #-}
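-- A small sketch of typical use (the name @runningSum@ is illustrative, not
-- part of this module):
--
-- > runningSum :: Mealy Int Int
-- > runningSum = unfoldMealy (\s a -> let s' = s + a in (s', s')) 0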
-- | slow diagonalization
instance Monad (Mealy a) where
return b = r where r = Mealy (const (b, r))
{-# INLINE return #-}
m >>= f = Mealy $ \a -> case runMealy m a of
(b, m') -> (fst (runMealy (f b) a), m' >>= f)
{-# INLINE (>>=) #-}
_ >> n = n
{-# INLINE (>>) #-}
instance Profunctor Mealy where
rmap = fmap
{-# INLINE rmap #-}
lmap f = go where
go (Mealy m) = Mealy $ \a -> case m (f a) of
(b, n) -> (b, go n)
{-# INLINE lmap #-}
#if MIN_VERSION_profunctors(3,1,1)
dimap f g = go where
go (Mealy m) = Mealy $ \a -> case m (f a) of
(b, n) -> (g b, go n)
{-# INLINE dimap #-}
#endif
instance Automaton Mealy where
auto = construct . go where
go (Mealy f) = await >>= \a -> case f a of
(b, m) -> do
yield b
go m
{-# INLINE auto #-}
instance Category Mealy where
id = Mealy (\a -> (a, id))
Mealy bc . Mealy ab = Mealy $ \ a -> case ab a of
(b, nab) -> case bc b of
(c, nbc) -> (c, nbc . nab)
instance Arrow Mealy where
arr f = r where r = Mealy (\a -> (f a, r))
{-# INLINE arr #-}
first (Mealy m) = Mealy $ \(a,c) -> case m a of
(b, n) -> ((b, c), first n)
instance ArrowChoice Mealy where
left m = Mealy $ \a -> case a of
Left l -> case runMealy m l of
(b, m') -> (Left b, left m')
Right r -> (Right r, left m)
right m = Mealy $ \a -> case a of
Left l -> (Left l, right m)
Right r -> case runMealy m r of
(b, m') -> (Right b, right m')
m +++ n = Mealy $ \a -> case a of
Left b -> case runMealy m b of
(c, m') -> (Left c, m' +++ n)
Right b -> case runMealy n b of
(c, n') -> (Right c, m +++ n')
m ||| n = Mealy $ \a -> case a of
Left b -> case runMealy m b of
(d, m') -> (d, m' ||| n)
Right b -> case runMealy n b of
(d, n') -> (d, m ||| n')
#if MIN_VERSION_profunctors(3,2,0)
instance Strong Mealy where
first' = first
instance Choice Mealy where
left' = left
right' = right
#endif
-- | Fast forward a mealy machine forward
driveMealy :: Mealy a b -> Seq a -> a -> (b, Mealy a b)
driveMealy m xs z = case viewl xs of
y :< ys -> case runMealy m y of
(_, n) -> driveMealy n ys z
EmptyL -> runMealy m z
-- | Accumulate history.
logMealy :: Semigroup a => Mealy a a
logMealy = Mealy $ \a -> (a, h a) where
h a = Mealy $ \b -> let c = a <> b in (c, h c)
{-# INLINE logMealy #-}
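-- For example (a sketch, stepping the machine by hand with 'runMealy'):
--
-- > let (o1, m1) = runMealy logMealy "a"
-- >     (o2, _ ) = runMealy m1 "b"
-- > in (o1, o2)   -- ("a","ab")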
instance ArrowApply Mealy where
app = go Seq.empty where
go xs = Mealy $ \(m,x) -> case driveMealy m xs x of
(c, _) -> (c, go (xs |> x))
{-# INLINE app #-}
| fumieval/machines | src/Data/Machine/Mealy.hs | bsd-3-clause | 4,321 | 0 | 16 | 1,138 | 1,776 | 952 | 824 | 106 | 2 |
-- !!! make sure context of EQ is minimised in interface file.
--
module ShouldSucceed where
data NUM = ONE | TWO
class (Num a) => ORD a
class (ORD a, Show a) => EQ a where
(===) :: a -> a -> Bool
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/typecheck/should_compile/tc077.hs | bsd-3-clause | 200 | 0 | 8 | 47 | 71 | 40 | 31 | -1 | -1 |
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.STRef.Lazy
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (uses Control.Monad.ST.Lazy)
--
-- Mutable references in the lazy ST monad.
--
-----------------------------------------------------------------------------
module Data.STRef.Lazy (
-- * STRefs
ST.STRef, -- abstract
newSTRef,
readSTRef,
writeSTRef,
modifySTRef
) where
import Control.Monad.ST.Lazy
import qualified Data.STRef as ST
newSTRef :: a -> ST s (ST.STRef s a)
readSTRef :: ST.STRef s a -> ST s a
writeSTRef :: ST.STRef s a -> a -> ST s ()
modifySTRef :: ST.STRef s a -> (a -> a) -> ST s ()
newSTRef = strictToLazyST . ST.newSTRef
readSTRef = strictToLazyST . ST.readSTRef
writeSTRef r a = strictToLazyST (ST.writeSTRef r a)
modifySTRef r f = strictToLazyST (ST.modifySTRef r f)
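-- A minimal usage sketch ('runST' is the lazy variant from
-- "Control.Monad.ST.Lazy"):
--
-- > runST $ do r <- newSTRef (0 :: Int)
-- >            modifySTRef r (+ 1)
-- >            readSTRef r
-- > -- == 1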
| tolysz/prepare-ghcjs | spec-lts8/base/Data/STRef/Lazy.hs | bsd-3-clause | 1,132 | 0 | 9 | 246 | 231 | 130 | 101 | 17 | 1 |
{-# LANGUAGE DeriveGeneric, DatatypeContexts #-}
module CannotDoRep1_1 where
import GHC.Generics
-- We do not support datatypes with context
data (Show a) => Context a = Context a deriving Generic1
| urbanslug/ghc | testsuite/tests/generics/GenCannotDoRep1_1.hs | bsd-3-clause | 201 | 0 | 6 | 32 | 33 | 21 | 12 | 4 | 0 |
module Rebase.GHC.Float
(
module GHC.Float
)
where
import GHC.Float
| nikita-volkov/rebase | library/Rebase/GHC/Float.hs | mit | 71 | 0 | 5 | 12 | 20 | 13 | 7 | 4 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module Model.Post where
import Import
import qualified Database.Esqueleto as E
import Database.Esqueleto((^.))
getPosts :: Int -> Int -> DB [Entity Post]
getPosts page postsPerPage
| page > 0 && postsPerPage > 0 = selectList
[]
[ Desc PostCreated
, LimitTo postsPerPage
, OffsetBy $ (page - 1) * postsPerPage
]
| otherwise = return []
mkPostFromEvent :: Event -> Post
mkPostFromEvent Event {..} =
Post eventUser eventCreated Nothing eventTitle eventContent | isankadn/yesod-testweb-full | Model/Post.hs | mit | 524 | 0 | 11 | 104 | 165 | 87 | 78 | -1 | -1 |
{-# OPTIONS -Wall -Werror #-}
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.List
import System.Directory
import System.Environment
import System.IO
-- command-line arguments: <dist file> <.lib directory> <include directory>
main :: IO ()
main = getArgs >>= write
where
write (dist : lib : inc : _) = readFile dist >>= writeIncludes dist inc >> writePragmas dist lib
    write _ = error "pragma-maker: expected <dist file> <.lib directory> <include directory>"
writePragmas :: FilePath -> FilePath -> IO ()
writePragmas x = appendFile x . makeContents <=< getDirectoryContents
writeIncludes :: FilePath -> FilePath -> String -> IO ()
writeIncludes x y zs = bracketOnError (openTempFile "." "temp") finalize editTemp
where
finalize (tempName, tempHandle) = hClose tempHandle >> removeFile tempName
editTemp (tempName, tempHandle) = searchIncludes y
>>= hPutStr tempHandle . (++ zs) . unlines . map addinc
>> hClose tempHandle >> removeFile x >> renameFile tempName x
addinc s = "#include <llvm/" ++ s ++ ">"
searchIncludes :: FilePath -> IO [FilePath]
searchIncludes x = getDirectoryContents x >>= searchDir x
searchDir :: FilePath -> [FilePath] -> IO [FilePath]
searchDir _ [] = return []
searchDir x (z : zs)
| ".h" `isSuffixOf` z = (z :) <$> res
| '.' `elem` z = res
| otherwise = (++) <$> indir <*> res
where
x' = x ++ '/' : z
indir = map ((z ++) . ('/' :)) <$> searchIncludes x'
res = searchDir x zs
makeContents :: [FilePath] -> String
makeContents = unlines . ("" :) . foldr ff []
where
ff x acc
| ".lib" `isSuffixOf` x = ("#pragma comment( lib, \"" ++ x ++ "\" )") : acc
| otherwise = acc
| MichaeGon/pragma-maker-for-LLVM | pragma-maker-old.hs | mit | 1,604 | 2 | 14 | 355 | 592 | 304 | 288 | -1 | -1 |
doubleMe x = x + x
doubleUs x y = doubleMe x + doubleMe y
doubleSmallNumber x = if x > 100
then x
else x*2
doubleSmallNumber' x = (if x > 100 then x else x*2) + 1
boomBangs xs = [ if x < 10 then "BOOM!" else "BANG!" | x <- xs, odd x]
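-- a quick GHCi check of boomBangs:
-- boomBangs [7..13] == ["BOOM!","BOOM!","BANG!","BANG!"]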
| Sgoettschkes/learning | haskell/LearnYouAHaskell/old/baby.hs | mit | 312 | 0 | 8 | 135 | 126 | 65 | 61 | 7 | 2 |
module Main where
import Test.Tasty (defaultMain, testGroup, TestTree)
import Utrecht.MasterMind.Test
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "All tests" [
masterMindSuite
] | kandersen/Utrecht | test/Test.hs | mit | 215 | 2 | 6 | 35 | 64 | 37 | 27 | 8 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings, PatternSynonyms #-}
{-# LANGUAGE RankNTypes, ScopedTypeVariables, StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell, TypeFamilies, ConstraintKinds #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures -fno-warn-missing-signatures #-}
module Commands.RHS.Types where
import Commands.Extra (Exists)
import Control.Lens hiding (Empty) -- TODO
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty
import Control.Applicative
import Data.Monoid
import GHC.Exts (IsList (..), IsString (..))
-- import Text.Printf
import Prelude
newtype ConstNonTerminal n t (f :: (* -> *)) a = ConstNonTerminal n
{-| a grammatical right hand side.
-}
data RHS n t f a where
Pure :: a -> RHS n t f a -- Applicative 'pure'
Apply :: (RHS n t f (x -> a)) -> (f x) -> RHS n t f a -- Applicative '<*>'
(:<*>) :: (RHS n t f (x -> a)) -> (RHS n t f x) -> RHS n t f a -- Applicative '<*>'
Alter :: [RHS n t f a] -> RHS n t f a -- Alternative '<|>' / Monoid '<>'
Opt :: (Maybe x -> a) -> RHS n t f x -> RHS n t f a -- Alternative 'optional'
Many :: ([x] -> a) -> RHS n t f x -> RHS n t f a -- Alternative 'many'
Some :: (NonEmpty x -> a) -> RHS n t f x -> RHS n t f a -- Alternative 'some'
-- grammar-specific stuff
Terminal :: (t -> a) -> !t -> RHS n t f a -- grammatical terminal symbol (Coyoneda'd)
NonTerminal :: (n t f a) -> RHS n t f a -> RHS n t f a -- grammatical non-terminal symbol
Terminals :: (t -> a) -> RHS n t f a -- a placeholder for a set of terminals (e.g. set of all terminal symbols in the grammar. see 'getTerminals')
-- | @pattern Empty = Alter []@
pattern Empty :: forall (n :: * -> (* -> *) -> * -> *) t (f :: * -> *) a. RHS n t f a
pattern Empty = Alter []
-- | @ConstraintKinds@
type Functor'RHS n t f = (Functor (n t f), Functor f)
-- type RHSFunctorC n t f = (Functor f, Functor (n t f)) ConstraintKinds
deriving instance (Functor (n t f)) => (Functor (RHS n t f)) -- TODO expects constraint:
-- deriving instance () => Data (RHS n t f a)
-- | lawful (coincides with 'Alternative' instance)
instance (Functor f, Functor (n t f)) => Monoid (RHS n t f a) where
mempty = Empty
mappend = (<|>)
{- | mostly lawful. 'fmap' and 'pure' behave lawfully.
left-distributivity of '<*>' over '<|>' is intentionally violated. that is, we want @(x \<|> y) \<*> z@ to be preserved, not to be distributed into @(x \<*> z) \<|> (y \<*> z)@. this helps:
* when @(x \<|> y)@ is actually the infinite @(x \<|> y \<|> ...)@, interpreting the undistributed @(x \<|> y \<|> ...) \<*> z@ may terminate, while the distributed @(x \<*> z) \<|> (y \<*> z) \<|> ...@ will not.
* when the interpretation (e.g. a chart parser) can increase performance by sharing such "inner alternation".
'<*>' is left-associated.
-}
instance (Functor f, Functor (n t f)) => Applicative (RHS n t f) where
pure = Pure
Pure xa <*> tx = fmap xa tx -- Functor
-- Pure {id} <*> x = fmap {id} x = x -- Identity
-- Pure f <*> Pure x = fmap f (Pure x) = Pure (f x) -- Homomorphism
txa <*> Pure x = fmap ($ x) txa -- Interchange
Empty <*> _ = Empty -- left-Annihilation (?)
_ <*> Empty = Empty -- right-Annihilation
txa <*> (tyx `Apply` fy) = ((.) <$> txa <*> tyx) `Apply` fy -- Composition
txa <*> (tyx :<*> ty) = ((.) <$> txa <*> tyx) :<*> ty -- Composition
txa <*> r@(Alter _txs) = txa :<*> r -- NO left-Distributivity
txa <*> r@(Opt _ysa _ty) = txa :<*> r -- NOTE doesn't distribute, intentionally
txa <*> r@(Many _ysa _ty) = txa :<*> r -- NOTE doesn't distribute, intentionally
txa <*> r@(Some _ysa _ty) = txa :<*> r -- NOTE doesn't distribute, intentionally
txa <*> r@(Terminal _i _t) = txa :<*> r
txa <*> r@(NonTerminal _l _r) = txa :<*> r -- NOTE preserving sharing is critical for the observers sharing interface
txa <*> r@(Terminals _i) = txa :<*> r -- NOTE greatly simplifies "self-referential" grammars (self-recursive grammars are already simple)
-- | lawful.
instance (Functor f, Functor (n t f)) => Alternative (RHS n t f) where
empty = Empty
Empty <|> y = y -- Left-Identity
x <|> Empty = x -- Right-Identity
x <|> y = Alter (toRHSList x <> toRHSList y) -- Associativity
{-# INLINE (<|>) #-}
many = Many id
{-# INLINE many #-}
some = fmap NonEmpty.toList . Some id
{-# INLINE some #-}
{- | both token and result must be an (instance of) 'IsString'.
(see <http://chrisdone.com/posts/haskell-constraint-trick the constraint trick>)
@t@ can default to String.
-}
instance (IsString t, Show t, a ~ t) => IsString (RHS n t f a) where --TODO remove Show constraint or show it's needed for defaulting
fromString s = Terminal id t where t = fromString s
{-# INLINEABLE fromString #-}
-- instance (IsString t, Show t, a ~ t) => IsString (RHS n t f a) where fromString = Terminal id . fromString
-- instance (IsString t, Show t, a ~ String) => IsString (RHS n t f a) where fromString = Terminal show . fromString
-- instance (IsString t, Show t) => IsString (RHS n t f String) where fromString = Terminal show . fromString
-- instance (IsString t) => IsString (RHS n String f t) where fromString = Terminal fromString
-- instance (IsString t, Show t) => IsString (RHS n t f t) where fromString = Terminal id . fromString
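-- A brief sketch of the intent (requires @OverloadedStrings@; the name
-- @copyRHS@ is illustrative):
--
-- > copyRHS :: RHS n String f String
-- > copyRHS = "copy"   -- i.e. Terminal id "copy"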
-- | @([r1,r2,r3] :: RHS n t f a)@ is @('mconcat' [r1,r2,r3])@ is @('asum' [r1,r2,r3])@ is @(r1 '<|>' r2 '<|>' r3)@
instance IsList (RHS n t f a) where
type Item (RHS n t f a) = RHS n t f a
fromList = Alter -- the constructor (rather than a method like "asum") avoids the (Functor f) constraint
toList = toRHSList
-- ================================================================ --
{-| a "lowered" (existentially-quantified) right hand side.
-}
type RHS0 n t f = Exists (RHS n t f)
-- | e.g. @('RHS' (ConstName n) t f a)@
data ConstName n t (f :: * -> *) a = ConstName { _unConstName :: !n } deriving (Functor)
-- KindSignatures because: f being phantom, its kind is inferred to be nullary (I think)
-- TODO is PolyKinds better? (f :: k)
deriving instance Show n => Show (ConstName n t f a)
data SomeRHS n t f = SomeRHS { _unSomeRHS :: forall x. RHS n t f x }
-- ================================================================ --
toRHSList :: RHS n t f a -> [RHS n t f a]
toRHSList (Alter xs) = xs
toRHSList x = [x]
{-# INLINE toRHSList #-}
-- ================================================================ --
-- lenses
_RHSName :: Traversal' (RHS n t f a) (n t f a)
_RHSName = _NonTerminal._1
_NonTerminal :: Prism' (RHS n t f a) (n t f a, RHS n t f a)
_NonTerminal = prism (uncurry NonTerminal) $ \case
NonTerminal l r -> Right (l, r)
r -> Left r
-- makePrisms ''RHS
makeLenses ''ConstName
makeLenses ''SomeRHS
--TODO refactor -? to .? conflicts with lens?
| sboosali/commands | commands-core/sources/Commands/RHS/Types.hs | mit | 7,485 | 3 | 12 | 2,003 | 1,681 | 916 | 765 | 85 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-------------------------------------------
-- |
-- Module : Web.Stripe.Dispute
-- Copyright : (c) David Johnson, 2014
-- Maintainer : djohnson.m@gmail.com
-- Stability : experimental
-- Portability : POSIX
--
-- < https:/\/\stripe.com/docs/api#diputes >
--
-- @
-- {-\# LANGUAGE OverloadedStrings \#-}
-- import Web.Stripe
-- import Web.Stripe.Charge
-- import Web.Stripe.Dispute
--
-- main :: IO ()
-- main = do
-- let config = StripeConfig (StripeKey "secret_key")
-- result <- stripe config $ getCharge (ChargeId "charge_id")
-- case result of
-- (Left stripeError) -> print stripeError
-- (Right (Charge { chargeDispute = dispute })) ->
-- case dispute of
-- (Just dispute) -> print dispute
-- Nothing -> print "no dispute on this charge"
-- @
module Web.Stripe.Dispute
( -- * API
UpdateDispute
, updateDispute
, CloseDispute
, closeDispute
-- * Types
, ChargeId (..)
, Dispute (..)
, DisputeReason (..)
, DisputeStatus (..)
, Evidence (..)
, MetaData (..)
) where
import Web.Stripe.StripeRequest (Method (POST),
StripeHasParam, StripeRequest (..),
StripeReturn,
mkStripeRequest)
import Web.Stripe.Util ((</>))
import Web.Stripe.Types (ChargeId (..), Dispute (..),
DisputeReason (..),
DisputeStatus (..),
Evidence (..), MetaData(..))
import Web.Stripe.Types.Util (getChargeId)
------------------------------------------------------------------------------
-- | `Dispute` to be updated
updateDispute
:: ChargeId -- ^ The ID of the Charge being disputed
-> StripeRequest UpdateDispute
updateDispute
chargeId = request
where request = mkStripeRequest POST url params
url = "charges" </> getChargeId chargeId </> "dispute"
params = []
data UpdateDispute
type instance StripeReturn UpdateDispute = Dispute
instance StripeHasParam UpdateDispute Evidence
instance StripeHasParam UpdateDispute MetaData
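-- A usage sketch (the charge id and metadata are placeholders; @(-&-)@ from
-- "Web.Stripe.StripeRequest" attaches the optional parameters declared above):
--
-- @
-- updateDispute (ChargeId "charge_id") -&- MetaData [("key", "value")]
-- @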
------------------------------------------------------------------------------
-- | `Dispute` to be closed
closeDispute
:: ChargeId -- ^ The ID of the Charge being disputed
-> StripeRequest CloseDispute
closeDispute
chargeId = request
where request = mkStripeRequest POST url params
url = "charges" </> getChargeId chargeId </> "dispute" </> "close"
params = []
data CloseDispute
type instance StripeReturn CloseDispute = Dispute
| dmjio/stripe | stripe-core/src/Web/Stripe/Dispute.hs | mit | 2,913 | 0 | 10 | 852 | 370 | 240 | 130 | -1 | -1 |
module Game.Poker.Cards
( Suit(..)
, Card
, allCards
, cardSuit
, cardNumber
, cardStrength
) where
-- | The four suits of a playing card
--
-- >>> Hearts -- Show
-- Hearts
--
-- >>> read "Hearts" :: Suit -- Read
-- Hearts
--
-- >>> Hearts == Hearts -- Eq
-- True
--
-- >>> Hearts == Spades -- Eq
-- False
--
-- >>> Hearts < Diamonds -- Ord
-- True
--
-- >>> succ Hearts -- Enum
-- Diamonds
--
data Suit = Hearts | Diamonds | Clubs | Spades
deriving (Show, Read, Eq, Ord, Enum)
-- | One playing card
--
-- >>> Card 1 Hearts == Card 2 Hearts -- Eq
-- False
--
-- >>> Card 1 Hearts < Card 2 Hearts -- Ord
-- True
data Card = Card Int Suit
deriving (Eq, Ord)
-- | Helper for the 'Show' instance
--
-- So that "K < A" evaluates to True,
-- the ace is represented as 14
--
-- >>> showCardNumber 14
-- "A_"
--
-- >>> showCardNumber 4
-- "4_"
showCardNumber :: Int -> String
showCardNumber 14 = "A_"
showCardNumber 13 = "K_"
showCardNumber 12 = "Q_"
showCardNumber 11 = "J_"
showCardNumber 10 = "10"
showCardNumber x = show x ++ "_"
-- | Show typeclass of Card
--
-- >>> show $ Card 1 Hearts
-- "H1_"
--
-- >>> show $ Card 14 Diamonds
-- "DA_"
--
-- >>> show $ Card 11 Clubs
-- "CJ_"
--
-- >>> show $ Card 10 Spades
-- "S10"
instance Show Card where
show (Card i Hearts) = "H" ++ showCardNumber i
show (Card i Diamonds) = "D" ++ showCardNumber i
show (Card i Clubs) = "C" ++ showCardNumber i
show (Card i Spades) = "S" ++ showCardNumber i
-- | All cards
--
-- >>> length allCards
-- 52
--
-- >>> take 13 $ allCards
-- [H2_,H3_,H4_,H5_,H6_,H7_,H8_,H9_,H10,HJ_,HQ_,HK_,HA_]
--
-- >>> reverse $ take 13 $ reverse allCards
-- [S2_,S3_,S4_,S5_,S6_,S7_,S8_,S9_,S10,SJ_,SQ_,SK_,SA_]
--
allCards :: [Card]
allCards = [ Card num suit | suit <- [Hearts ..], num <- [2..14] ]
-- | Get Suit from card
--
-- >>> cardSuit $ Card 10 Hearts
-- Hearts
cardSuit :: Card -> Suit
cardSuit (Card _ card) = card
-- | Get number from card
--
-- >>> cardNumber $ Card 10 Hearts
-- 10
cardNumber :: Card -> Int
cardNumber (Card num _) = num
-- | Strength of card
--
-- >>> cardStrength . head $ allCards
-- 2
cardStrength :: Card -> Int
cardStrength = cardNumber
| tobynet/java-poker | src/Game/Poker/Cards.hs | mit | 2,264 | 0 | 8 | 603 | 430 | 263 | 167 | 30 | 1 |
{-# LANGUAGE RankNTypes, ImpredicativeTypes, LiberalTypeSynonyms #-}
module Treb.Config (withTrebEnv) where
import qualified Hasql as H
import qualified Hasql.Postgres as HP
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Char8 as BC
import qualified Database.MySQL.Simple as MySQL
import Control.Concurrent.STM.TVar
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import Control.Monad.STM
import Data.Aeson
import Data.Bool
import Data.Bits (xor)
import Data.Either (either)
import Data.Maybe
import Network.URI
import System.FilePath
import System.INotify
import System.Directory
import System.Random
import System.Environment (getArgs)
import System.IO.Error
import Text.Read (readEither)
import Treb.Combinators
import Treb.JSON ()
import Treb.Types
import Treb.Routes.Types
-- Exported Functions --
-- | Construct a Trebuchet environment and pass it into a function in IO. This
-- environment includes database connections and similar.
withTrebEnv :: (TrebEnv -> IO ()) -> IO ()
withTrebEnv f = runExceptT getEnv >>= either (putStrLn . ("ERROR: " ++)) f
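-- A usage sketch (hypothetical entry point; the body is illustrative):
--
-- > main :: IO ()
-- > main = withTrebEnv $ \env -> print (confPort (trebEnvConfig env))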
-- Hidden Functions --
getEnv :: ExceptT String IO TrebEnv
getEnv = do
-- Generate configuration from command line arguments
conf <- processArgs defaultTrebConfig =<< liftIO getArgs
-- Create a pool of connections to PostgreSQL
pgPool <- getPool conf
-- Check that SSL-related command line arguments are well formed
exceptIf
(isJust (confSSLCertPath conf) `xor` isJust (confSSLCertKeyPath conf))
$ "SSL requires both -c/--ssl-certificate and -k/--ssl-certificate-key to be set."
-- Check that the job template directory exists
let jobTemplateDir = confJobTemplateDir conf
cwd <- liftIO getCurrentDirectory
  jobTemplateDirExists <- liftIO $ doesDirectoryExist jobTemplateDir
  exceptIf
    (not jobTemplateDirExists)
    $ "Job template directory '" ++ (cwd </> jobTemplateDir) ++ "' not found."
-- Create TVar for updating the job templates available to HTTP request handlers
jobTemplates <- liftIO $ newTVarIO []
-- Begin watching job_templates directory and automatically update the internal job templates accordingly
liftIO $ do
let updateJobTemplates = getJobTemplates jobTemplateDir >>= atomically . writeTVar jobTemplates
putStrLn "Initializing event watchers for job templates directory."
inotify <- initINotify
addWatch inotify [Create, Delete, Modify, MoveIn, MoveOut] jobTemplateDir $
const $ updateJobTemplates
>> putStrLn "Job Templates Updated."
putStrLn "> Done."
putStrLn "Settings initial Job Templates."
updateJobTemplates
putStrLn "> Done."
-- Connect to the Drupal/OpenAtrium MySQL database for authentication and authorization
drupalMySQLConn <- unlessDebugMode conf $ do
mapM_ (\(attr, msg) ->
exceptIf
(isNothing $ attr conf)
$ msg ++ " for OpenAtrium database not given.")
[ (confOAHost, "Host")
, (confOAPort, "Port")
, (confOADatabase, "Database name")
, (confOAUsername, "Username")
, (confOAPassword, "Password") ] :: ExceptT String IO ()
liftIO $ putStrLn "Connecting to Drupal/OpenAtrium MySQL database."
oaPort <- either throwE return $ readEither $ fromJust $ confOAPort conf
ret <- liftIO $ MySQL.connect $
MySQL.defaultConnectInfo
{ MySQL.connectHost = fromJust $ confOAHost conf
, MySQL.connectPort = oaPort
, MySQL.connectDatabase = fromJust $ confOADatabase conf
, MySQL.connectUser = fromJust $ confOAUsername conf
, MySQL.connectPassword = fromJust $ confOAPassword conf }
liftIO $ putStrLn "> Done."
return ret
activeUploads <- liftIO $ newTVarIO M.empty
uploadIdGen <- liftIO $ newTVarIO =<< getStdGen
maybe (throwE "No --base-uri specified.")
(bool (throwE "Invalid --base-uri given.")
(return ()))
(isURI <$> confBaseURI conf)
baseURI <- fromMaybe
(throwE "Failed to parse value given to --base-uri.")
(confBaseURI conf >>= fmap pure . parseURI)
-- Construct the Trebuchet environment
return TrebEnv
{ trebEnvJobTemplates = jobTemplates
, trebEnvDrupalMySQLConn = drupalMySQLConn
, trebEnvUsername = Nothing
, trebEnvConfig = conf
, trebEnvPgPool = pgPool
, trebEnvActiveUploads = activeUploads
, trebEnvCurrentUser = Nothing
, trebEnvUploadIdGen = uploadIdGen
, trebEnvBaseURI = baseURI }
processArgs :: TrebConfig -> [String] -> ExceptT String IO TrebConfig
processArgs conf [] = pure conf
processArgs conf (x :xs) | x == "-d" || x == "--debug" = processArgs (conf { confDebugMode = True }) xs
processArgs conf (x:y:xs) | x == "-c" || x == "--ssl-certificate" = processArgs (conf { confSSLCertPath = Just y }) xs
processArgs conf (x:y:xs) | x == "-k" || x == "--ssl-certificate-key" = processArgs (conf { confSSLCertKeyPath = Just y }) xs
processArgs conf (x:y:xs) | x == "-t" || x == "--job-template-directory" = processArgs (conf { confJobTemplateDir = y }) xs
processArgs conf (x:y:xs) | x == "-p" || x == "--port" = either
throwE
(\p -> processArgs (conf { confPort = p }) xs)
(readEither y)
processArgs conf (x:y:xs) | x == "-H" || x == "--oa-host" = processArgs (conf { confOAHost = Just y }) xs
processArgs conf (x:y:xs) | x == "-P" || x == "--oa-port" = processArgs (conf { confOAPort = Just y }) xs
processArgs conf (x:y:xs) | x == "-D" || x == "--oa-database" = processArgs (conf { confOADatabase = Just y }) xs
processArgs conf (x:y:xs) | x == "-U" || x == "--oa-username" = processArgs (conf { confOAUsername = Just y }) xs
processArgs conf (x:y:xs) | x == "-P" || x == "--oa-password" = processArgs (conf { confOAPassword = Just y }) xs
processArgs conf (x:y:xs) | x == "-C" || x == "--oa-cookie-domain" = processArgs (conf { confOADomain = Just y }) xs
processArgs conf (x:y:xs) | x == "-h" || x == "--pg-host" = processArgs (conf { confPGHost = Just y }) xs
processArgs conf (x:y:xs) | x == "-b" || x == "--pg-port" = processArgs (conf { confPGPort = Just y }) xs
processArgs conf (x:y:xs) | x == "-u" || x == "--pg-username" = processArgs (conf { confPGUsername = Just y }) xs
processArgs conf (x:y:xs) | x == "-w" || x == "--pg-password" = processArgs (conf { confPGPassword = Just y }) xs
processArgs conf (x:y:xs) | x == "-s" || x == "--pg-database" = processArgs (conf { confPGDatabase = Just y }) xs
processArgs conf (x:y:xs) | x == "-m" || x == "--pg-pool-max" = processArgs (conf { confPGPoolMax = Just y }) xs
processArgs conf (x:y:xs) | x == "-l" || x == "--pg-conn-lifetime" = processArgs (conf { confPGConnLifetime = Just y }) xs
processArgs conf (x:y:xs) | x == "-B" || x == "--base-uri" = processArgs (conf { confBaseURI = Just y }) xs
processArgs conf (x:_) = throwE $ "ERROR: Invalid command-line argument \'" ++ x ++ "\'."
getPool :: TrebConfig -> ExceptT String IO (H.Pool HP.Postgres)
getPool conf = do
mapM_ (\(attr, msg) ->
exceptIf
(isNothing $ attr conf)
$ msg ++ " for PostgreSQL database not given.")
[ (confPGHost, "Host")
, (confPGPort, "Port")
, (confPGUsername, "Username")
, (confPGPassword, "Password")
, (confPGDatabase, "Database name")
, (confPGPoolMax, "Maximum pool size")
, (confPGConnLifetime, "Connection duration") ]
pgPort <- either throwE return $ readEither $ fromJust $ confPGPort conf
pgPoolMax <- either throwE return $ readEither $ fromJust $ confPGPoolMax conf
pgConnLifetime <- either throwE return $ readEither $ fromJust $ confPGConnLifetime conf
maybe
(throwE "Invalid PostgreSQL pool settings.")
(liftIO . uncurry H.acquirePool)
$ (,) <$> (HP.ParamSettings <$> fmap BC.pack (confPGHost conf)
<*> pure pgPort
<*> fmap BC.pack (confPGUsername conf)
<*> fmap BC.pack (confPGPassword conf)
<*> fmap BC.pack (confPGDatabase conf))
<*> (fromMaybe Nothing $ H.poolSettings <$> pure pgPoolMax
<*> pure pgConnLifetime)
getJobTemplates :: FilePath -> IO [JobTemplate]
getJobTemplates templateDir = do
-- Get a list of job template file names
templateFiles' <- getDirectoryContents templateDir `catch` \e ->
if isDoesNotExistError e then do
fullTemplateDir <- makeAbsolute templateDir
putStrLn $ "ERROR: Job template specification directory '" ++ fullTemplateDir ++ "' does not exist."
createDirectoryIfMissing False fullTemplateDir
putStrLn $ "Made new directory '" ++ fullTemplateDir ++ "'."
return []
else
throw e
templateFiles <- filterM doesFileExist $ map (templateDir </>) templateFiles'
-- Get a list of decoded job templates
jobTemplates <- mapM (fmap eitherDecode . B.readFile) templateFiles
-- Print an error on each failure to decode a job template.
let parseResults = [ either (Left . ((,) f)) Right t | (f, t) <- zip templateFiles jobTemplates ]
results <- mapM (either printError (return . Just)) parseResults
-- Return only successfully parsed job templates
return $ map fromJust $ filter isJust results
where
printError (file, error) = do
putStrLn $ "ERROR: Failed to parse job template JSON: " ++ file ++ "\n\n" ++ error
return Nothing
defaultTrebConfig = TrebConfig
{ confDebugMode = False
, confPort = 3000
, confJobTemplateDir = "job_templates"
, confSSLCertPath = Nothing
, confSSLCertKeyPath = Nothing
, confOAHost = Nothing
, confOAPort = Nothing
, confOADatabase = Nothing
, confOAUsername = Nothing
, confOAPassword = Nothing
, confOADomain = Nothing
, confPGHost = Nothing
, confPGPort = Nothing
, confPGUsername = Nothing
, confPGPassword = Nothing
, confPGDatabase = Nothing
, confPGPoolMax = Nothing
, confPGConnLifetime = Nothing
, confBaseURI = Nothing }
ifDebugMode :: Monad m => TrebConfig -> m a -> m (Maybe a)
ifDebugMode conf action = bool (return Nothing) (action >>= return . Just) (confDebugMode conf)
unlessDebugMode :: Monad m => TrebConfig -> m a -> m (Maybe a)
unlessDebugMode conf action = bool (action >>= return . Just) (return Nothing) (confDebugMode conf)
| MadSciGuys/trebuchet | src/Treb/Config.hs | mit | 11,175 | 0 | 19 | 3,036 | 3,087 | 1,599 | 1,488 | 191 | 2 |
module Main where
import Codec.Picture
import Codec.Picture.Types
import Data.Maybe (fromJust)
import Data.Word (Word8)
import Data.List as L (transpose,foldl')
import Text.Printf (printf)
import Control.Arrow ((&&&))
import Options.Applicative
import qualified Data.ByteString as B
import System.IO (stdin)
data Options = Options
{ srcFile :: String
, width :: Int
, height :: Int
, trueColor :: Bool
}
options :: Parser Options
options = Options
<$> argument str (metavar "SRC" <> help "source file (or - for stdin)")
<*> argument auto (metavar "WIDTH" <> help "resulting width")
<*> argument auto (metavar "HEIGHT" <> help "resulting height")
<*> switch (long "256-colors" <> short 'c' <> help "only use 256-color-mode for old terminals")
opthelp :: ParserInfo Options
opthelp = info (helper <*> options)
( fullDesc
<> progDesc "An image to ASCII-Converter"
<> header "img2ascii - convert images to console-compatible text"
)
main :: IO ()
main = execParser opthelp >>= run
run :: Options -> IO ()
run (Options src w h redcol) = do
src' <- if src == "-" then B.getContents else B.readFile src
case decodeImage src' of
Left err -> putStrLn err
Right img ->
case extractDynImage img >>= pixelize w h of
Nothing -> return ()
Just (f,b) ->
let str = if redcol then img2ascii conv256 (f,b) else img2ascii conv (f,b)
in mapM_ (\x -> putStr x >> putStrLn "\x1b[0m") (concat <$> str)
chunksof :: Int -> [a] -> [[a]]
chunksof _ [] = []
chunksof c xs = take c xs : chunksof c (drop c xs)
conv :: (PixelRGB8,PixelRGB8) -> String
conv (fp@(PixelRGB8 fr fg fb),PixelRGB8 br bg bb) = printf "\x1b[48;2;%d;%d;%dm\x1b[38;2;%d;%d;%dm%c" br bg bb fr fg fb (lumi.computeLuma $ fp)
where
lumi :: Word8 -> Char
lumi x
| x > 225 = '@'
| x > 180 = 'O'
| x > 150 = 'X'
| x > 50 = 'o'
| x > 25 = 'x'
| x > 10 = '.'
| otherwise = ' '
conv256 :: (PixelRGB8,PixelRGB8) -> String
conv256 (fp@(PixelRGB8 fr fg fb),PixelRGB8 br bg bb) = printf "\x1b[48;5;%dm\x1b[38;5;%dm%c" bcolor fcolor (lumi.computeLuma $ fp)
where
-- converts [0..255] -> [0..5]
s = (`div` 51)
-- conversion: 6x6x6 rgb-cube so color is red * 36 + green * 6 + blue + 16 offset with red/green/blue in [0..5]
bcolor = s br * 36 + s bg * 6 + s bb + 16
fcolor = s fr * 36 + s fg * 6 + s fb + 16
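    -- worked example: pure white (255,255,255) gives s=5 per channel,
    -- i.e. 5*36 + 5*6 + 5 + 16 = 231, the xterm-256 palette entry for white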
lumi :: Word8 -> Char
lumi x
| x > 225 = '@'
| x > 180 = 'O'
| x > 150 = 'X'
| x > 50 = 'o'
| x > 25 = 'x'
| x > 10 = '.'
| otherwise = ' '
img2ascii :: ((PixelRGB8,PixelRGB8) -> String) -> (Image PixelRGB8,Image PixelRGB8) -> [[String]]
img2ascii c (fg@(Image w h _),bg@(Image w' h' _)) = (fmap.fmap) (c.(uncurry (pixelAt fg) &&& uncurry (pixelAt bg))) [[(x,y) | x <- [0..w-1]] | y <- [0..h-1]]
pixelize :: Int -> Int -> Image PixelRGB8 -> Maybe (Image PixelRGB8,Image PixelRGB8)
pixelize tw th im@(Image iw ih id) =
if windoww == 0 || windowh == 0 then
Nothing
else Just (snd $ generateFoldImage (folder filterfun windoww windowh) im tw th,
snd $ generateFoldImage (folder filterfuninv windoww windowh) im tw th)
where
windoww = (fromIntegral iw) / fromIntegral tw
windowh = fromIntegral ih / fromIntegral th
folder :: ((PixelRGB8, Int, Int) -> (PixelRGB8, Int, Int) -> (PixelRGB8, Int, Int)) -> Double -> Double -> Image PixelRGB8 -> Int -> Int -> (Image PixelRGB8, PixelRGB8)
folder f ww wh im@(Image iw ih id) x y = (im,(\(a,_,_) -> a) $ L.foldl' f (pixelAt im x' y',0,0)
[ (pixelAt im (x'+dx) (y'+dy),dx,dy)
| dx <- [-dw..dw]
, dy <- [-dw..dw]
, x'+dx > 0 && x'+dx < iw
, y'+dy > 0 && y'+dy < ih
])
where
dw = floor $ ww
x' = floor $ fromIntegral x * ww
y' = floor $ fromIntegral y * wh
filterfun :: (PixelRGB8,Int,Int) -> (PixelRGB8, Int, Int) -> (PixelRGB8,Int,Int)
filterfun (x@(PixelRGB8 r g b),_,_) (y@(PixelRGB8 r' g' b'),_,_) = if computeLuma x > computeLuma y then (x,0,0) else (y,0,0)
filterfuninv :: (PixelRGB8,Int,Int) -> (PixelRGB8, Int, Int) -> (PixelRGB8,Int,Int)
filterfuninv (x@(PixelRGB8 r g b),_,_) (y@(PixelRGB8 r' g' b'),_,_) = if computeLuma x < computeLuma y then (x,0,0) else (y,0,0)
extractDynImage :: DynamicImage -> Maybe (Image PixelRGB8)
extractDynImage image =
case image of
ImageY8 img -> Just $ promoteImage img
ImageY16 img -> Nothing
ImageYF img -> Nothing
ImageYA8 img -> Just $ promoteImage img
ImageYA16 img -> Nothing
ImageRGB8 img -> Just img
ImageRGB16 img -> Nothing
ImageRGBF img -> Nothing
ImageRGBA8 img -> Just $ pixelMap dropTransparency img
ImageRGBA16 img -> Nothing
ImageYCbCr8 img -> Just $ convertImage img
ImageCMYK8 img -> Just $ convertImage img
ImageCMYK16 img -> Nothing
| Drezil/img2ascii | src/Main.hs | mit | 5,308 | 1 | 19 | 1,646 | 2,106 | 1,102 | 1,004 | 108 | 13 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Celtchar.Novel.Structure where
import Data.Yaml
import GHC.Generics
data Language = French | English
deriving (Generic)
data Document = Document FilePath
deriving (Generic, Show)
instance FromJSON Document where
parseJSON v = Document <$> parseJSON v
data Chapter = Chapter { chapterTitle :: Maybe String
, documents :: [Document]
}
deriving (Generic, Show)
instance FromJSON Chapter where
parseJSON (Object v) = Chapter <$> v .:? "title"
<*> v .: "documents"
data Part = Part { partTitle :: String
, chapters :: [Chapter]
}
deriving (Generic, Show)
instance FromJSON Part where
parseJSON (Object v) = Part <$> v .: "title"
<*> v .: "chapters"
data Manuscript = Manuscript [Part]
deriving (Generic, Show)
instance FromJSON Manuscript where
parseJSON v = Manuscript <$> parseJSON v
instance FromJSON Language where
parseJSON (String "english") = pure English
parseJSON (String "french") = pure French
parseJSON _ = fail "unknown language"
instance Show Language where
show English = "english"
show French = "french"
data Novel = Novel { author :: String
, language :: Language
, novelTitle :: String
, frontmatter :: Maybe [Chapter]
, manuscript :: Manuscript
, appendix :: Maybe [Chapter]
}
deriving (Generic, Show)
instance FromJSON Novel where
parseJSON (Object v) = Novel <$> v .: "author"
<*> v .: "language"
<*> v .: "title"
<*> v .:? "frontmatter"
<*> v .: "manuscript"
<*> v .:? "appendix"
getNovelStructure :: FilePath -> IO (Either String Novel)
getNovelStructure conf = do
ec <- decodeFileEither conf
case ec of
Right novel ->
pure (Right novel)
Left ex -> do
pure (Left $ prettyPrintParseException ex)
| ogma-project/celtchar | src/Celtchar/Novel/Structure.hs | mit | 2,311 | 0 | 17 | 823 | 579 | 305 | 274 | 58 | 2 |
module Handler.EditThread where
import Authentification (isModeratorBySession, getThreadPermissions)
import Captcha
import CustomForms (threadMForm)
import Import
import Helper (spacesToMinus)
import Widgets (threadWidget, postWidget, accountLinksW)
getEditThreadR :: ThreadId -> Handler Html
getEditThreadR tid = do
-- db && auth
(thread, isMod) <- runDB $ do
t <- get404 tid
isMod <- isModeratorBySession
return (t, isMod)
isAuthor <- getThreadPermissions thread
case (isAuthor || isMod) of
True -> do
-- captcha
equation <- liftIO $ createMathEq
setSession "captcha" (eqResult equation)
-- form
(widget, enctype) <- generateFormPost $ threadMForm equation "Update thread" (Just $ threadTitle thread) (Just $ threadContent thread)
-- widgets
let headline = threadTitle thread
leftWidget = threadWidget isMod tid thread
rightWidget = postWidget enctype widget
defaultLayout $(widgetFile "left-right-layout")
(_) -> redirectUltDest HomeR
postEditThreadR :: ThreadId -> Handler Html
postEditThreadR tid = do
-- captcha
captcha <- getCaptchaBySession
equation <- liftIO $ createMathEq
setSession "captcha" (eqResult equation)
-- db & auth
(thread, isMod) <- runDB $ do
t <- get404 tid
isMod <- isModeratorBySession
return (t, isMod)
isAuthor <- getThreadPermissions thread
-- widgets
let headline = threadTitle thread
leftWidget = threadWidget isMod tid thread
case (isAuthor || isMod) of
True -> do
((result, widget),enctype)<- runFormPost $ threadMForm equation "Update thread" (Just $ threadTitle thread) (Just $ threadContent thread)
case result of
(FormSuccess mthread) -> do
let newThread = mthread (threadCreator thread)
case (threadCaptcha newThread) == captcha of
True -> do
runDB $ replace tid $ newThread {threadPosts = (threadPosts thread), threadCreator = (threadCreator thread)}
redirect $ ThreadR (spacesToMinus $ threadTitle newThread)
False -> do
let rightWidget = [whamlet|<span .simpleBlack> Sorry, the captcha is wrong|] >> postWidget enctype widget
defaultLayout $(widgetFile "left-right-layout")
(FormFailure (err:_)) -> do
let rightWidget = [whamlet|<span .simpleBlack> #{err}|] >> postWidget enctype widget
defaultLayout $(widgetFile "left-right-layout")
(_) -> do
let rightWidget = [whamlet|<span .simpleBlack> Something went wrong, please try again|] >> postWidget enctype widget
defaultLayout $(widgetFile "left-right-layout")
(_) -> redirectUltDest HomeR
| cirquit/HaskellPie | HaskellPie/Handler/EditThread.hs | mit | 3,045 | 0 | 26 | 973 | 761 | 379 | 382 | -1 | -1 |
module Lexer where
import Text.Parsec.String (Parser)
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as Tok
lexer :: Tok.TokenParser ()
lexer = Tok.makeTokenParser style
where
ops = ["+","*","-",";"]
names = ["def","extern"]
style = emptyDef {
Tok.commentLine = "#"
,Tok.reservedOpNames = ops
,Tok.reservedNames = names
}
integer :: Parser Integer
integer = Tok.integer lexer
float :: Parser Double
float = Tok.float lexer
parens :: Parser a -> Parser a
parens = Tok.parens lexer
commaSep :: Parser a -> Parser [a]
commaSep = Tok.commaSep lexer
semiSep :: Parser a -> Parser [a]
semiSep = Tok.semiSep lexer
identifier :: Parser String
identifier = Tok.identifier lexer
reserved :: String -> Parser ()
reserved = Tok.reserved lexer
reservedOp :: String -> Parser ()
reservedOp = Tok.reservedOp lexer
| raulfpl/kaleidoscope | src/chapter2/Lexer.hs | mit | 946 | 0 | 8 | 240 | 301 | 164 | 137 | 28 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Core
(
NonUnitVector
, UnitVector
, Point(..)
, Ray(..)
, Transform(..)
, RayPosition
, VectorUnaryOps(..)
, VectorBinaryOps(..)
, RefractiveIndex
, RayWithMedium(..)
, vector
, normal
, unsafeForceUnitVector
, origin
, to
, normalize
, normalizeWithLength
, at
, toRayPosition
, magnitude
, magnitudeSquared
, unitX
, unitY
, unitZ
, perpendiculars
, calculateReflection
, calculateRefraction
, refractiveIndexAir
, refractiveIndexGlass
)
where
import Numeric.FastMath ( )
import Control.DeepSeq ( NFData(..) )
data Vector = Vector !Double !Double !Double
instance NFData Vector where
rnf !(Vector !_ !_ !_) = ()
newtype NonUnitVector = NonUnitVector Vector
instance NFData NonUnitVector where
rnf !(NonUnitVector !(Vector !_ !_ !_)) = ()
newtype UnitVector = UnitVector Vector
instance NFData UnitVector where
rnf !(UnitVector !(Vector !_ !_ !_)) = ()
data Point = Point !Double !Double !Double
instance NFData Point where
rnf !(Point !_ !_ !_) = ()
data Ray = Ray
{ rayOrigin :: !Point
, rayDirection :: !UnitVector
}
newtype RayPosition = RayPosition Double
deriving (Eq, Ord)
class Transform t where
translate :: NonUnitVector -> t -> t
class VectorUnaryOps v where
neg :: v -> v
vectorValues :: v -> (Double, Double, Double)
(|*|) :: v -> Double -> NonUnitVector
rotateAround :: v -> UnitVector -> Double -> v
class (VectorUnaryOps v1, VectorUnaryOps v2) => VectorBinaryOps v1 v2 where
(|.|) :: v1 -> v2 -> Double
(|+|) :: v1 -> v2 -> NonUnitVector
(|-|) :: v1 -> v2 -> NonUnitVector
(|-|) v1 v2 = v1 |+| neg v2
cross :: v1 -> v2 -> NonUnitVector
instance Transform Point where
translate (NonUnitVector (Vector !vx !vy !vz)) (Point !px !py !pz) =
Point (px + vx)
(py + vy)
(pz + vz)
instance Transform Ray where
translate !v (Ray !ro !rd) =
Ray { rayOrigin = translate v ro
, rayDirection = rd
}
instance VectorUnaryOps NonUnitVector where
neg (NonUnitVector (Vector !xv !yv !zv)) =
vector (-xv)
(-yv)
(-zv)
vectorValues (NonUnitVector (Vector !x !y !z)) =
(x, y, z)
(|*|) (NonUnitVector (Vector !vx !vy !vz)) !s =
vector (vx * s)
(vy * s)
(vz * s)
rotateAround v k theta =
(v |*| cosTheta ) |+| ((k `cross` v) |*| sinTheta) |+| (k |*| ((k |.| v) * (1.0 - cosTheta)))
where
cosTheta = cos theta
sinTheta = sin theta
instance VectorUnaryOps UnitVector where
neg (UnitVector (Vector !xv !yv !zv)) =
UnitVector (Vector (-xv)
(-yv)
(-zv))
vectorValues (UnitVector (Vector !x !y !z)) =
(x, y, z)
(|*|) (UnitVector (Vector !vx !vy !vz)) !s =
vector (vx * s)
(vy * s)
(vz * s)
rotateAround v k theta =
normalize ((v |*| cosTheta ) |+|
((k `cross` v) |*| sinTheta) |+|
(k |*| ((k |.| v) * (1.0 - cosTheta))))
where
cosTheta = cos theta
sinTheta = sin theta
instance VectorBinaryOps NonUnitVector NonUnitVector where
(|.|) (NonUnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vx * wx
+ vy * wy
+ vz * wz
(|+|) (NonUnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vector (vx + wx)
(vy + wy)
(vz + wz)
cross (NonUnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vector (vy * wz - vz * wy)
(vz * wx - vx * wz)
(vx * wy - vy * wx)
instance VectorBinaryOps NonUnitVector UnitVector where
(|.|) (NonUnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vx * wx
+ vy * wy
+ vz * wz
(|+|) (NonUnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vector (vx + wx)
(vy + wy)
(vz + wz)
cross (NonUnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vector (vy * wz - vz * wy)
(vz * wx - vx * wz)
(vx * wy - vy * wx)
instance VectorBinaryOps UnitVector UnitVector where
(|.|) (UnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vx * wx
+ vy * wy
+ vz * wz
(|+|) (UnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vector (vx + wx)
(vy + wy)
(vz + wz)
cross (UnitVector (Vector !vx !vy !vz)) (UnitVector (Vector !wx !wy !wz)) =
vector (vy * wz - vz * wy)
(vz * wx - vx * wz)
(vx * wy - vy * wx)
instance VectorBinaryOps UnitVector NonUnitVector where
(|.|) (UnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vx * wx
+ vy * wy
+ vz * wz
(|+|) (UnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vector (vx + wx)
(vy + wy)
(vz + wz)
cross (UnitVector (Vector !vx !vy !vz)) (NonUnitVector (Vector !wx !wy !wz)) =
vector (vy * wz - vz * wy)
(vz * wx - vx * wz)
(vx * wy - vy * wx)
vector :: Double -> Double -> Double -> NonUnitVector
vector !x !y !z =
NonUnitVector (Vector x y z)
unitX :: UnitVector
unitX = UnitVector (Vector 1.0 0.0 0.0)
unitY :: UnitVector
unitY = UnitVector (Vector 0.0 1.0 0.0)
unitZ :: UnitVector
unitZ = UnitVector (Vector 0.0 0.0 1.0)
normal :: Double -> Double -> Double -> UnitVector
normal !x !y !z =
normalize $ vector x y z
unsafeForceUnitVector :: NonUnitVector -> UnitVector
unsafeForceUnitVector (NonUnitVector v) =
UnitVector v
origin :: Point
origin = Point 0.0 0.0 0.0
to :: Point -> Point -> NonUnitVector
to (Point !px !py !pz) (Point !qx !qy !qz)
= vector (qx - px)
(qy - py)
(qz - pz)
normalize :: NonUnitVector -> UnitVector
normalize !v =
UnitVector (Vector nx ny nz)
where
!m = 1.0 / magnitude v
(NonUnitVector (Vector !nx !ny !nz)) = v |*| m
normalizeWithLength :: NonUnitVector -> (UnitVector, RayPosition)
normalizeWithLength !v =
(UnitVector (Vector nx ny nz), RayPosition mag)
where
!mag = magnitude v
!m = 1.0 / mag
(NonUnitVector (Vector !nx !ny !nz)) = v |*| m
magnitude :: NonUnitVector -> Double
magnitude =
sqrt . magnitudeSquared
magnitudeSquared :: NonUnitVector -> Double
magnitudeSquared (NonUnitVector (Vector !vx !vy !vz)) =
vx * vx
+ vy * vy
+ vz * vz
at :: Ray -> RayPosition -> Point
at (Ray !ro !rd) (RayPosition !t) =
translate (rd |*| t) ro
toRayPosition :: Double -> RayPosition
toRayPosition =
RayPosition
perpendiculars :: UnitVector -> (NonUnitVector, NonUnitVector)
perpendiculars n =
(vb1, vb2)
where
(!nx, !ny, _) = vectorValues n
!vb1pre = if abs nx > abs ny then unitY else unitX
vb1 = vb1pre |-| (n |*| (n |.| vb1pre))
vb2 = n `cross` vb1
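-- Reflect an incoming unit direction about the surface normal:
-- with c1 = -(n . d), the result is d + (2 * c1) * n, renormalized.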
calculateReflection :: UnitVector -> UnitVector -> UnitVector
calculateReflection !incoming !surfaceNormal =
normalize (incoming |+| (surfaceNormal |*| (2 * c1)))
where
!c1 = - (surfaceNormal |.| incoming)
newtype RefractiveIndex = RefractiveIndex Double
data RayWithMedium = RayWithMedium Ray RefractiveIndex
refractiveIndexAir :: RefractiveIndex
refractiveIndexAir = RefractiveIndex 1.0
refractiveIndexGlass :: RefractiveIndex
refractiveIndexGlass = RefractiveIndex 1.5
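-- Refract an incoming unit direction across the boundary between two media
-- (Snell's law). When sin^2(theta_t) > 1 no transmitted ray exists, so the
-- function falls back to total internal reflection and keeps the first
-- medium's refractive index.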
calculateRefraction :: UnitVector -> UnitVector -> RefractiveIndex -> RefractiveIndex -> (UnitVector, RefractiveIndex)
calculateRefraction !incoming !surfaceNormal (RefractiveIndex !ri1) (RefractiveIndex !ri2) =
if sin2ThetaT > 1.0
then (calculateReflection incoming surfaceNormal, (RefractiveIndex ri1))
else (normalize ((incoming |*| ri1ri2) |+| (surfaceNormal |*| factor)), (RefractiveIndex ri2))
where
!sin2ThetaT = (ri1ri2 * ri1ri2) * (1.0 - cosThetaI * cosThetaI)
!ri1ri2 = ri1 / ri2
    !cosThetaI  = negate (incoming |.| surfaceNormal)
factor = ri1ri2 * cosThetaI - sqrt (1.0 - sin2ThetaT)
| stu-smith/rendering-in-haskell | src/experiment08/Core.hs | mit | 8,275 | 0 | 14 | 2,378 | 3,236 | 1,636 | 1,600 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html
module Stratosphere.ResourceProperties.CloudFrontDistributionOrigin where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.CloudFrontDistributionCustomOriginConfig
import Stratosphere.ResourceProperties.CloudFrontDistributionOriginCustomHeader
import Stratosphere.ResourceProperties.CloudFrontDistributionS3OriginConfig
-- | Full data type definition for CloudFrontDistributionOrigin. See
-- 'cloudFrontDistributionOrigin' for a more convenient constructor.
data CloudFrontDistributionOrigin =
CloudFrontDistributionOrigin
{ _cloudFrontDistributionOriginCustomOriginConfig :: Maybe CloudFrontDistributionCustomOriginConfig
, _cloudFrontDistributionOriginDomainName :: Val Text
, _cloudFrontDistributionOriginId :: Val Text
, _cloudFrontDistributionOriginOriginCustomHeaders :: Maybe [CloudFrontDistributionOriginCustomHeader]
, _cloudFrontDistributionOriginOriginPath :: Maybe (Val Text)
, _cloudFrontDistributionOriginS3OriginConfig :: Maybe CloudFrontDistributionS3OriginConfig
} deriving (Show, Eq)
instance ToJSON CloudFrontDistributionOrigin where
toJSON CloudFrontDistributionOrigin{..} =
object $
catMaybes
[ fmap (("CustomOriginConfig",) . toJSON) _cloudFrontDistributionOriginCustomOriginConfig
, (Just . ("DomainName",) . toJSON) _cloudFrontDistributionOriginDomainName
, (Just . ("Id",) . toJSON) _cloudFrontDistributionOriginId
, fmap (("OriginCustomHeaders",) . toJSON) _cloudFrontDistributionOriginOriginCustomHeaders
, fmap (("OriginPath",) . toJSON) _cloudFrontDistributionOriginOriginPath
, fmap (("S3OriginConfig",) . toJSON) _cloudFrontDistributionOriginS3OriginConfig
]
-- | Constructor for 'CloudFrontDistributionOrigin' containing required fields
-- as arguments.
cloudFrontDistributionOrigin
:: Val Text -- ^ 'cfdoDomainName'
-> Val Text -- ^ 'cfdoId'
-> CloudFrontDistributionOrigin
cloudFrontDistributionOrigin domainNamearg idarg =
CloudFrontDistributionOrigin
{ _cloudFrontDistributionOriginCustomOriginConfig = Nothing
, _cloudFrontDistributionOriginDomainName = domainNamearg
, _cloudFrontDistributionOriginId = idarg
, _cloudFrontDistributionOriginOriginCustomHeaders = Nothing
, _cloudFrontDistributionOriginOriginPath = Nothing
, _cloudFrontDistributionOriginS3OriginConfig = Nothing
}
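-- A minimal usage sketch with hypothetical values; it relies on the string
-- literals stratosphere accepts for 'Val Text', and optional fields can then
-- be set via the lenses below.
exampleOrigin :: CloudFrontDistributionOrigin
exampleOrigin = cloudFrontDistributionOrigin "assets.example.com" "exampleS3Origin"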
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-customoriginconfig
cfdoCustomOriginConfig :: Lens' CloudFrontDistributionOrigin (Maybe CloudFrontDistributionCustomOriginConfig)
cfdoCustomOriginConfig = lens _cloudFrontDistributionOriginCustomOriginConfig (\s a -> s { _cloudFrontDistributionOriginCustomOriginConfig = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-domainname
cfdoDomainName :: Lens' CloudFrontDistributionOrigin (Val Text)
cfdoDomainName = lens _cloudFrontDistributionOriginDomainName (\s a -> s { _cloudFrontDistributionOriginDomainName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-id
cfdoId :: Lens' CloudFrontDistributionOrigin (Val Text)
cfdoId = lens _cloudFrontDistributionOriginId (\s a -> s { _cloudFrontDistributionOriginId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-origincustomheaders
cfdoOriginCustomHeaders :: Lens' CloudFrontDistributionOrigin (Maybe [CloudFrontDistributionOriginCustomHeader])
cfdoOriginCustomHeaders = lens _cloudFrontDistributionOriginOriginCustomHeaders (\s a -> s { _cloudFrontDistributionOriginOriginCustomHeaders = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-originpath
cfdoOriginPath :: Lens' CloudFrontDistributionOrigin (Maybe (Val Text))
cfdoOriginPath = lens _cloudFrontDistributionOriginOriginPath (\s a -> s { _cloudFrontDistributionOriginOriginPath = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-origin.html#cfn-cloudfront-distribution-origin-s3originconfig
cfdoS3OriginConfig :: Lens' CloudFrontDistributionOrigin (Maybe CloudFrontDistributionS3OriginConfig)
cfdoS3OriginConfig = lens _cloudFrontDistributionOriginS3OriginConfig (\s a -> s { _cloudFrontDistributionOriginS3OriginConfig = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CloudFrontDistributionOrigin.hs | mit | 4,862 | 0 | 13 | 411 | 617 | 353 | 264 | 52 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-secretsmanager-secrettargetattachment.html
module Stratosphere.Resources.SecretsManagerSecretTargetAttachment where
import Stratosphere.ResourceImports
-- | Full data type definition for SecretsManagerSecretTargetAttachment. See
-- 'secretsManagerSecretTargetAttachment' for a more convenient constructor.
data SecretsManagerSecretTargetAttachment =
SecretsManagerSecretTargetAttachment
{ _secretsManagerSecretTargetAttachmentSecretId :: Val Text
, _secretsManagerSecretTargetAttachmentTargetId :: Val Text
, _secretsManagerSecretTargetAttachmentTargetType :: Val Text
} deriving (Show, Eq)
instance ToResourceProperties SecretsManagerSecretTargetAttachment where
toResourceProperties SecretsManagerSecretTargetAttachment{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::SecretsManager::SecretTargetAttachment"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ (Just . ("SecretId",) . toJSON) _secretsManagerSecretTargetAttachmentSecretId
, (Just . ("TargetId",) . toJSON) _secretsManagerSecretTargetAttachmentTargetId
, (Just . ("TargetType",) . toJSON) _secretsManagerSecretTargetAttachmentTargetType
]
}
-- | Constructor for 'SecretsManagerSecretTargetAttachment' containing
-- required fields as arguments.
secretsManagerSecretTargetAttachment
:: Val Text -- ^ 'smstaSecretId'
-> Val Text -- ^ 'smstaTargetId'
-> Val Text -- ^ 'smstaTargetType'
-> SecretsManagerSecretTargetAttachment
secretsManagerSecretTargetAttachment secretIdarg targetIdarg targetTypearg =
SecretsManagerSecretTargetAttachment
{ _secretsManagerSecretTargetAttachmentSecretId = secretIdarg
, _secretsManagerSecretTargetAttachmentTargetId = targetIdarg
, _secretsManagerSecretTargetAttachmentTargetType = targetTypearg
}
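-- Usage sketch with hypothetical values: attaching a secret to an RDS instance
-- (again relying on the string literals stratosphere accepts for 'Val Text').
exampleAttachment :: SecretsManagerSecretTargetAttachment
exampleAttachment =
  secretsManagerSecretTargetAttachment "MySecretRef" "MyDBInstanceRef" "AWS::RDS::DBInstance"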
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-secretsmanager-secrettargetattachment.html#cfn-secretsmanager-secrettargetattachment-secretid
smstaSecretId :: Lens' SecretsManagerSecretTargetAttachment (Val Text)
smstaSecretId = lens _secretsManagerSecretTargetAttachmentSecretId (\s a -> s { _secretsManagerSecretTargetAttachmentSecretId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-secretsmanager-secrettargetattachment.html#cfn-secretsmanager-secrettargetattachment-targetid
smstaTargetId :: Lens' SecretsManagerSecretTargetAttachment (Val Text)
smstaTargetId = lens _secretsManagerSecretTargetAttachmentTargetId (\s a -> s { _secretsManagerSecretTargetAttachmentTargetId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-secretsmanager-secrettargetattachment.html#cfn-secretsmanager-secrettargetattachment-targettype
smstaTargetType :: Lens' SecretsManagerSecretTargetAttachment (Val Text)
smstaTargetType = lens _secretsManagerSecretTargetAttachmentTargetType (\s a -> s { _secretsManagerSecretTargetAttachmentTargetType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/SecretsManagerSecretTargetAttachment.hs | mit | 3,164 | 0 | 15 | 317 | 370 | 211 | 159 | 37 | 1 |
{-
******************************************************************************
* JSHOP *
* *
* Module: TestSuite *
* Purpose: A set of tests to run on a selection of inputs *
* Author: Nick Brunt *
* *
* Copyright (c) Nick Brunt, 2011 - 2012 *
* Subject to MIT License as stated in root directory *
* *
******************************************************************************
-}
module TestSuite where
-- Standard library imports
import System.Directory
import System.CPUTime
import Data.Maybe
import Control.Monad
-- JSHOP module imports
import Utilities
-- Test result data structures
data TestResults =
TestResults {
testNum :: Int,
message :: String,
strucTests :: [Test],
libTests :: [Test],
time :: Double,
average :: Float
}
deriving (Read, Show)
data Test =
Test {
name :: String,
result :: Bool, -- True = pass, False = fail
errorMsg :: String,
inputSize :: Int,
outputSize :: Int,
reduction :: Int,
percentage :: Float
}
deriving (Read, Show)
defaultTest :: Test
defaultTest =
Test {
name = "",
result = False,
errorMsg = "",
inputSize = 0,
outputSize = 0,
reduction = 0,
percentage = 0
}
testResultsFile :: String
testResultsFile = "tests/testResults.log"
-- Structure tests
funcFile :: String
funcFile = "tests/structure/functions.js"
exprFile :: String
exprFile = "tests/structure/expressions.js"
statFile :: String
statFile = "tests/structure/statements.js"
runTests :: Maybe [String] -> IO()
runTests mbArgs = do
startTime <- getCPUTime
putStrLn "Starting test suite"
putStrLn "-------------------\n"
let msg = head $ fromMaybe ["No message"] mbArgs
-- Structure tests run every possible JavaScript control structure
-- through the program to test that they can be fully parsed.
funcTest <- runParseTest (defaultTest {name="Functions"}) funcFile
exprTest <- runParseTest (defaultTest {name="Expressions"}) exprFile
statTest <- runParseTest (defaultTest {name="Statements"}) statFile
-- Library tests run a set of JavaScript libraries through the program
-- to test real world code and also to check compression ratios.
-- Get list of files in libraries directory
files <- getDirectoryContents "tests/libraries"
-- Filter out ".." and "." and add path
let names = filter (\x -> head x /= '.') files
let libs = ["tests/libraries/" ++ f | f <- names]
let libTests = [defaultTest {name=libName} | libName <- names]
libTests' <- zipWithM runParseTest libTests libs
nextTestNum <- getNextTestNum
endTime <- getCPUTime
let testResults = TestResults {
testNum = nextTestNum,
message = msg,
strucTests = funcTest:exprTest:[statTest],
libTests = libTests',
time = calcTime startTime endTime,
average = mean $ map percentage libTests'
}
-- Pretty print results
putStrLn $ ppTestResults testResults ""
-- Write results to file
if msg /= "No message" then
if nextTestNum == 0 then
writeFile testResultsFile (show testResults)
else
appendFile testResultsFile ('\n':(show testResults))
else
putStr ""
runParseTest :: Test -> String -> IO Test
runParseTest test file = do
input <- readFile file
let parseOutput = parseJS input
case parseOutput of
Left error -> do
return (test {
result = False,
errorMsg = error,
inputSize = length input
})
Right (tree, state) -> do
let output = genJS tree
let outFile = outTestFile file
saveFile outFile output
-- Write to file
return (test {
result = True,
inputSize = length input,
outputSize = length output,
reduction = (length input) - (length output),
percentage = calcRatio input output
})
where
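    -- Maps an input path to the corresponding minified output path, e.g.
    -- outTestFile "tests/libraries/jquery.js" == "tests/outputLibraries/jquery.min.js"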
outTestFile :: String -> String
outTestFile inFile = "tests/outputLibraries/" ++ minFile
where
file = reverse $ takeWhile (/='/') $ reverse inFile
minFile = reverse $ takeWhile (/='.') (reverse file)
++ ".nim" ++ dropWhile (/='.') (reverse file)
showPastResults :: IO()
showPastResults = do
f <- readFile testResultsFile
let results = [ppTestResults tr "" | tr <- map read (lines f)]
mapM_ putStrLn results
showLastResult :: IO()
showLastResult = do
f <- readFile testResultsFile
putStrLn $ ppTestResults (read $ last (lines f)) ""
showResult :: Int -> IO()
showResult n = do
f <- readFile testResultsFile
putStrLn $ ppTestResults (read $ (lines f) !! n) ""
showAverages :: IO()
showAverages = do
putStrLn "Percentage of output to input:\n"
f <- readFile testResultsFile
let tests = [tr | tr <- map read (lines f)]
let strings = ["Test " ++ (show $ testNum t) ++
" average:\t" ++ (show $ average t) ++
" \t" ++ (message t) | t <- tests]
mapM_ putStrLn strings
getNextTestNum :: IO Int
getNextTestNum = do
f <- readFile testResultsFile
return $ length $ lines f
ppTestResults :: TestResults -> ShowS
ppTestResults (TestResults {testNum = n, message = m,
strucTests = sts, libTests = lts,
time = t, average = a}) =
showString "Test number" . spc . showString (show n) . nl
. indent 1 . showString "Message:" . spc . showString m . nl . nl
. indent 1 . showString "STRUCTURE TESTS" . nl
. ppSeq 1 ppTest sts . nl
. indent 1 . showString "LIBRARY TESTS" . nl
. ppSeq 1 ppTest lts . nl
. showString "Completed in" . spc . showString (take 5 (show t))
. spc . showString "seconds" . nl
. showString "Average compression:" . spc
. showString (take 5 (show a)) . showChar '%' . nl
ppTest :: Int -> Test -> ShowS
ppTest idnt (Test {name = n, result = r, errorMsg = e,
inputSize = i, outputSize = o,
reduction = d, percentage = p}) =
indent idnt . showString n . nl
. indent (idnt+1) . showString "Result:" . spc . ppResult r . nl
. ppErrorMsg (idnt+1) e
. indent (idnt+1) . showString "Input size:" . spc . showString (show i) . nl
. indent (idnt+1) . showString "Output size:" . spc . showString (show o) . nl
. indent (idnt+1) . showString "Reduced by:" . spc . showString (show d) . nl
. indent (idnt+1) . showString "Percentage of original:" . spc
. showString (take 5 (show p)) . showChar '%' . nl
ppResult :: Bool -> ShowS
ppResult True = showString "PASS"
ppResult False = showString "FAIL"
ppErrorMsg :: Int -> String -> ShowS
ppErrorMsg _ "" = showString ""
ppErrorMsg n msg = indent n . showString "Error message:"
. spc . showString msg . nl | nbrunt/JSHOP | src/TestSuite.hs | mit | 7,600 | 0 | 36 | 2,592 | 2,005 | 1,031 | 974 | 156 | 3 |
-- Point-free "argument swap": turns a function on pairs into one that takes
-- its pair argument with the components swapped.
swap :: ((a, b) -> c) -> (b, a) -> c
swap = id . uncurry . flip . curry
| nkartashov/haskell | exam/swap.hs | gpl-2.0 | 38 | 1 | 7 | 12 | 21 | 9 | 12 | 1 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
module Util.FailDoc where
import Autolib.Reporter
import Autolib.ToDoc
import Text.ParserCombinators.Parsec
class Monad m => FailDoc m where
failDoc :: Doc -> m a
instance FailDoc (GenParser tok st) where
failDoc = fail . show
instance FailDoc Reporter where
failDoc = reject
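-- Usage sketch: a computation in either monad can abort with a pretty-printed
-- document, e.g. @failDoc (text "unexpected input")@, assuming the usual 'text'
-- combinator from Autolib.ToDoc is in scope.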
| Erdwolf/autotool-bonn | src/Util/FailDoc.hs | gpl-2.0 | 333 | 0 | 8 | 63 | 89 | 48 | 41 | 11 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./OWL2/Sublogic.hs
Copyright : (c) Dominik Luecke, Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : f.mance@jacobs-university.de
Stability : provisional
Portability : portable
Complexity analysis of OWL2
-}
module OWL2.Sublogic where
import OWL2.AS
import OWL2.Sign
import OWL2.Morphism
import Data.List
import Data.Data
import qualified Data.Set as Set
data NumberRestrictions = None | Unqualified | Qualified
deriving (Show, Eq, Ord, Typeable, Data)
owlDatatypes :: Set.Set Datatype
owlDatatypes = predefIRIs
data OWLSub = OWLSub
{ numberRestrictions :: NumberRestrictions
, nominals :: Bool
, inverseRoles :: Bool
, roleTransitivity :: Bool
, roleHierarchy :: Bool
, complexRoleInclusions :: Bool
, addFeatures :: Bool
, datatype :: Set.Set Datatype
} deriving (Show, Eq, Ord, Typeable, Data)
allSublogics :: [[OWLSub]]
allSublogics = let
t = True
b = slBottom
in
[ [ b { numberRestrictions = Unqualified }
, b { numberRestrictions = Qualified } ]
, [b { nominals = t } ]
, [b { inverseRoles = t } ]
, [b { roleTransitivity = t } ]
, [b { roleHierarchy = t } ]
, [b { complexRoleInclusions = t } ]
, [b { addFeatures = t } ]
, map (\ d -> b { datatype = Set.singleton d }) $ Set.toList owlDatatypes ]
-- | sROIQ(D)
slTop :: OWLSub
slTop = OWLSub
{ numberRestrictions = Qualified
, nominals = True
, inverseRoles = True
, roleTransitivity = True
, roleHierarchy = True
, complexRoleInclusions = True
, addFeatures = True
, datatype = owlDatatypes
}
-- | ALC
slBottom :: OWLSub
slBottom = OWLSub
{ numberRestrictions = None
, nominals = False
, inverseRoles = False
, roleTransitivity = False
, roleHierarchy = False
, complexRoleInclusions = False
, addFeatures = False
, datatype = Set.empty
}
slMax :: OWLSub -> OWLSub -> OWLSub
slMax sl1 sl2 = OWLSub
{ numberRestrictions = max (numberRestrictions sl1) (numberRestrictions sl2)
, nominals = max (nominals sl1) (nominals sl2)
, inverseRoles = max (inverseRoles sl1) (inverseRoles sl2)
, roleTransitivity = max (roleTransitivity sl1) (roleTransitivity sl2)
, roleHierarchy = max (roleHierarchy sl1) (roleHierarchy sl2)
, complexRoleInclusions = max (complexRoleInclusions sl1)
(complexRoleInclusions sl2)
, addFeatures = max (addFeatures sl1) (addFeatures sl2)
, datatype = Set.union (datatype sl1) (datatype sl2)
}
-- | Naming for Description Logics
slName :: OWLSub -> String
slName sl =
(if complexRoleInclusions sl || addFeatures sl
then (if addFeatures sl then "s" else "") ++ "R"
else (if roleTransitivity sl then "S" else "ALC")
++ if roleHierarchy sl then "H" else "")
++ (if nominals sl then "O" else "")
++ (if inverseRoles sl then "I" else "")
++ (case numberRestrictions sl of
Qualified -> "Q"
Unqualified -> "N"
None -> "")
++ let ds = datatype sl in if Set.null ds then "" else
"-D|" ++ (if ds == owlDatatypes then "-|" else
intercalate "|" (map printDatatype $ Set.toList ds) ++ "|")
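-- For example, 'slName slBottom' is "ALC" and 'slName slTop' is "sROIQ-D|-|".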
requireQualNumberRestrictions :: OWLSub -> OWLSub
requireQualNumberRestrictions sl = sl {numberRestrictions = Qualified}
requireNumberRestrictions :: OWLSub -> OWLSub
requireNumberRestrictions sl = let nr = numberRestrictions sl in
sl {numberRestrictions = if nr /= Qualified then Unqualified else nr}
requireRoleTransitivity :: OWLSub -> OWLSub
requireRoleTransitivity sl = sl {roleTransitivity = True}
requireRoleHierarchy :: OWLSub -> OWLSub
requireRoleHierarchy sl = sl {roleHierarchy = True}
requireComplexRoleInclusions :: OWLSub -> OWLSub
requireComplexRoleInclusions sl = (requireRoleHierarchy
$ requireRoleTransitivity sl) {complexRoleInclusions = True}
requireAddFeatures :: OWLSub -> OWLSub
requireAddFeatures sl = (requireComplexRoleInclusions sl) {addFeatures = True}
requireNominals :: OWLSub -> OWLSub
requireNominals sl = sl {nominals = True}
requireInverseRoles :: OWLSub -> OWLSub
requireInverseRoles sl = sl {inverseRoles = True}
slDatatype :: Datatype -> OWLSub
slDatatype dt = slBottom {datatype = if isDatatypeKey dt then
Set.singleton $ setDatatypePrefix dt else Set.empty}
slObjProp :: ObjectPropertyExpression -> OWLSub
slObjProp o = case o of
ObjectProp _ -> slBottom
ObjectInverseOf _ -> requireInverseRoles slBottom
slEntity :: Entity -> OWLSub
slEntity (Entity _ et iri) = case et of
Datatype -> slDatatype iri
_ -> slBottom
slDataRange :: DataRange -> OWLSub
slDataRange rn = case rn of
DataType ur _ -> slDatatype ur
DataComplementOf c -> slDataRange c
DataOneOf _ -> requireNominals slBottom
DataJunction _ drl -> foldl slMax slBottom $ map slDataRange drl
slClassExpression :: ClassExpression -> OWLSub
slClassExpression des = case des of
ObjectJunction _ dec -> foldl slMax slBottom $ map slClassExpression dec
ObjectComplementOf dec -> slClassExpression dec
ObjectOneOf _ -> requireNominals slBottom
ObjectValuesFrom _ o d -> slMax (slObjProp o) (slClassExpression d)
ObjectHasSelf o -> requireAddFeatures $ slObjProp o
ObjectHasValue o _ -> slObjProp o
ObjectCardinality c -> slObjCard c
DataValuesFrom _ _ dr -> slDataRange dr
DataCardinality c -> slDataCard c
_ -> slBottom
slDataCard :: Cardinality DataPropertyExpression DataRange -> OWLSub
slDataCard (Cardinality _ _ _ x) = requireNumberRestrictions $ case x of
Nothing -> slBottom
Just y -> slDataRange y
slObjCard :: Cardinality ObjectPropertyExpression ClassExpression -> OWLSub
slObjCard (Cardinality _ _ op x) = requireNumberRestrictions $ case x of
Nothing -> slObjProp op
Just y -> slMax (slObjProp op) (slClassExpression y)
slAxiom :: Axiom -> OWLSub
slAxiom ax = case ax of
Declaration _ e -> slEntity e
ClassAxiom cax -> case cax of
SubClassOf _ sub sup -> slMax (slClassExpression sub) (slClassExpression sup)
EquivalentClasses _ clExprs -> foldl slMax slBottom $ map slClassExpression clExprs
DisjointClasses _ clExprs -> foldl slMax slBottom $ map slClassExpression clExprs
DisjointUnion _ _ clExprs -> foldl slMax slBottom $ map slClassExpression clExprs
ObjectPropertyAxiom opax -> case opax of
SubObjectPropertyOf _ subOpExpr supOpExpr ->
let oExprs = case subOpExpr of
SubObjPropExpr_obj oExpr -> [oExpr]
SubObjPropExpr_exprchain e -> e
in requireRoleHierarchy $ foldl slMax slBottom $ map slObjProp (supOpExpr : oExprs)
EquivalentObjectProperties _ oExprs -> foldl slMax slBottom $ map slObjProp oExprs
DisjointObjectProperties _ oExprs -> foldl slMax (requireAddFeatures slBottom) $ map slObjProp oExprs
InverseObjectProperties _ e1 e2 -> slMax (slObjProp e1) (slObjProp e2)
ObjectPropertyDomain _ oExpr cExpr -> slMax (slObjProp oExpr) (slClassExpression cExpr)
ObjectPropertyRange _ oExpr cExpr -> slMax (slObjProp oExpr) (slClassExpression cExpr)
FunctionalObjectProperty _ oExpr -> slObjProp oExpr
InverseFunctionalObjectProperty _ oExpr -> requireInverseRoles $ slObjProp oExpr
ReflexiveObjectProperty _ oExpr -> requireAddFeatures (slObjProp oExpr)
IrreflexiveObjectProperty _ oExpr -> requireAddFeatures (slObjProp oExpr)
SymmetricObjectProperty _ oExpr -> slObjProp oExpr
AsymmetricObjectProperty _ oExpr -> requireAddFeatures (slObjProp oExpr)
TransitiveObjectProperty _ oExpr -> requireRoleTransitivity (slObjProp oExpr)
DataPropertyAxiom a -> case a of
SubDataPropertyOf _ _ _ -> requireRoleHierarchy slBottom
EquivalentDataProperties _ _ -> slBottom
DisjointDataProperties _ _ -> requireAddFeatures slBottom
DataPropertyDomain _ _ _ -> slBottom
DataPropertyRange _ _ r -> slDataRange r
FunctionalDataProperty _ _ -> slBottom
DatatypeDefinition _ dt dr -> slMax (slDatatype dt) (slDataRange dr)
HasKey _ cExpr oExprs _ -> foldl slMax (slClassExpression cExpr)
$ map slObjProp oExprs
Assertion a -> case a of
SameIndividual _ _ -> requireNominals slBottom
DifferentIndividuals _ _ -> requireNominals slBottom
ClassAssertion _ clExpr _ -> slClassExpression clExpr
ObjectPropertyAssertion _ _ _ _ -> slBottom
NegativeObjectPropertyAssertion _ _ _ _ -> slBottom
DataPropertyAssertion _ _ _ _ -> slBottom
NegativeDataPropertyAssertion _ _ _ _ -> slBottom
AnnotationAxiom a -> case a of
AnnotationAssertion _ _ _ _ -> slBottom
SubAnnotationPropertyOf _ _ _ -> requireRoleHierarchy slBottom
AnnotationPropertyDomain _ _ _ -> slBottom
AnnotationPropertyRange _ _ _ -> slBottom
_ -> slBottom
slODoc :: OntologyDocument -> OWLSub
slODoc = foldl slMax slBottom . map slAxiom . axioms . ontology
slSig :: Sign -> OWLSub
slSig sig = let dts = Set.toList $ datatypes sig in
if Set.size (dataProperties sig) == 0 && null dts
then slBottom else foldl slMax slBottom $ map slDatatype dts
slMor :: OWLMorphism -> OWLSub
slMor mor = slMax (slSig $ osource mor) $ slSig $ otarget mor
-- projections along sublogics
prMor :: OWLSub -> OWLMorphism -> OWLMorphism
prMor s a = a
{ osource = prSig s $ osource a
, otarget = prSig s $ otarget a }
prSig :: OWLSub -> Sign -> Sign
prSig s a = if datatype s == Set.empty
then a {datatypes = Set.empty, dataProperties = Set.empty}
else a
prODoc :: OWLSub -> OntologyDocument -> OntologyDocument
prODoc s a =
let o = (ontology a) {axioms = filter ((s >=) . slAxiom) $ axioms $
ontology a }
in a {ontology = o}
| spechub/Hets | OWL2/Sublogic.hs | gpl-2.0 | 9,855 | 0 | 18 | 2,225 | 2,910 | 1,484 | 1,426 | 207 | 39 |
{--
-- Natume -- an implementation of Kana-Kanji conversion in Haskell
-- Copyright (C) 2006-2012 Takayuki Usui
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
--}
module Rensetu (
Rensetu(MkRensetu),Ren,
index,hinsi,ktype,kform,goi,
match,mktab,decode,compile,convert,
) where
import Prelude hiding (id)
import qualified Connect
import qualified Hinsi
import qualified KForm
import qualified KType
data Rensetu = MkRensetu { index :: Int,
hinsi :: Hinsi.Hinsi,
ktype :: [KType.KType],
kform :: [KForm.KForm],
goi :: String }
deriving (Show)
type Ren = (Int,Int,Int,Int,String)
allForms :: KType.KType -> [(KType.KType,KForm.KForm)]
allForms x = map (\f -> (x,f)) (KType.kform x)
allTypes :: Hinsi.Hinsi -> [(KType.KType,KForm.KForm)]
allTypes x = concat (map (\t -> (allForms t)) (Hinsi.ktype x))
bare :: [Hinsi.Hinsi] -> [Rensetu]
bare [] = []
bare (x:xs) = if (Hinsi.kt x)
then (map (\(t,f) -> MkRensetu 0 x [t] [f] [])
(allTypes x)) ++ (bare xs)
else (MkRensetu 0 x [] [] []) : bare xs
withGoi :: [Connect.Rule] -> [([String],String,String,String)]
withGoi [] = []
withGoi (x:xs) = (filter hasgoi x) ++ (withGoi xs)
where
hasgoi (_,_,_,g) = not (null g)
match :: ([String],String,String,String) -> Rensetu -> Bool
match x r =
let (h1,t1,f1,g1) = x
(MkRensetu _ h2 t2 f2 g2) = r in
and [Hinsi.isprefixof h1 (Hinsi.name h2),
or [(t1 == []),((not (null t2)) && (t1 == (KType.name (head t2))))],
or [(f1 == []),((not (null f2)) && (f1 == (KForm.name (head f2))))],
or [(g1 == []),((not (null g2)) && (g1 == g2))]]
update :: Connect.Rule ->([Rensetu],[Rensetu]) -> [Rensetu]
update [] (rs,_) = rs
update (x:xs) (rs,es) =
if not (null (filter (match x) (rs ++ es)))
then update xs (rs,es)
else if null t
then update xs ((r : rs),es)
else update xs (((reverse rs') ++ rs),es)
where
(h,t,f,g) = x
r = (head (filter (match (h,t,f,"")) (rs ++ es))) {goi=g}
rs' = map (\ren -> ren {goi=g}) (filter (match (h,t,"","")) es)
enum0 :: Int -> [Rensetu] -> (Int,[Rensetu])
enum0 n [] = (n,[])
enum0 n (x:xs) = (n1,x1:xs1)
where
(n1,xs1) = enum0 (n+1) xs
x1 = x {index=n}
enum :: [Rensetu] -> [Rensetu]
enum xs = snd (enum0 0 xs)
mktab :: [Hinsi.Hinsi] -> [Connect.Connect] -> ([Rensetu],[Rensetu])
mktab hs cs = ((take half ss),(drop half ss))
where
es = bare hs
fs = withGoi (map fst cs)
gs = update fs ([],(tail es))
rs = (head es) : ((reverse gs) ++ (tail es))
ss = enum rs
half = (length gs) + 1
encode :: [Rensetu] -> [Ren]
encode [] = []
encode (x:xs) =
(i1,h1,t1,f1,g1) : encode xs
where
(MkRensetu i0 h0 t0 f0 g0) = x
i1 = i0
h1 = Hinsi.index h0
t1 = if null t0 then 0 else (KType.index (head t0))
f1 = if null f0 then 0 else (KForm.index (head f0))
g1 = g0
decode :: [Hinsi.Hinsi] -> [Ren] -> [Rensetu]
decode _ [] = []
decode hs (x:xs) =
(MkRensetu i0 h0 t0 f0 g0) : decode hs xs
where
(i1,h1,t1,f1,g1) = x
i0 = i1
h0 = head (filter ((h1==).Hinsi.index) hs)
t0 = if t1 == 0
then []
else filter ((t1==).KType.index) (Hinsi.ktype h0)
f0 = if f1 == 0
then []
else filter ((f1==).KForm.index) (KType.kform (head t0))
g0 = g1
compile :: String -> String -> String -> String -> IO ()
compile g f t c = do hs <- Hinsi.readclass g f t
cs <- Connect.readconn c
let (rs1,rs2) = mktab hs cs
let (rs1',rs2') = (encode rs1,encode rs2)
writeFile "Ren.hs"
("module Ren (rensetu_tbl) where\n" ++
"import Rensetu\n" ++
"rensetu_tbl :: ([Ren],[Ren])\n" ++
"rensetu_tbl = " ++ (show (rs1',rs2')) ++ "\n")
return ()
indice :: [Rensetu] -> ([String],String,String,String) -> [Int]
indice _ (["文頭"],_,_,_) = []
indice _ (["文末"],_,_,_) = []
indice rs c = map index (filter (match c) rs)
weave0 :: [[a]] -> [[a]]
weave0 [] = []
weave0 [x] = map (\t -> [t]) x
weave0 (x:xs) = concat (map (\x' -> map (x':) xs') x)
where
xs' = weave0 xs
weave :: [[a]] -> [[a]]
weave = weave0 . (filter (not . null))
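-- e.g. weave [[1,2],[],[3]] == [[1,3],[2,3]]: empty alternatives are dropped
-- and the remaining choices are combined Cartesian-product style.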
convert :: [Connect.Connect] -> [Rensetu] -> [([Int],Int)]
convert [] _ = []
convert (x:xs) rs = (map (\c -> (c,cost)) css) ++ (convert xs rs)
where
(cs,cost) = x; css = weave (map (indice rs) cs)
| takayuki/natume | Rensetu.hs | gpl-2.0 | 5,530 | 3 | 18 | 1,708 | 2,394 | 1,326 | 1,068 | 116 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{- lat - tool to track alerts from LWN.
- Copyright (C) 2010 Magnus Therning
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, version 3 of the License.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Commands.ArgTypes
where
import Data.Data
data VulnListType = All | Reported | Unreported
deriving (Data, Typeable, Show, Eq)
data VulnListSize = Small | Normal | Full
deriving (Data, Typeable, Show, Eq)
data ArgType
= DistroAdd { config :: FilePath , name :: String , url :: String }
| DistroList { config :: FilePath }
| VulnUpdate { config :: FilePath, dry :: Bool }
| VulnList
{ config :: FilePath
, typ :: VulnListType
, size :: VulnListSize
, nofilter :: Bool
}
| VulnReport { config :: FilePath, nofilter :: Bool }
deriving (Data, Typeable, Show)
| magthe/lat | src/Commands/ArgTypes.hs | gpl-3.0 | 1,361 | 0 | 8 | 320 | 193 | 118 | 75 | 18 | 0 |
import Module (Root(b, d), Root(c, a))
| evolutics/haskell-formatter | testsuite/resources/source/orders_parts/nested_import_entities_before_root_entities/Input.hs | gpl-3.0 | 39 | 0 | 6 | 6 | 24 | 17 | 7 | 1 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Topology.Generate where
import GHC.TypeLits
--
import Data.Graph (scc, Tree(..) )
import qualified Data.HashSet as H
import Data.List (tails)
--
import Data.Partition (unitPartition)
import Data.Fin1
import Graph
import McKay
type TopologySet n = H.HashSet (UndirGraph n)
addEdge :: UndirGraph n -> UndirEdge n -> UndirGraph n
addEdge (UG (SE es)) e = let es' = e : es in mkUndirGraph es'
pick2distinct :: (KnownNat n) => [ UndirEdge n ]
pick2distinct = filter (not . isSelfish) pick2
pick2 :: forall n. (KnownNat n) => [ UndirEdge n ]
pick2 = (map (uncurry UE) . concatMap f . tails) interval
where f :: [Vertex n] -> [(Vertex n,Vertex n)]
f [] = []
f lst@(x:_) = map (x,) lst
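        -- For three vertices pick2 yields (1,1),(1,2),(1,3),(2,2),(2,3),(3,3);
        -- pick2distinct then drops the self-loops (1,1),(2,2),(3,3).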
generate1EdgeMore :: (KnownNat n) => UndirGraph n -> [UndirGraph n]
generate1EdgeMore g = map (addEdge g) pick2
generate1EdgeMore' :: (KnownNat n) => UndirGraph n -> [UndirGraph n]
generate1EdgeMore' g = map (addEdge g) pick2distinct
nextEdgeLevel :: (KnownNat n) => TopologySet n -> TopologySet n
nextEdgeLevel = foldr H.insert H.empty . map (canonicalLabel unitPartition) . concatMap generate1EdgeMore' . H.toList
nextEdgeLevelConnected :: (KnownNat n) => TopologySet n -> TopologySet n
nextEdgeLevelConnected = H.filter ((<=1) . length . filter (not . isIsolated) . scc . undirToDirected) . nextEdgeLevel
where isIsolated (Node _ []) = True
isIsolated _ = False
| wavewave/qft | old/lib/Topology/Generate.hs | gpl-3.0 | 1,478 | 0 | 13 | 282 | 570 | 301 | 269 | 31 | 2 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-overlapping-patterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Handler.DB.Routes where
import Prelude (const)
import Handler.DB.Enums
import Handler.DB.Esqueleto
import Handler.DB.Internal
import Handler.DB.RouteFiles
import Handler.DB.RouteFilesFile
import Handler.DB.RouteFileusergroupcontents
import Handler.DB.RouteUsergroupcontents
import Handler.DB.RouteUsergroups
import Handler.DB.RouteUsergroupsUserGroup
import Handler.DB.RouteUsergroupitems
import Handler.DB.RouteUsergroupitemsUserGroupItem
import Handler.DB.RouteUsers
import Handler.DB.RouteUsersUser
import Handler.DB.RouteVersions
import Handler.DB.RouteTransferreceipts
import Handler.DB.RouteReceipts
import Handler.DB.RouteReceiptsReceipt
import Handler.DB.RouteProcessperiods
import Handler.DB.RouteProcessperiodsProcessPeriod
import Handler.DB.RouteSettings
import Yesod.Auth (requireAuth, requireAuthId, YesodAuth, AuthId, YesodAuthPersist)
import Yesod.Core
import Yesod.Persist (runDB, YesodPersist, YesodPersistBackend)
getDB :: a -> DB
getDB = const DB
mkYesodSubData "DB" [parseRoutes|
/files FilesR GET
/files/#FileId FilesFileIdR GET PUT DELETE
/fileusergroupcontents FileusergroupcontentsR DELETE POST
/usergroupcontents UsergroupcontentsR GET
/usergroups UsergroupsR GET POST
/usergroups/#UserGroupId UsergroupsUserGroupIdR PUT DELETE
/usergroupitems UsergroupitemsR GET POST
/usergroupitems/#UserGroupItemId UsergroupitemsUserGroupItemIdR DELETE
/users UsersR GET POST
/users/#UserId UsersUserIdR GET DELETE PUT
/versions VersionsR GET
/transferreceipts TransferreceiptsR POST
/receipts ReceiptsR GET POST
/receipts/#ReceiptId ReceiptsReceiptIdR GET DELETE PUT
/processperiods ProcessperiodsR GET
/processperiods/#ProcessPeriodId ProcessperiodsProcessPeriodIdR POST
/settings SettingsR POST
|]
| tlaitinen/receipts | backend/Handler/DB/Routes.hs | gpl-3.0 | 2,452 | 0 | 5 | 405 | 222 | 148 | 74 | 43 | 1 |
module Beer where
sing :: Int -> Int -> String
sing e s =
unlines . map verse $ reverse [s..e]
verse :: Int -> String
verse 0 =
ucbs ++ location ++ ", " ++ bs ++ getMore ++ bottles 99 ++ location ++ ".\n"
where
bs = bottles 0
ucbs = "N" ++ tail bs
verse n =
if n>0
then bs ++ location ++ ", " ++ bs ++ drink n ++ bottles n' ++ location ++ ".\n"
else error "You cannot have fewer than no bottles of beer. Not even if you are a really heavy drinker."
where
bs = bottles n
n' = n-1
bottles :: Int -> String
bottles 0 = "no more bottles of beer"
bottles 1 = "1 bottle of beer"
bottles n = show n ++ " bottles of beer"
location :: String
location = " on the wall"
drink :: Int -> String
drink 1 = ".\nTake it down and pass it around, "
drink _ = ".\nTake one down and pass it around, "
getMore :: String
getMore = ".\nGo to the store and buy some more, "
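-- Usage sketch: the whole song, counting down from 99 bottles to none.
fullSong :: String
fullSong = sing 99 0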
| ciderpunx/exercismo | src/Beer.hs | gpl-3.0 | 906 | 0 | 12 | 245 | 277 | 142 | 135 | 26 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Invitations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the client users invitations for a client with a given account
-- ID.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.accounts.clients.invitations.list@.
module Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Invitations.List
(
-- * REST Resource
AccountsClientsInvitationsListResource
-- * Creating a Request
, accountsClientsInvitationsList
, AccountsClientsInvitationsList
-- * Request Lenses
, acilXgafv
, acilUploadProtocol
, acilAccessToken
, acilUploadType
, acilAccountId
, acilClientAccountId
, acilPageToken
, acilPageSize
, acilCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.accounts.clients.invitations.list@ method which the
-- 'AccountsClientsInvitationsList' request conforms to.
type AccountsClientsInvitationsListResource =
"v2beta1" :>
"accounts" :>
Capture "accountId" (Textual Int64) :>
"clients" :>
Capture "clientAccountId" Text :>
"invitations" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListClientUserInvitationsResponse
-- | Lists all the client users invitations for a client with a given account
-- ID.
--
-- /See:/ 'accountsClientsInvitationsList' smart constructor.
data AccountsClientsInvitationsList =
AccountsClientsInvitationsList'
{ _acilXgafv :: !(Maybe Xgafv)
, _acilUploadProtocol :: !(Maybe Text)
, _acilAccessToken :: !(Maybe Text)
, _acilUploadType :: !(Maybe Text)
, _acilAccountId :: !(Textual Int64)
, _acilClientAccountId :: !Text
, _acilPageToken :: !(Maybe Text)
, _acilPageSize :: !(Maybe (Textual Int32))
, _acilCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsClientsInvitationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acilXgafv'
--
-- * 'acilUploadProtocol'
--
-- * 'acilAccessToken'
--
-- * 'acilUploadType'
--
-- * 'acilAccountId'
--
-- * 'acilClientAccountId'
--
-- * 'acilPageToken'
--
-- * 'acilPageSize'
--
-- * 'acilCallback'
accountsClientsInvitationsList
:: Int64 -- ^ 'acilAccountId'
-> Text -- ^ 'acilClientAccountId'
-> AccountsClientsInvitationsList
accountsClientsInvitationsList pAcilAccountId_ pAcilClientAccountId_ =
AccountsClientsInvitationsList'
{ _acilXgafv = Nothing
, _acilUploadProtocol = Nothing
, _acilAccessToken = Nothing
, _acilUploadType = Nothing
, _acilAccountId = _Coerce # pAcilAccountId_
, _acilClientAccountId = pAcilClientAccountId_
, _acilPageToken = Nothing
, _acilPageSize = Nothing
, _acilCallback = Nothing
}
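-- Usage sketch with hypothetical IDs: list invitations for one client buyer of
-- sponsor buyer 123456789; optional fields (page size, page token, ...) can be
-- set afterwards with the lenses below.
exampleInvitationsList :: AccountsClientsInvitationsList
exampleInvitationsList = accountsClientsInvitationsList 123456789 "987654321"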
-- | V1 error format.
acilXgafv :: Lens' AccountsClientsInvitationsList (Maybe Xgafv)
acilXgafv
= lens _acilXgafv (\ s a -> s{_acilXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
acilUploadProtocol :: Lens' AccountsClientsInvitationsList (Maybe Text)
acilUploadProtocol
= lens _acilUploadProtocol
(\ s a -> s{_acilUploadProtocol = a})
-- | OAuth access token.
acilAccessToken :: Lens' AccountsClientsInvitationsList (Maybe Text)
acilAccessToken
= lens _acilAccessToken
(\ s a -> s{_acilAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
acilUploadType :: Lens' AccountsClientsInvitationsList (Maybe Text)
acilUploadType
= lens _acilUploadType
(\ s a -> s{_acilUploadType = a})
-- | Numerical account ID of the client\'s sponsor buyer. (required)
acilAccountId :: Lens' AccountsClientsInvitationsList Int64
acilAccountId
= lens _acilAccountId
(\ s a -> s{_acilAccountId = a})
. _Coerce
-- | Numerical account ID of the client buyer to list invitations for.
-- (required) You must either specify a string representation of a
-- numerical account identifier or the \`-\` character to list all the
-- invitations for all the clients of a given sponsor buyer.
acilClientAccountId :: Lens' AccountsClientsInvitationsList Text
acilClientAccountId
= lens _acilClientAccountId
(\ s a -> s{_acilClientAccountId = a})
-- | A token identifying a page of results the server should return.
-- Typically, this is the value of
-- ListClientUserInvitationsResponse.nextPageToken returned from the
-- previous call to the clients.invitations.list method.
acilPageToken :: Lens' AccountsClientsInvitationsList (Maybe Text)
acilPageToken
= lens _acilPageToken
(\ s a -> s{_acilPageToken = a})
-- | Requested page size. Server may return fewer clients than requested. If
-- unspecified, server will pick an appropriate default.
acilPageSize :: Lens' AccountsClientsInvitationsList (Maybe Int32)
acilPageSize
= lens _acilPageSize (\ s a -> s{_acilPageSize = a})
. mapping _Coerce
-- | JSONP
acilCallback :: Lens' AccountsClientsInvitationsList (Maybe Text)
acilCallback
= lens _acilCallback (\ s a -> s{_acilCallback = a})
instance GoogleRequest AccountsClientsInvitationsList
where
type Rs AccountsClientsInvitationsList =
ListClientUserInvitationsResponse
type Scopes AccountsClientsInvitationsList =
'["https://www.googleapis.com/auth/adexchange.buyer"]
requestClient AccountsClientsInvitationsList'{..}
= go _acilAccountId _acilClientAccountId _acilXgafv
_acilUploadProtocol
_acilAccessToken
_acilUploadType
_acilPageToken
_acilPageSize
_acilCallback
(Just AltJSON)
adExchangeBuyer2Service
where go
= buildClient
(Proxy ::
Proxy AccountsClientsInvitationsListResource)
mempty
| brendanhay/gogol | gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Accounts/Clients/Invitations/List.hs | mpl-2.0 | 7,230 | 0 | 21 | 1,640 | 991 | 574 | 417 | 144 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Control.Arrow ((>>>))
import Control.Monad ((>=>))
import Data.Maybe (fromJust)
import Generics.MultiRec.Base
import Generics.MultiRec.Compos
import qualified Generics.MultiRec.Show as GS
import Generics.MultiRec.Zipper
import System.IO
import Control.Monad
import Generics.MultiRec.GHC.GHCTHUseAlt
import Generics.MultiRec.GHC.Instances
import Bag
import Bag(Bag,bagToList)
-- import Data.Generics
import FastString(FastString)
import GHC.Paths ( libdir )
import RdrName
import OccName
import qualified OccName(occNameString)
import System.Directory
-----------------
import qualified Data.Generics as SYB
import qualified Data.Generics.Schemes as SYB
import qualified Data.Generics.Aliases as SYB
import qualified GHC.SYB.Utils as SYB
import Var
import qualified CoreFVs as GHC
import qualified CoreSyn as GHC
import qualified DynFlags as GHC
import qualified ErrUtils as GHC
import qualified Exception as GHC
import qualified FastString as GHC
import qualified GHC as GHC
import qualified HscTypes as GHC
import qualified HsLit as GHC
import qualified Lexer as GHC
import qualified MonadUtils as GHC
import qualified Outputable as GHC
import qualified SrcLoc as GHC
import qualified StringBuffer as GHC
-- Working with the GHC AST.
-- * Instantiating the library for GHC AST, starting with RenamedSource
-- ---------------------------------------------------------------------
main = do
Just renamed <- getStuff
let renamed' = addOne renamed
let foo = testZipper renamed'
-- putStrLn $ "\nfoo=" ++ (SYB.showData SYB.Renamer 0 $ foo)
putStrLn $ "\nfoo=" ++ showGhc foo
putStrLn $ "\nrenamed'=" ++ showGhc renamed'
startEditor renamed
return ()
-- ---------------------------------------------------------------------
-- playing with compos
{-
-- | Renaming variables using 'compos'
renameVar :: Expr String -> Expr String
renameVar = renameVar' Expr
where
renameVar' :: AST String a -> a -> a
renameVar' Var x = x ++ "_"
renameVar' p x = compos renameVar' p x
-- | Test for 'renameVar'
testRename :: Expr String
testRename = renameVar example
-}
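-- | Increment every integral literal in the renamed source: 'compos' walks the
-- whole AST and only the 'OverLitValIt' case does any real work (e.g. a
-- literal 41 becomes 42).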
addOne :: GHC.RenamedSource -> GHC.RenamedSource
addOne = addOne' RenamedSourceIt
where
addOne' :: AST a -> a -> a
addOne' OverLitValIt (GHC.HsIntegral x) = (GHC.HsIntegral (x+1))
addOne' p x = compos addOne' p x
-- ---------------------------------------------------------------------
-- | Call this to start the navigation demo.
startEditor :: GHC.RenamedSource -> IO ()
startEditor ast@(g,i,e,d) =
do
intro
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
-- loop $ enter HsGroupIt g
loop $ enter RenamedSourceIt ast
-- ---------------------------------------------------------------------
-- | Main loop. Prints current location, asks for a command and executes
-- a navigation operation depending on that command.
-- loop :: Loc AST I0 (GHC.HsGroup GHC.Name) -> IO ()
loop :: Loc AST I0 (GHC.RenamedSource) -> IO ()
loop l =
do
putStr $ (showZipper l) ++ " {" ++ {- typeOfFocus l ++ -} "}"
cmd <- getChar
putStr "\r\ESC[2K"
when (cmd == 'q') $ putStrLn ""
when (cmd /= 'q') $ do
let op = case cmd of
'j' -> down
'l' -> right
'h' -> left
'k' -> up
' ' -> dfnext
'n' -> dfnext
'b' -> dfprev
_ -> return
case op l of
Nothing -> loop l
Just l' -> loop l'
-- ---------------------------------------------------------------------
testZipper2 :: GHC.RenamedSource -> Maybe (GHC.HsGroup GHC.Name)
testZipper2 renamed@(g,i,e,d) =
-- enter LImportDeclIt >>>
enter HsGroupIt >>>
dfnext >=>
update solve >>>
leave >>>
return $ g
where
solve :: AST ix -> ix -> ix
-- solve OveerLit _ = Const 42
solve HsValBindsLRIt (GHC.ValBindsOut [x1,x2] y) = GHC.ValBindsOut [x1] y
solve _ x = error "foo" -- x
-- ---------------------------------------------------------------------
-- | Show the current location, with the focus being highlighted in red.
-- showZipper :: Loc AST I0 (GHC.HsGroup GHC.Name) -> String
showZipper :: Loc AST I0 (GHC.RenamedSource) -> String
showZipper l = (GS.spaces $ map ($ 0) $ unK0 (foldZipper focus (\ p x -> K0 (GS.hShowsPrecAlg p x)) l)) ""
where focus :: AST ix -> ix -> K0 ([Int -> ShowS]) ix
focus ix x = K0 [\ n -> ("\ESC[01;31m" ++) . GS.showsPrec ix n x . ("\ESC[00m" ++)]
-- ---------------------------------------------------------------------
testZipper :: GHC.RenamedSource -> Maybe (GHC.HsGroup GHC.Name)
testZipper renamed@(g,i,e,d) =
-- enter LImportDeclIt >>>
enter HsGroupIt >>>
down >=>
-- down >=>
-- right >=>
update solve >>>
leave >>>
return $ g
where
solve :: AST ix -> ix -> ix
-- solve OveerLit _ = Const 42
-- solve HsValBindsLRIt (GHC.ValBindsOut [x1,x2] y) = GHC.ValBindsOut [x1] y
solve HsValBindsLRIt (GHC.ValBindsOut [x1,x2] y) = GHC.ValBindsOut [x2] y
solve _ x = error "foo" -- x
-- ---------------------------------------------------------------------
-- | Introductory help message.
intro :: IO ()
intro =
putStrLn "h: left, j: down, k: up, l: right, q: quit, n,[space]: df lr traversal, b: df rl traversal"
-- ---------------------------------------------------------------------
targetFile = "./examples/Foo.hs"
-- getStuff :: IO ()
getStuff :: IO (Maybe GHC.RenamedSource)
getStuff =
GHC.defaultErrorHandler GHC.defaultFatalMessager GHC.defaultFlushOut $ do
GHC.runGhc (Just libdir) $ do
dflags <- GHC.getSessionDynFlags
let dflags' = foldl GHC.xopt_set dflags
[GHC.Opt_Cpp, GHC.Opt_ImplicitPrelude, GHC.Opt_MagicHash]
dflags'' = dflags' { GHC.importPaths = ["./src/","./test/testdata/","../test/testdata/"] }
dflags''' = dflags'' { GHC.hscTarget = GHC.HscInterpreted,
GHC.ghcLink = GHC.LinkInMemory }
_ <- GHC.setSessionDynFlags dflags'''
GHC.liftIO $ putStrLn $ "dflags set"
target <- GHC.guessTarget targetFile Nothing
GHC.setTargets [target]
GHC.load GHC.LoadAllTargets -- Loads and compiles, much as calling make
modSum <- GHC.getModSummary $ GHC.mkModuleName "Foo"
p <- GHC.parseModule modSum
t <- GHC.typecheckModule p
GHC.setContext [GHC.IIModule (GHC.moduleName $ GHC.ms_mod modSum)]
g <- GHC.getModuleGraph
gs <- mapM GHC.showModule g
GHC.liftIO (putStrLn $ "modulegraph=" ++ (Prelude.show gs))
let ps = GHC.pm_parsed_source p
GHC.liftIO $ putStrLn $ "got parsed source"
-- RenamedSource -----------------------------------------------
GHC.liftIO $ putStrLn $ "about to show renamedSource"
-- GHC.liftIO (putStrLn $ "renamedSource(Ppr)=" ++ (showGhc $ GHC.tm_renamed_source t))
-- GHC.liftIO (putStrLn $ "\nrenamedSource(showData)=" ++ (SYB.showData SYB.Renamer 0 $ GHC.tm_renamed_source t))
return (GHC.tm_renamed_source t)
pwd :: IO FilePath
pwd = getCurrentDirectory
| alanz/ghc-multirec | examples/main-ghc.hs | unlicense | 7,813 | 0 | 17 | 1,947 | 1,611 | 875 | 736 | 143 | 9 |
module Tables.A004489 (a004489) where
import Helpers.BaseRepresentation (toBase, fromBase)
import Helpers.ListHelpers (zipWithPadding)
import Helpers.Table (tableByAntidiagonals)
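-- | The entry at antidiagonal position (n, k) is the carry-free ("tritwise")
-- base-3 sum of n and k; e.g. n = 5 (12 in base 3) and k = 7 (21 in base 3)
-- give digitwise sums 0 and 0 (mod 3), so the entry is 0.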
a004489 :: Int -> Int
a004489 i = fromBase 3 $ map tertSum $ zipWithPadding 0 (base3 n) (base3 k) where
(n, k) = tableByAntidiagonals i
tertSum (n', k') = (n' + k') `mod` 3
base3 = toBase 3
| peterokagey/haskellOEIS | src/Tables/A004489.hs | apache-2.0 | 376 | 0 | 9 | 60 | 146 | 80 | 66 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1beta1.Scale where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import Openshift.V1beta1.ScaleSpec
import Openshift.V1beta1.ScaleStatus
import qualified Data.Aeson
-- | represents a scaling request for a resource.
data Scale = Scale
{ kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
, apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
, metadata :: Maybe ObjectMeta -- ^ Standard object metadata; More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata.
, spec :: Maybe ScaleSpec -- ^ defines the behavior of the scale. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status.
, status :: Maybe ScaleStatus -- ^ current status of the scale. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status. Read-only.
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON Scale
instance Data.Aeson.ToJSON Scale
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1beta1/Scale.hs | apache-2.0 | 1,616 | 0 | 9 | 213 | 141 | 86 | 55 | 21 | 0 |
{-# LANGUAGE PackageImports, OverloadedStrings, TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TlsIo (
TlsIo, evalTlsIo, liftIO, throwError, readCached, randomByteString,
Partner(..), opponent, isCiphered,
readContentType, writeContentType, readVersion, writeVersion,
readLen, writeLen,
setVersion, setClientRandom, setServerRandom,
getClientRandom, getServerRandom, getCipherSuite,
cacheCipherSuite, flushCipherSuite,
encryptRSA, generateKeys, updateHash, finishedHash, clientVerifySign,
encryptMessage, decryptMessage,
updateSequenceNumber, updateSequenceNumberSmart,
TlsServer, runOpen, tPut, tGetByte, tGetLine, tGet, tGetContent, tClose,
debugPrintKeys,
getRandomGen, setRandomGen,
SecretKey(..),
) where
import Prelude hiding (read)
import System.IO
import System.IO.Error
import Control.Concurrent.STM
import Control.Applicative
import "monads-tf" Control.Monad.Error
import "monads-tf" Control.Monad.State
import Data.Maybe
import Data.Word
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import "crypto-random" Crypto.Random
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Crypto.PubKey.HashDescr as RSA
import qualified Crypto.PubKey.RSA as RSA
import qualified Crypto.PubKey.RSA.Prim as RSA
import qualified Crypto.PubKey.RSA.PKCS15 as RSA
import qualified Crypto.PubKey.ECC.ECDSA as ECDSA
import qualified CryptoTools as CT
import Basic
import Data.HandleLike
import Data.ASN1.Encoding
import Data.ASN1.Types
import Data.ASN1.BinaryEncoding
import Content
type TlsIo cnt = ErrorT String (StateT (TlsClientState cnt) IO)
data TlsClientState cnt = TlsClientState {
tlssHandle :: Handle,
tlssContentCache :: [cnt],
tlssVersion :: Maybe CT.MSVersion,
tlssClientWriteCipherSuite :: CipherSuite,
tlssServerWriteCipherSuite :: CipherSuite,
tlssCachedCipherSuite :: CipherSuite,
tlssMasterSecret :: Maybe BS.ByteString,
tlssClientRandom :: Maybe BS.ByteString,
tlssServerRandom :: Maybe BS.ByteString,
tlssClientWriteMacKey :: Maybe BS.ByteString,
tlssServerWriteMacKey :: Maybe BS.ByteString,
tlssClientWriteKey :: Maybe BS.ByteString,
tlssServerWriteKey :: Maybe BS.ByteString,
tlssRandomGen :: SystemRNG,
tlssSha256Ctx :: SHA256.Ctx,
tlssClientSequenceNumber :: Word64,
tlssServerSequenceNumber :: Word64
}
instance HandleLike TlsServer where
type HandleMonad TlsServer = IO
hlPut = tPut
hlGet = tGet
hlGetLine = tGetLine
hlGetContent = tGetContent
hlClose = tClose
initTlsClientState :: EntropyPool -> Handle -> TlsClientState cnt
initTlsClientState ep sv = TlsClientState {
tlssHandle = sv,
tlssContentCache = [],
tlssVersion = Nothing,
tlssClientWriteCipherSuite = CipherSuite KeyExNULL MsgEncNULL,
tlssServerWriteCipherSuite = CipherSuite KeyExNULL MsgEncNULL,
tlssCachedCipherSuite = CipherSuite KeyExNULL MsgEncNULL,
tlssMasterSecret = Nothing,
tlssClientRandom = Nothing,
tlssServerRandom = Nothing,
tlssClientWriteMacKey = Nothing,
tlssServerWriteMacKey = Nothing,
tlssClientWriteKey = Nothing,
tlssServerWriteKey = Nothing,
tlssRandomGen = cprgCreate ep,
tlssSha256Ctx = SHA256.init,
tlssClientSequenceNumber = 0,
tlssServerSequenceNumber = 0
}
runOpen :: TlsIo cnt () -> Handle -> IO TlsServer
runOpen opn sv = do
ep <- createEntropyPool
(_, tlss) <- opn `runTlsIo` initTlsClientState ep sv
tvgen <- atomically . newTVar $ tlssRandomGen tlss
tvcsn <- atomically . newTVar $ tlssClientSequenceNumber tlss
tvssn <- atomically . newTVar $ tlssServerSequenceNumber tlss
tvbfr <- atomically $ newTVar ""
return TlsServer {
tlsVersion = fromJust $ tlssVersion tlss,
tlsCipherSuite = tlssClientWriteCipherSuite tlss,
tlsHandle = tlssHandle tlss,
tlsBuffer = tvbfr,
tlsRandomGen = tvgen,
tlsClientWriteMacKey = fromJust $ tlssClientWriteMacKey tlss,
tlsServerWriteMacKey = fromJust $ tlssServerWriteMacKey tlss,
tlsClientWriteKey = fromJust $ tlssClientWriteKey tlss,
tlsServerWriteKey = fromJust $ tlssServerWriteKey tlss,
tlsClientSequenceNumber = tvcsn,
tlsServerSequenceNumber = tvssn
}
runTlsIo :: TlsIo cnt a -> TlsClientState cnt -> IO (a, TlsClientState cnt)
runTlsIo io st = do
(ret, st') <- runErrorT io `runStateT` st
case ret of
Right r -> return (r, st')
Left err -> error err
evalTlsIo :: TlsIo cnt a -> EntropyPool -> Handle -> IO a
evalTlsIo io ep sv = do
ret <- runErrorT io `evalStateT` initTlsClientState ep sv
case ret of
Right r -> return r
Left err -> error err
readCached :: TlsIo cnt [cnt] -> TlsIo cnt cnt
readCached rd = do
tlss@TlsClientState{ tlssContentCache = cch } <- get
case cch of
[] -> do
r : cch' <- rd
put tlss { tlssContentCache = cch' }
return r
r : cch' -> do
put tlss { tlssContentCache = cch' }
return r
randomByteString :: Int -> TlsIo cnt BS.ByteString
randomByteString len = do
(r, gen) <- cprgGenerate len <$> gets tlssRandomGen
tlss <- get
put tlss{ tlssRandomGen = gen }
return r
data Partner = Server | Client deriving (Show, Eq)
opponent :: Partner -> Partner
opponent Server = Client
opponent Client = Server
isCiphered :: Partner -> TlsIo cnt Bool
isCiphered partner = (/= CipherSuite KeyExNULL MsgEncNULL) <$> gets (case partner of
Client -> tlssClientWriteCipherSuite
Server -> tlssServerWriteCipherSuite)
readContentType :: TlsIo cnt ContentType
readContentType = byteStringToContentType <$> read 1
writeContentType :: ContentType -> TlsIo cnt ()
writeContentType = write . contentTypeToByteString
readVersion :: TlsIo cnt Version
readVersion = byteStringToVersion <$> read 2
writeVersion :: Version -> TlsIo cnt ()
writeVersion = write . versionToByteString
readLen :: Int -> TlsIo cnt BS.ByteString
readLen n = read . byteStringToInt =<< read n
writeLen :: Int -> BS.ByteString -> TlsIo cnt ()
writeLen n bs = write (intToByteString n $ BS.length bs) >> write bs
read :: Int -> TlsIo cnt BS.ByteString
read n = do
	r <- liftIO . flip BS.hGet n =<< gets tlssHandle
	if BS.length r == n then return r else throwError $
		"TlsIo.read:\n" ++
		"\texpected: " ++ show n ++ "byte\n" ++
		"\tactual  : " ++ show (BS.length r) ++ "byte\n"
write :: BS.ByteString -> TlsIo cnt ()
write dat = liftIO . flip BS.hPut dat =<< gets tlssHandle
setVersion :: Version -> TlsIo cnt ()
setVersion v = do
tlss <- get
case CT.versionToVersion v of
Just v' -> put tlss { tlssVersion = Just v' }
_ -> throwError "setVersion: Not implemented"
setClientRandom, setServerRandom :: Random -> TlsIo cnt ()
setClientRandom (Random cr) = do
tlss <- get
put $ tlss { tlssClientRandom = Just cr }
setServerRandom (Random sr) = do
tlss <- get
put $ tlss { tlssServerRandom = Just sr }
getClientRandom, getServerRandom :: TlsIo cnt (Maybe BS.ByteString)
getClientRandom = gets tlssClientRandom
getServerRandom = gets tlssServerRandom
getCipherSuite :: TlsIo cnt CipherSuite
getCipherSuite = gets tlssCachedCipherSuite
cacheCipherSuite :: CipherSuite -> TlsIo cnt ()
cacheCipherSuite cs = do
tlss <- get
put $ tlss { tlssCachedCipherSuite = cs }
flushCipherSuite :: Partner -> TlsIo cnt ()
flushCipherSuite p = do
tlss <- get
case p of
Client -> put tlss {
tlssClientWriteCipherSuite = tlssCachedCipherSuite tlss }
Server -> put tlss {
tlssServerWriteCipherSuite = tlssCachedCipherSuite tlss }
encryptRSA :: RSA.PublicKey -> BS.ByteString -> TlsIo cnt BS.ByteString
encryptRSA pub pln = do
g <- gets tlssRandomGen
let (Right e, g') = RSA.encrypt g pub pln
tlss <- get
put tlss { tlssRandomGen = g' }
return e
generateKeys :: BS.ByteString -> TlsIo cnt ()
generateKeys pms = do
-- liftIO $ putStrLn $ "Pre Master Secret: " ++ show pms
mv <- gets tlssVersion
mcr <- gets $ (CT.ClientRandom <$>) . tlssClientRandom
msr <- gets $ (CT.ServerRandom <$>) . tlssServerRandom
mkl <- do
cs <- gets tlssCachedCipherSuite
case cs of
CipherSuite _ AES_128_CBC_SHA -> return 20
CipherSuite _ AES_128_CBC_SHA256 -> return 32
_ -> throwError "TlsIO.generateKeys: error"
case (mv, mcr, msr) of
(Just v, Just cr, Just sr) -> do
let ms = CT.generateMasterSecret v pms cr sr
ems = CT.generateKeyBlock v cr sr ms $
mkl * 2 + 32
[cwmk, swmk, cwk, swk] =
divide [ mkl, mkl, 16, 16 ] ems
-- liftIO . putStrLn $ "KEYS: " ++ show [cwmk, swmk, cwk, swk]
tlss <- get
put $ tlss {
tlssMasterSecret = Just ms,
tlssClientWriteMacKey = Just cwmk,
tlssServerWriteMacKey = Just swmk,
tlssClientWriteKey = Just cwk,
tlssServerWriteKey = Just swk }
_ -> throwError "No version / No (client/server) random"
where
divide [] _ = []
divide (n : ns) bs
| bs == BS.empty = []
| otherwise = let (x, xs) = BS.splitAt n bs in x : divide ns xs
updateHash :: BS.ByteString -> TlsIo cnt ()
updateHash bs = do
tlss@TlsClientState{ tlssSha256Ctx = sha256 } <- get
-- liftIO . putStrLn $ "PRE : " ++ show (SHA256.finalize sha256)
-- liftIO . putStrLn $ show bs
-- liftIO . putStrLn $ "POST: " ++ show (SHA256.finalize $ SHA256.update sha256 bs)
put tlss { tlssSha256Ctx = SHA256.update sha256 bs }
finishedHash :: Partner -> TlsIo cnt BS.ByteString
finishedHash partner = do
mms <- gets tlssMasterSecret
sha256 <- SHA256.finalize <$> gets tlssSha256Ctx
mv <- gets tlssVersion
case (mv, mms) of
(Just CT.TLS12, Just ms) -> return $ case partner of
Client -> CT.generateFinished CT.TLS12 True ms sha256
Server -> CT.generateFinished CT.TLS12 False ms sha256
_ -> throwError "finishedHash: No version / No master secrets"
class SecretKey sk where
sign :: sk -> BS.ByteString -> BS.ByteString
algorithm :: sk -> (HashAlgorithm, SignatureAlgorithm)
instance SecretKey RSA.PrivateKey where
sign sk bd = let
Right hashed = RSA.padSignature
(RSA.public_size $ RSA.private_pub sk)
(RSA.digestToASN1 RSA.hashDescrSHA256 bd) in
RSA.dp Nothing sk hashed
algorithm _ = (HashAlgorithmSha256, SignatureAlgorithmRsa)
instance SecretKey ECDSA.PrivateKey where
sign sk = encodeSignature . fromJust . ECDSA.signWith 4649 sk id
algorithm _ = (HashAlgorithmSha256, SignatureAlgorithmEcdsa)
encodeSignature :: ECDSA.Signature -> BS.ByteString
encodeSignature (ECDSA.Signature r s) =
encodeASN1' DER [Start Sequence, IntVal r, IntVal s, End Sequence]
clientVerifySign :: SecretKey sk => sk -> TlsIo cnt BS.ByteString
clientVerifySign pkys = do
sha256 <- gets $ SHA256.finalize . tlssSha256Ctx
return $ sign pkys sha256
getVsnCsMwkSnMmk :: Partner -> TlsIo cnt (Maybe CT.MSVersion, CipherSuite, Maybe BS.ByteString,
Word64, Maybe BS.ByteString)
getVsnCsMwkSnMmk partner = do
vrsn <- gets tlssVersion
cs <- cipherSuite partner
mwk <- writeKey partner
sn <- sequenceNumber partner
mmk <- macKey partner
return (vrsn, cs, mwk, sn, mmk)
encryptMessage :: Partner ->
ContentType -> Version -> BS.ByteString -> TlsIo cnt BS.ByteString
encryptMessage partner ct v msg = do
(vrsn, cs, mwk, sn, mmk) <- getVsnCsMwkSnMmk partner
gen <- gets tlssRandomGen
mhs <- case cs of
CipherSuite _ AES_128_CBC_SHA -> return $ Just CT.hashSha1
CipherSuite _ AES_128_CBC_SHA256 -> return $ Just CT.hashSha256
CipherSuite KeyExNULL MsgEncNULL -> return Nothing
_ -> throwError "TlsIo.encryptMessage"
case (vrsn, mhs, mwk, mmk) of
(Just CT.TLS12, Just hs, Just wk, Just mk)
-> do let (ret, gen') =
CT.encryptMessage hs gen wk sn mk ct v msg
tlss <- get
put tlss{ tlssRandomGen = gen' }
return ret
(_, Nothing, _, _) -> return msg
_ -> throwError $ "TlsIO.encryptMessage:\n" ++
"\tNo keys or not implemented cipher suite"
decryptMessage :: Partner ->
ContentType -> Version -> BS.ByteString -> TlsIo cnt BS.ByteString
decryptMessage partner ct v enc = do
(vrsn, cs, mwk, sn, mmk) <- getVsnCsMwkSnMmk partner
case (vrsn, cs, mwk, mmk) of
(Just CT.TLS12, CipherSuite _ AES_128_CBC_SHA, Just key, Just mk)
-> do let emsg = CT.decryptMessage CT.hashSha1 key sn mk ct v enc
case emsg of
Right msg -> return msg
Left err -> throwError err
(Just CT.TLS12, CipherSuite _ AES_128_CBC_SHA256, Just key, Just mk)
-> do let emsg = CT.decryptMessage CT.hashSha256 key sn mk ct v enc
case emsg of
Right msg -> return msg
Left err -> throwError err
(_, CipherSuite KeyExNULL MsgEncNULL, _, _) -> return enc
_ -> throwError "TlsIO.decryptMessage: No keys or Bad cipher suite"
cipherSuite :: Partner -> TlsIo cnt CipherSuite
cipherSuite partner = gets $ case partner of
Client -> tlssClientWriteCipherSuite
Server -> tlssServerWriteCipherSuite
writeKey :: Partner -> TlsIo cnt (Maybe BS.ByteString)
writeKey partner = gets $ case partner of
Client -> tlssClientWriteKey
Server -> tlssServerWriteKey
macKey :: Partner -> TlsIo cnt (Maybe BS.ByteString)
macKey partner = gets $ case partner of
Client -> tlssClientWriteMacKey
Server -> tlssServerWriteMacKey
sequenceNumber :: Partner -> TlsIo cnt Word64
sequenceNumber partner = gets $ case partner of
Client -> tlssClientSequenceNumber
Server -> tlssServerSequenceNumber
updateSequenceNumber :: Partner -> TlsIo cnt ()
updateSequenceNumber partner = do
sn <- gets $ case partner of
Client -> tlssClientSequenceNumber
Server -> tlssServerSequenceNumber
tlss <- get
put $ case partner of
Client -> tlss { tlssClientSequenceNumber = succ sn }
Server -> tlss { tlssServerSequenceNumber = succ sn }
updateSequenceNumberSmart :: Partner -> TlsIo cnt ()
updateSequenceNumberSmart partner =
flip when (updateSequenceNumber partner) =<< isCiphered partner
data TlsServer = TlsServer {
tlsVersion :: CT.MSVersion,
tlsCipherSuite :: CipherSuite,
tlsHandle :: Handle,
tlsBuffer :: TVar BS.ByteString,
tlsRandomGen :: TVar SystemRNG,
tlsClientWriteMacKey :: BS.ByteString,
tlsServerWriteMacKey :: BS.ByteString,
tlsClientWriteKey :: BS.ByteString,
tlsServerWriteKey :: BS.ByteString,
tlsClientSequenceNumber :: TVar Word64,
tlsServerSequenceNumber :: TVar Word64
}
tPut :: TlsServer -> BS.ByteString -> IO ()
tPut ts = tPutWithCT ts ContentTypeApplicationData
tPutWithCT :: TlsServer -> ContentType -> BS.ByteString -> IO ()
tPutWithCT ts ct msg = do
hs <- case cs of
CipherSuite _ AES_128_CBC_SHA -> return CT.hashSha1
CipherSuite _ AES_128_CBC_SHA256 -> return CT.hashSha256
_ -> error "TlsIo.tPutWithCT"
ebody <- atomically $ do
gen <- readTVar tvgen
sn <- readTVar tvsn
let (e, gen') = enc hs gen sn
writeTVar tvgen gen'
writeTVar tvsn $ succ sn
return e
BS.hPut h $ BS.concat [
contentTypeToByteString ct,
versionToByteString v,
lenBodyToByteString 2 ebody]
where
cs = tlsCipherSuite ts
h = tlsHandle ts
key = tlsClientWriteKey ts
mk = tlsClientWriteMacKey ts
v = Version 3 3
tvsn = tlsClientSequenceNumber ts
tvgen = tlsRandomGen ts
enc hs gen sn = CT.encryptMessage hs gen key sn mk ct v msg
tGetWhole :: TlsServer -> IO BS.ByteString
tGetWhole ts = do
ret <- tGetWholeWithCT ts
case ret of
(ContentTypeApplicationData, ad) -> return ad
(ContentTypeAlert, "\SOH\NUL") -> do
tPutWithCT ts ContentTypeAlert "\SOH\NUL"
ioError $ mkIOError
eofErrorType "tGetWhole" (Just h) Nothing
                _ -> error "not implemented yet"
where
h = tlsHandle ts
tGetWholeWithCT :: TlsServer -> IO (ContentType, BS.ByteString)
tGetWholeWithCT ts = do
hs <- case cs of
CipherSuite _ AES_128_CBC_SHA -> return CT.hashSha1
CipherSuite _ AES_128_CBC_SHA256 -> return CT.hashSha256
_ -> error "TlsIo.tGetWholeWithCT"
ct <- byteStringToContentType <$> BS.hGet h 1
v <- byteStringToVersion <$> BS.hGet h 2
enc <- BS.hGet h . byteStringToInt =<< BS.hGet h 2
sn <- atomically $ do
n <- readTVar tvsn
writeTVar tvsn $ succ n
return n
case dec hs sn ct v enc of
Right r -> return (ct, r)
Left err -> error err
where
cs = tlsCipherSuite ts
h = tlsHandle ts
key = tlsServerWriteKey ts
mk = tlsServerWriteMacKey ts
tvsn = tlsServerSequenceNumber ts
dec hs sn = CT.decryptMessage hs key sn mk
tGetByte :: TlsServer -> IO Word8
tGetByte ts = do
bfr <- atomically . readTVar $ tlsBuffer ts
if BS.null bfr then do
msg <- tGetWhole ts
atomically $ case BS.uncons msg of
Just (b, bs) -> do
writeTVar (tlsBuffer ts) bs
return b
_ -> error "tGetByte: empty data"
else atomically $ case BS.uncons bfr of
Just (b, bs) -> do
writeTVar (tlsBuffer ts) bs
return b
_ -> error "tGetByte: never occur"
tGet :: TlsServer -> Int -> IO BS.ByteString
tGet ts n = do
bfr <- atomically . readTVar $ tlsBuffer ts
if n <= BS.length bfr then atomically $ do
let (ret, bfr') = BS.splitAt n bfr
writeTVar (tlsBuffer ts) bfr'
return ret
else do msg <- tGetWhole ts
atomically $ writeTVar (tlsBuffer ts) msg
(bfr `BS.append`) <$> tGet ts (n - BS.length bfr)
splitOneLine :: BS.ByteString -> Maybe (BS.ByteString, BS.ByteString)
splitOneLine bs = case ('\r' `BSC.elem` bs, '\n' `BSC.elem` bs) of
(True, _) -> let
(l, ls) = BSC.span (/= '\r') bs
Just ('\r', ls') = BSC.uncons ls in
case BSC.uncons ls' of
Just ('\n', ls'') -> Just (l, ls'')
_ -> Just (l, ls')
(_, True) -> let
(l, ls) = BSC.span (/= '\n') bs
Just ('\n', ls') = BSC.uncons ls in Just (l, ls')
_ -> Nothing
tGetLine :: TlsServer -> IO BS.ByteString
tGetLine ts = do
bfr <- atomically . readTVar $ tlsBuffer ts
case splitOneLine bfr of
Just (l, ls) -> atomically $ do
writeTVar (tlsBuffer ts) ls
return l
_ -> do msg <- tGetWhole ts
atomically $ writeTVar (tlsBuffer ts) msg
(bfr `BS.append`) <$> tGetLine ts
tGetContent :: TlsServer -> IO BS.ByteString
tGetContent ts = do
bfr <- atomically . readTVar $ tlsBuffer ts
if BS.null bfr then tGetWhole ts else atomically $ do
writeTVar (tlsBuffer ts) BS.empty
return bfr
debugPrintKeys :: TlsIo cnt ()
debugPrintKeys = do
Just ms <- gets tlssMasterSecret
Just cwmk <- gets tlssClientWriteMacKey
Just swmk <- gets tlssServerWriteMacKey
Just cwk <- gets tlssClientWriteKey
Just swk <- gets tlssServerWriteKey
-- Just cwi <- gets tlssClientWriteIv
-- Just swi <- gets tlssServerWriteIv
liftIO $ do
putStrLn "### GENERATED KEYS ###"
putStrLn $ "\tMaster Secret : " ++ show ms
putStrLn $ "\tClntWr MAC Key: " ++ showKeySingle cwmk
putStrLn $ "\tSrvrWr MAC Key: " ++ showKeySingle swmk
putStrLn $ "\tClntWr Key : " ++ showKeySingle cwk
putStrLn $ "\tSrvrWr Key : " ++ showKeySingle swk
-- putStrLn $ "\tClntWr IV : " ++ showKeySingle cwi
-- putStrLn $ "\tSrvrWr IV : " ++ showKeySingle swi
tClose :: TlsServer -> IO ()
tClose ts = do
tPutWithCT ts ContentTypeAlert "\SOH\NUL"
tGetWholeWithCT ts >>= \c -> if c /= (ContentTypeAlert, "\SOH\NUL")
then print c else return ()
hClose h
where
h = tlsHandle ts
getRandomGen :: TlsIo cnt SystemRNG
getRandomGen = gets tlssRandomGen
setRandomGen :: SystemRNG -> TlsIo cnt ()
setRandomGen g = do
tlss <- get
put tlss{ tlssRandomGen = g }
| YoshikuniJujo/forest | subprojects/tls-analysis/client/TlsIo.hs | bsd-3-clause | 18,760 | 285 | 18 | 3,437 | 6,477 | 3,268 | 3,209 | 492 | 6 |
-----------------------------------------------------------------------------
-- |
-- Module : Text.ParserCombinators.Parsec.Prim
-- Copyright : (c) Daan Leijen 1999-2001
-- License : BSD-style (see the file libraries/parsec/LICENSE)
--
-- Maintainer : daan@cs.uu.nl
-- Stability : provisional
-- Portability : portable
--
-- The primitive parser combinators.
--
-----------------------------------------------------------------------------
module Text.ParserCombinators.Parsec.Prim
( -- operators: label a parser, alternative
(<?>), (<|>)
-- basic types
, Parser, GenParser
, runParser, parse, parseFromFile, parseTest
-- primitive parsers:
-- instance Functor Parser : fmap
-- instance Monad Parser : return, >>=, fail
-- instance MonadPlus Parser : mzero (pzero), mplus (<|>)
, token, tokens, tokenPrim, tokenPrimEx
, try, label, labels, unexpected, pzero
-- primitive because of space behaviour
, many, skipMany
-- user state manipulation
, getState, setState, updateState
-- state manipulation
, getPosition, setPosition
, getInput, setInput
, getParserState, setParserState
) where
import Prelude
import Text.ParserCombinators.Parsec.Pos
import Text.ParserCombinators.Parsec.Error
import Control.Monad
{-# INLINE parsecMap #-}
{-# INLINE parsecReturn #-}
{-# INLINE parsecBind #-}
{-# INLINE parsecZero #-}
{-# INLINE parsecPlus #-}
{-# INLINE token #-}
{-# INLINE tokenPrim #-}
-----------------------------------------------------------
-- Operators:
-- <?> gives a name to a parser (which is used in error messages)
-- <|> is the choice operator
-----------------------------------------------------------
infix 0 <?>
infixr 1 <|>
(<?>) :: GenParser tok st a -> String -> GenParser tok st a
p <?> msg = label p msg
(<|>) :: GenParser tok st a -> GenParser tok st a -> GenParser tok st a
p1 <|> p2 = mplus p1 p2
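-- Usage sketch (an illustrative addition, not part of the original
-- module): 'exampleBit' accepts a '0' or '1' and reports the label
-- "bit" in error messages. The helper 'exampleChar' is built on
-- 'tokenPrim' only so the example stays inside this module; real code
-- would use 'char' from the Char module instead.
exampleChar :: Char -> GenParser Char st Char
exampleChar c = tokenPrim (\x -> show [x])
                          (\pos x _ -> updatePosChar pos x)
                          (\x -> if x == c then Just x else Nothing)
exampleBit :: GenParser Char st Char
exampleBit = (exampleChar '0' <|> exampleChar '1') <?> "bit"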
-----------------------------------------------------------
-- User state combinators
-----------------------------------------------------------
getState :: GenParser tok st st
getState = do{ state <- getParserState
; return (stateUser state)
}
setState :: st -> GenParser tok st ()
setState st = do{ updateParserState (\(State input pos _) -> State input pos st)
; return ()
}
updateState :: (st -> st) -> GenParser tok st ()
updateState f = do{ updateParserState (\(State input pos user) -> State input pos (f user))
; return ()
}
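-- Usage sketch (illustrative addition): a wrapper that threads an Int
-- user state through a parser, bumping it with 'updateState' after each
-- successful parse and reading it back with 'getState'. The name
-- 'exampleCounted' is ours, not part of the library.
exampleCounted :: GenParser tok Int a -> GenParser tok Int (a, Int)
exampleCounted p = do{ x <- p
                     ; updateState (+1)
                     ; n <- getState
                     ; return (x, n)
                     }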
-----------------------------------------------------------
-- Parser state combinators
-----------------------------------------------------------
getPosition :: GenParser tok st SourcePos
getPosition = do{ state <- getParserState; return (statePos state) }
getInput :: GenParser tok st [tok]
getInput = do{ state <- getParserState; return (stateInput state) }
setPosition :: SourcePos -> GenParser tok st ()
setPosition pos = do{ updateParserState (\(State input _ user) -> State input pos user)
; return ()
}
setInput :: [tok] -> GenParser tok st ()
setInput input = do{ updateParserState (\(State _ pos user) -> State input pos user)
; return ()
}
getParserState :: GenParser tok st (State tok st)
getParserState = updateParserState id
setParserState :: State tok st -> GenParser tok st (State tok st)
setParserState st = updateParserState (const st)
-----------------------------------------------------------
-- Parser definition.
-- GenParser tok st a:
-- General parser for tokens of type "tok",
-- a user state "st" and a result type "a"
-----------------------------------------------------------
type Parser a = GenParser Char () a
newtype GenParser tok st a = Parser (State tok st -> Consumed (Reply tok st a))
runP (Parser p) = p
data Consumed a = Consumed a --input is consumed
| Empty !a --no input is consumed
data Reply tok st a = Ok !a !(State tok st) ParseError --parsing succeeded with "a"
| Error ParseError --parsing failed
data State tok st = State { stateInput :: [tok]
, statePos :: !SourcePos
, stateUser :: !st
}
-----------------------------------------------------------
-- run a parser
-----------------------------------------------------------
parseFromFile :: Parser a -> SourceName -> IO (Either ParseError a)
parseFromFile p fname
= do{ input <- readFile fname
; return (parse p fname input)
}
parseTest :: Show a => GenParser tok () a -> [tok] -> IO ()
parseTest p input
= case (runParser p () "" input) of
Left err -> do{ putStr "parse error at "
; print err
}
Right x -> print x
parse :: GenParser tok () a -> SourceName -> [tok] -> Either ParseError a
parse p name input
= runParser p () name input
runParser :: GenParser tok st a -> st -> SourceName -> [tok] -> Either ParseError a
runParser p st name input
= case parserReply (runP p (State input (initialPos name) st)) of
Ok x _ _ -> Right x
Error err -> Left err
parserReply result
= case result of
Consumed reply -> reply
Empty reply -> reply
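-- Usage sketch (illustrative addition): running a parser over a plain
-- token list with 'parse'. The single-token parser is written with
-- 'tokenPrim' so the example needs nothing outside this module; the
-- names 'exampleRun' and 'anyTok' are assumptions made for the sketch.
exampleRun :: Either ParseError Char
exampleRun = parse anyTok "example" "abc"
  where
    anyTok = tokenPrim (\c -> show [c])
                       (\pos c _ -> updatePosChar pos c)
                       Just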
-----------------------------------------------------------
-- Functor: fmap
-----------------------------------------------------------
instance Functor (GenParser tok st) where
fmap f p = parsecMap f p
parsecMap :: (a -> b) -> GenParser tok st a -> GenParser tok st b
parsecMap f (Parser p)
= Parser (\state ->
case (p state) of
Consumed reply -> Consumed (mapReply reply)
Empty reply -> Empty (mapReply reply)
)
where
mapReply reply
= case reply of
Ok x state err -> let fx = f x
in seq fx (Ok fx state err)
Error err -> Error err
-----------------------------------------------------------
-- Monad: return, sequence (>>=) and fail
-----------------------------------------------------------
instance Monad (GenParser tok st) where
return x = parsecReturn x
p >>= f = parsecBind p f
fail msg = parsecFail msg
parsecReturn :: a -> GenParser tok st a
parsecReturn x
= Parser (\state -> Empty (Ok x state (unknownError state)))
parsecBind :: GenParser tok st a -> (a -> GenParser tok st b) -> GenParser tok st b
parsecBind (Parser p) f
= Parser (\state ->
case (p state) of
Consumed reply1
-> Consumed $
case (reply1) of
Ok x state1 err1 -> case runP (f x) state1 of
Empty reply2 -> mergeErrorReply err1 reply2
Consumed reply2 -> reply2
Error err1 -> Error err1
Empty reply1
-> case (reply1) of
Ok x state1 err1 -> case runP (f x) state1 of
Empty reply2 -> Empty (mergeErrorReply err1 reply2)
other -> other
Error err1 -> Empty (Error err1)
)
mergeErrorReply err1 reply
= case reply of
Ok x state err2 -> Ok x state (mergeError err1 err2)
Error err2 -> Error (mergeError err1 err2)
parsecFail :: String -> GenParser tok st a
parsecFail msg
= Parser (\state ->
Empty (Error (newErrorMessage (Message msg) (statePos state))))
-----------------------------------------------------------
-- MonadPlus: alternative (mplus) and mzero
-----------------------------------------------------------
instance MonadPlus (GenParser tok st) where
mzero = parsecZero
mplus p1 p2 = parsecPlus p1 p2
pzero :: GenParser tok st a
pzero = parsecZero
parsecZero :: GenParser tok st a
parsecZero
= Parser (\state -> Empty (Error (unknownError state)))
parsecPlus :: GenParser tok st a -> GenParser tok st a -> GenParser tok st a
parsecPlus (Parser p1) (Parser p2)
= Parser (\state ->
case (p1 state) of
Empty (Error err) -> case (p2 state) of
Empty reply -> Empty (mergeErrorReply err reply)
consumed -> consumed
other -> other
)
{-
-- variant that favors a consumed reply over an empty one, even if it is not the first alternative.
empty@(Empty reply) -> case reply of
Error err ->
case (p2 state) of
Empty reply -> Empty (mergeErrorReply err reply)
consumed -> consumed
ok ->
case (p2 state) of
Empty reply -> empty
consumed -> consumed
consumed -> consumed
-}
-----------------------------------------------------------
-- Primitive Parsers:
-- try, token(Prim), label, unexpected and updateState
-----------------------------------------------------------
try :: GenParser tok st a -> GenParser tok st a
try (Parser p)
= Parser (\state@(State input pos user) ->
case (p state) of
Consumed (Error err) -> Empty (Error (setErrorPos pos err))
Consumed ok -> Consumed ok -- was: Empty ok
empty -> empty
)
token :: (tok -> String) -> (tok -> SourcePos) -> (tok -> Maybe a) -> GenParser tok st a
token show tokpos test
= tokenPrim show nextpos test
where
nextpos _ _ (tok:toks) = tokpos tok
nextpos _ tok [] = tokpos tok
tokenPrim :: (tok -> String) -> (SourcePos -> tok -> [tok] -> SourcePos) -> (tok -> Maybe a) -> GenParser tok st a
tokenPrim show nextpos test
= tokenPrimEx show nextpos Nothing test
-- | The most primitive token recogniser. The expression @tokenPrimEx show nextpos mbnextstate test@,
-- recognises tokens when @test@ returns @Just x@ (and returns the value @x@). Tokens are shown in
-- error messages using @show@. The position is calculated using @nextpos@, and finally, @mbnextstate@
-- can hold a function that updates the user state on every token recognised (nice to count tokens :-).
-- The function is packed into a 'Maybe' type for performance reasons.
tokenPrimEx :: (tok -> String) ->
(SourcePos -> tok -> [tok] -> SourcePos) ->
Maybe (SourcePos -> tok -> [tok] -> st -> st) ->
(tok -> Maybe a) ->
GenParser tok st a
tokenPrimEx show nextpos mbNextState test
= case mbNextState of
Nothing
-> Parser (\state@(State input pos user) ->
case input of
(c:cs) -> case test c of
Just x -> let newpos = nextpos pos c cs
newstate = State cs newpos user
in seq newpos $ seq newstate $
Consumed (Ok x newstate (newErrorUnknown newpos))
Nothing -> Empty (sysUnExpectError (show c) pos)
[] -> Empty (sysUnExpectError "" pos)
)
Just nextState
-> Parser (\state@(State input pos user) ->
case input of
(c:cs) -> case test c of
Just x -> let newpos = nextpos pos c cs
newuser = nextState pos c cs user
newstate = State cs newpos newuser
in seq newpos $ seq newstate $
Consumed (Ok x newstate (newErrorUnknown newpos))
Nothing -> Empty (sysUnExpectError (show c) pos)
[] -> Empty (sysUnExpectError "" pos)
)
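-- Usage sketch for the comment above (illustrative addition): a
-- character parser that also counts every recognised token in an Int
-- user state by passing a 'Just' state-update function to
-- 'tokenPrimEx'. The name 'exampleCountingTok' is an assumption.
exampleCountingTok :: (Char -> Bool) -> GenParser Char Int Char
exampleCountingTok p
    = tokenPrimEx (\c -> show [c])
                  (\pos c _ -> updatePosChar pos c)
                  (Just (\_ _ _ n -> n + 1))
                  (\c -> if p c then Just c else Nothing)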
label :: GenParser tok st a -> String -> GenParser tok st a
label p msg
= labels p [msg]
labels :: GenParser tok st a -> [String] -> GenParser tok st a
labels (Parser p) msgs
= Parser (\state ->
case (p state) of
Empty reply -> Empty $
case (reply) of
Error err -> Error (setExpectErrors err msgs)
Ok x state1 err | errorIsUnknown err -> reply
| otherwise -> Ok x state1 (setExpectErrors err msgs)
other -> other
)
updateParserState :: (State tok st -> State tok st) -> GenParser tok st (State tok st)
updateParserState f
= Parser (\state -> let newstate = f state
in Empty (Ok state newstate (unknownError newstate)))
unexpected :: String -> GenParser tok st a
unexpected msg
= Parser (\state -> Empty (Error (newErrorMessage (UnExpect msg) (statePos state))))
setExpectErrors err [] = setErrorMessage (Expect "") err
setExpectErrors err [msg] = setErrorMessage (Expect msg) err
setExpectErrors err (msg:msgs) = foldr (\msg err -> addErrorMessage (Expect msg) err)
(setErrorMessage (Expect msg) err) msgs
sysUnExpectError msg pos = Error (newErrorMessage (SysUnExpect msg) pos)
unknownError state = newErrorUnknown (statePos state)
-----------------------------------------------------------
-- Parsers unfolded for space:
-- if many and skipMany are not defined as primitives,
-- they will overflow the stack on large inputs
-----------------------------------------------------------
many :: GenParser tok st a -> GenParser tok st [a]
many p
= do{ xs <- manyAccum (:) p
; return (reverse xs)
}
skipMany :: GenParser tok st a -> GenParser tok st ()
skipMany p
= do{ manyAccum (\x xs -> []) p
; return ()
}
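-- Usage sketch (illustrative addition): collecting a run of digits with
-- 'many'. Because 'many' is a primitive here, this runs in constant
-- stack space even on long inputs; 'exampleDigits' is our name for the
-- sketch, not a library export.
exampleDigits :: GenParser Char st String
exampleDigits = many (tokenPrim (\c -> show [c])
                                (\pos c _ -> updatePosChar pos c)
                                (\c -> if c >= '0' && c <= '9'
                                          then Just c else Nothing))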
manyAccum :: (a -> [a] -> [a]) -> GenParser tok st a -> GenParser tok st [a]
manyAccum accum (Parser p)
= Parser (\state ->
let walk xs state r = case r of
Empty (Error err) -> Ok xs state err
Empty ok -> error "Text.ParserCombinators.Parsec.Prim.many: combinator 'many' is applied to a parser that accepts an empty string."
Consumed (Error err) -> Error err
Consumed (Ok x state' err) -> let ys = accum x xs
in seq ys (walk ys state' (p state'))
in case (p state) of
Empty reply -> case reply of
Ok x state' err -> error "Text.ParserCombinators.Parsec.Prim.many: combinator 'many' is applied to a parser that accepts an empty string."
Error err -> Empty (Ok [] state err)
consumed -> Consumed $ walk [] state consumed)
-----------------------------------------------------------
-- Parsers unfolded for speed:
-- tokens
-----------------------------------------------------------
{- specification of @tokens@:
tokens showss nextposs s
= scan s
where
scan [] = return s
scan (c:cs) = do{ token show nextpos c <?> shows s; scan cs }
show c = shows [c]
nextpos pos c = nextposs pos [c]
-}
tokens :: Eq tok => ([tok] -> String) -> (SourcePos -> [tok] -> SourcePos) -> [tok] -> GenParser tok st [tok]
tokens shows nextposs s
= Parser (\state@(State input pos user) ->
let
ok cs = let newpos = nextposs pos s
newstate = State cs newpos user
in seq newpos $ seq newstate $
(Ok s newstate (newErrorUnknown newpos))
errEof = Error (setErrorMessage (Expect (shows s))
(newErrorMessage (SysUnExpect "") pos))
errExpect c = Error (setErrorMessage (Expect (shows s))
(newErrorMessage (SysUnExpect (shows [c])) pos))
walk [] cs = ok cs
walk xs [] = errEof
walk (x:xs) (c:cs)| x == c = walk xs cs
| otherwise = errExpect c
walk1 [] cs = Empty (ok cs)
walk1 xs [] = Empty (errEof)
walk1 (x:xs) (c:cs)| x == c = Consumed (walk xs cs)
| otherwise = Empty (errExpect c)
in walk1 s input)
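-- Usage sketch (illustrative addition): matching a fixed keyword with
-- 'tokens', using 'updatePosString' from the Pos module to advance the
-- source position over the whole matched string. 'exampleKeyword' is an
-- assumed name for the sketch.
exampleKeyword :: GenParser Char st String
exampleKeyword = tokens show updatePosString "let"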
| OS2World/DEV-UTIL-HUGS | libraries/Text/ParserCombinators/Parsec/Prim.hs | bsd-3-clause | 17,579 | 4 | 24 | 6,436 | 4,367 | 2,212 | 2,155 | 267 | 6 |
--Basically a brute-force attempt. For larger sums, say x+y+z=10000, this will take a very long time; at 1000 it takes a minute or so.
triplet n = [(x,y,z) | x <- [1..(n-1)], y <- [1..(n-x)], z <- [1..(n-x-y)], x+y+z == n, z>y, z>x, y>x, x^2+y^2==z^2]
problem9 = triplet 1000
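--A hedged sketch of the final Project Euler answer (assumes, as the
--problem statement guarantees, that exactly one such triplet exists):
--the answer is the product of the triple.
answer9 = case problem9 of
    ((a,b,c):_) -> a*b*c
    _ -> error "answer9: no triplet found"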
| thomas-oo/projectEulerHaskell | src/Problem9.hs | bsd-3-clause | 293 | 0 | 11 | 53 | 158 | 84 | 74 | 2 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveGeneric #-}
module MidiRhythm.NotePress (
Duration(..),
Press(..),
NotePress(..),
Velocity(..),
Pitch(..),
ElapsedTime(..),
PressCount(..),
) where
import qualified Numeric.NonNegative.Wrapper as NonNeg
import GHC.Generics
newtype ElapsedTime = ElapsedTime NonNeg.Integer
deriving (Show, Eq, Ord, Num, Integral, Real, Enum)
newtype Velocity = Velocity NonNeg.Int
deriving (Show, Eq, Ord, Num, Integral, Real, Enum)
type Duration = ElapsedTime
newtype Pitch = Pitch NonNeg.Int
deriving (Show, Eq, Ord, Num, Integral, Real, Enum)
newtype PressCount = PressCount NonNeg.Int
deriving (Show, Eq, Ord, Num, Integral, Real, Enum)
data Press = Press ElapsedTime Velocity Duration deriving (Show, Eq, Ord)
data NotePress = NotePress {
notePressTime :: ElapsedTime,
notePressVelocity :: Velocity,
notePressDuration :: Duration,
notePressPitch :: Pitch
} deriving (Show, Eq, Ord, Generic)
| a10nik/midiRhythm | src/MidiRhythm/NotePress.hs | bsd-3-clause | 979 | 0 | 8 | 168 | 323 | 190 | 133 | 27 | 0 |
{-# LANGUAGE StandaloneDeriving
#-}
import Data.List (concatMap, nub)
import Data.Ratio
import Data.ByteString.Char8 (pack)
import Test.QuickCheck
import Data.Trie
import qualified Data.ByteString.UTF8 as UTF8
import qualified Text.JSONb as JSONb
prop_structures_parse = samples structure_tests
samples tests = forAll (elements tests) with_classifiers
where
with_classifiers :: (String, JSONb.JSON, [String]) -> Property
with_classifiers = compound (property . array_parse) classifiers
where
compound = foldl (flip ($))
array_parse (s, j, info) = rt s == Right j
classifiers = (fmap classifier . nub . concatMap third) tests
where
third (_,_,t) = t
classifier string p x = classify (string `elem` third x) string $ p x
prop_integer_round_trip :: Integer -> Property
prop_integer_round_trip n = collect bin $ case (rt . show) n of
Right (JSONb.Number r) -> r == fromIntegral n
_ -> False
where
bin
| n == 0 = Bounds (Open (-1)) (Open 1)
| n >= 1 && n < 100 = Bounds (Closed 1) (Open 100)
| n > -100 && n <= -1 = Bounds (Open (-100)) (Closed (-1))
| n <= -100 = Bounds Infinite (Closed (-100))
| n >= -100 = Bounds (Closed (100)) Infinite
prop_double_round_trip :: Double -> Property
prop_double_round_trip n = collect bin $ case (rt . show) n of
Right (JSONb.Number r) -> fromRational r == n
_ -> False
where
bin
| n < 1 && n > -1 = Bounds (Open (-1)) (Open 1)
| n >= 1 && n < 100 = Bounds (Closed 1) (Open 100)
| n > -100 && n <= -1 = Bounds (Open (-100)) (Closed (-1))
| n <= -100 = Bounds Infinite (Closed (-100))
| n >= -100 = Bounds (Closed (100)) Infinite
prop_string_round_trip s = high . escapes $ case round_trip bytes of
Right (JSONb.String b) -> bytes == b
_ -> False
where
bytes = UTF8.fromString s
round_trip = JSONb.decode . JSONb.encode JSONb.Compact . JSONb.String
high = classify (any (> '\x7f') s) "above ASCII"
escapes = classify (any JSONb.escaped s) "escaped chars"
data Bounds n where
Bounds :: (Show n, Num n) => Bound n -> Bound n -> Bounds n
instance (Show n) => Show (Bounds n) where
show (Bounds l r) = case (l, r) of
(Open l, Open r) -> "(" ++ show l ++ ".." ++ show r ++ ")"
(Closed l, Closed r) -> "[" ++ show l ++ ".." ++ show r ++ "]"
(Closed l, Open r) -> "[" ++ show l ++ ".." ++ show r ++ ")"
(Open l, Closed r) -> "(" ++ show l ++ ".." ++ show r ++ "]"
(Closed l, Infinite) -> "[" ++ show l ++ ".."
(Infinite, Closed r) -> ".." ++ show r ++ "]"
(Open l, Infinite) -> "(" ++ show l ++ ".."
(Infinite, Open r) -> ".." ++ show r ++ ")"
(Infinite, Infinite) -> ".."
data Bound n where
Open :: (Show n, Num n) => n -> Bound n
Closed :: (Show n, Num n) => n -> Bound n
Infinite :: (Show n, Num n) => Bound n
rt = JSONb.decode . pack . (++ " ")
{- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
We have to add a space so that the number parser terminates.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -}
structure_tests =
[ ( "[ 7, 6 ]", (JSONb.Array . fmap JSONb.Number) [7, 6]
, ["array", "excessive spacing", "integers"] )
, ( "[]", JSONb.Array []
, ["array", "compact spacing", "empty"] )
, ( "[ ]", JSONb.Array []
, ["array", "normal spacing", "empty"] )
, ( "[7,6]", (JSONb.Array . fmap JSONb.Number) [7, 6]
, ["array", "compact spacing", "integers"] )
, ( "[7.6, 21]", (JSONb.Array . fmap JSONb.Number) [7.6, 21.0]
, ["array", "normal spacing", "floats"] )
, ( "[22.0 ,7.6,]", (JSONb.Array . fmap JSONb.Number) [22, 7.6]
, ["array", "weird comma spacing", "extra comma", "floats"] )
, ( "[\"22.0\" ,7.6,]"
, JSONb.Array [(JSONb.String . pack) "22.0", JSONb.Number 7.6]
, ["array", "weird comma spacing", "extra comma", "floats", "strings"] )
, ( "{ \"ixion\":6 }"
, (JSONb.Object . fromList) [(pack "ixion", JSONb.Number 6)]
, ["object", "no commas", "integers"] )
, ( "{ \"Ack\":\"Success\" ,\"Build\" :\"e605_core_Bundled_8000231_R1\"}"
, (JSONb.Object . fromList)
[ (pack "Ack", JSONb.String (pack "Success"))
, ( pack "Build"
, JSONb.String (pack "e605_core_Bundled_8000231_R1") ) ]
, ["object", "random spacing", "strings"] )
, ( "{\n\"Ack\"\n:\n\"Success\" , \"Build\":\"e605_core_Bundled_8000231_R1\"}"
, (JSONb.Object . fromList)
[ (pack "Ack", JSONb.String (pack "Success"))
, ( pack "Build"
, JSONb.String (pack "e605_core_Bundled_8000231_R1") ) ]
, ["object", "newlines", "strings"] )
, ( "{}", JSONb.Object empty
, ["object", "compact spacing", "empty"] )
, ( "{ }", JSONb.Object empty
, ["object", "normal spacing", "empty"] )
, ( "{\"Ack\":\"Success\",\"Build\":\"e605_core_Bundled_8000231_R1\"}"
, (JSONb.Object . fromList)
[ (pack "Ack", JSONb.String (pack "Success"))
, ( pack "Build"
, JSONb.String (pack "e605_core_Bundled_8000231_R1") ) ]
, ["object", "compact spacing", "strings"] )
]
| solidsnack/JSONb | test/SimpleUnits.hs | bsd-3-clause | 5,575 | 0 | 13 | 1,726 | 2,007 | 1,066 | 941 | -1 | -1 |