| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
-----------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.Support.PagerHints
-- Copyright : (c) José A. Romero L.
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : José A. Romero L. <escherdragon@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Complements the "XMonad.Hooks.EwmhDesktops" with two additional hints
-- not contemplated by the EWMH standard:
--
-- [@_XMONAD_CURRENT_LAYOUT@] Contains a UTF-8 string with the name of the
-- window layout currently in use on the active workspace.
--
-- [@_XMONAD_VISIBLE_WORKSPACES@] Contains a list of UTF-8 strings with the
-- names of all the workspaces that are currently shown on a secondary
-- display, or an empty list if the current installation has only one
-- monitor.
--
-- The first hint can be set directly on the root window of the default
-- display, or indirectly via X11 events with an atom of the same
-- name. This makes it possible both to track any changes in the layout of
-- the current workspace and to have the layout changed automatically by
-- simply sending a custom event to the hook.
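--
-- For example, an external pager could request a layout switch by sending
-- a ClientMessage carrying that atom to the root window. A rough sketch
-- using the X11 bindings (not part of this module; the exact event mask is
-- an assumption):
--
-- > requestNextLayout :: Display -> IO ()
-- > requestNextLayout dpy = do
-- >   a <- internAtom dpy "_XMONAD_CURRENT_LAYOUT" False
-- >   allocaXEvent $ \e -> do
-- >     setEventType e clientMessage
-- >     setClientMessageEvent e (defaultRootWindow dpy) a 32 1 currentTime
-- >     sendEvent dpy (defaultRootWindow dpy) False structureNotifyMask e
-- >   sync dpy False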
--
-- The second one should be considered read-only, and is set every time
-- XMonad calls its log hooks.
--
-----------------------------------------------------------------------------
module System.Taffybar.Support.PagerHints (
-- * Usage
-- $usage
pagerHints
) where
import Codec.Binary.UTF8.String (encode)
import Control.Monad
import Data.Monoid
import Foreign.C.Types (CInt)
import XMonad
import qualified XMonad.StackSet as W
-- $usage
--
-- You can use this module with the following in your @xmonad.hs@ file:
--
-- > import System.Taffybar.Support.PagerHints (pagerHints)
-- >
-- > main = xmonad $ ewmh $ pagerHints $ defaultConfig
-- > ...
-- | The \"Current Layout\" custom hint.
xLayoutProp :: X Atom
xLayoutProp = getAtom "_XMONAD_CURRENT_LAYOUT"
-- | The \"Visible Workspaces\" custom hint.
xVisibleProp :: X Atom
xVisibleProp = getAtom "_XMONAD_VISIBLE_WORKSPACES"
-- | Add support for the \"Current Layout\" and \"Visible Workspaces\" custom
-- hints to the given config.
pagerHints :: XConfig a -> XConfig a
pagerHints c = c { handleEventHook = handleEventHook c +++ pagerHintsEventHook
, logHook = logHook c +++ pagerHintsLogHook }
where x +++ y = x `mappend` y
-- | Update the current values of both custom hints.
pagerHintsLogHook :: X ()
pagerHintsLogHook = do
withWindowSet
(setCurrentLayout . description . W.layout . W.workspace . W.current)
withWindowSet
(setVisibleWorkspaces . map (W.tag . W.workspace) . W.visible)
-- | Set the value of the \"Current Layout\" custom hint to the one given.
setCurrentLayout :: String -> X ()
setCurrentLayout l = withDisplay $ \dpy -> do
r <- asks theRoot
a <- xLayoutProp
c <- getAtom "UTF8_STRING"
let l' = map fromIntegral (encode l)
io $ changeProperty8 dpy r a c propModeReplace l'
-- | Set the value of the \"Visible Workspaces\" hint to the one given.
setVisibleWorkspaces :: [String] -> X ()
setVisibleWorkspaces vis = withDisplay $ \dpy -> do
r <- asks theRoot
a <- xVisibleProp
c <- getAtom "UTF8_STRING"
let vis' = map fromIntegral $ concatMap ((++[0]) . encode) vis
io $ changeProperty8 dpy r a c propModeReplace vis'
-- | Handle all \"Current Layout\" events received from pager widgets, and
-- set the current layout accordingly.
pagerHintsEventHook :: Event -> X All
pagerHintsEventHook ClientMessageEvent {
ev_message_type = mt,
ev_data = d
} = withWindowSet $ \_ -> do
a <- xLayoutProp
when (mt == a) $ sendLayoutMessage d
return (All True)
pagerHintsEventHook _ = return (All True)
-- | Request a change in the current layout by sending an internal message
-- to XMonad.
sendLayoutMessage :: [CInt] -> X ()
sendLayoutMessage evData = case evData of
[] -> return ()
x:_ -> if x < 0
then sendMessage FirstLayout
else sendMessage NextLayout
| teleshoes/taffybar | src/System/Taffybar/Support/PagerHints.hs | bsd-3-clause | 3,992 | 0 | 17 | 732 | 674 | 366 | 308 | 51 | 3 |
-- Monadic command line checkers.
-- @2013 Angel Alvarez
module OptsCheck where
-- Main module needed imports
import Control.Exception (SomeException,evaluate,try)
import Control.Monad(foldM,liftM,ap)
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
import Data.List (find)
import Data.Maybe ( fromMaybe )
import Data.Either
import Text.Show.Functions
import System.Environment
import System.Console.GetOpt
import System.Directory ( doesDirectoryExist, doesFileExist )
import System.FilePath
-- Record for storing cmdline options
data Options = Options
{ optDump :: Bool
, optModules :: [(String,(Options-> IO()))]
, optMode :: Maybe (Options->IO())
, optVerbose :: Bool
, optShowVersion :: Bool
, optOutput :: Maybe FilePath
, optDataDir :: Maybe FilePath
, optInput :: [FilePath]
, optTemperature :: Maybe Double
} deriving (Show)
-- An EitherT container to store parsed opts from commandline or error messages
type OptsResult = EitherT String IO Options
-- An Opts filter running in the EitherT IO stack
type OptsFilter = ( Options -> OptsResult )
-- A Policy describing a command line option with a checking filter
type OptsPolicy = OptDescr OptsFilter
-- =============================================== Options checking ==============================================
-- progOpts: getOpt args processor that also checks getOpt's results semantically, or bails out in front of the user.
-- After parsing with getOpt, this function gets a list of lambdas representing semantic checks
-- for every cmdline switch. As an example, we check that an input file is present, that the data
-- directory (if given) is a valid one, and that the input file exists and is readable. Those checks are
-- performed by filename_check, check_data_dir and check_input_file respectively. These functions are
-- stored in the Options structure that getOpt uses.
-- We use an EitherT transformer to combine Either chaining with arbitrary IO actions needed during checking.
-- ===============================================================================================================
progOpts :: [String] -> Options -> [OptsPolicy] -> OptsResult
progOpts args defaultOptions acceptedOptions =
case getOpt RequireOrder acceptedOptions args of
(funs,[],[]) -> do
left "input file(s) missing"
(funs,filenames,[]) -> do
resultOfFuns <- foldl (>>=) (return defaultOptions) funs -- Perform monadic checkings upon getOpt supplied functions
foldM check_input_file resultOfFuns $ reverse filenames -- Now check if all the input files exist and are accesible
(_,_,errs) -> do
left ( concat errs )
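-- A minimal usage sketch (illustrative, not part of the original project):
-- build a policy list from the checkers defined below and run the whole
-- pipeline. The flag names and the way the result is reported here are
-- assumptions made only for this example.
exampleMain :: Options -> IO ()
exampleMain defaults = do
    args <- getArgs
    result <- runEitherT $ progOpts args defaults examplePolicies
    case result of
        Left msg -> putStrLn $ "Error: " ++ msg
        Right opts -> print opts
  where
    examplePolicies :: [OptsPolicy]
    examplePolicies =
        [ Option "h" ["help"] (NoArg check_help) "show this help"
        , Option "v" ["verbose"] (NoArg check_verbosity) "increase verbosity"
        , Option "d" ["datadir"] (ReqArg check_data_dir "DIR") "data directory"
        , Option "t" ["temperature"] (ReqArg check_temperature "VALUE") "temperature value"
        ]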
-- =============================================== Monadic Options checkers =======================================
-- getOpt will partially apply each checker against its supplied argument, so we can just thread the options record through.
-- Who are we? Sort of alien outbreak?
check_version :: Options -> OptsResult
check_version optsR = return $ optsR { optShowVersion = True }
-- help message; don't panic, we are just not going anywhere after showing the help
check_help :: Options -> OptsResult
check_help _ = left "Command line help requested"
-- Check that the supplied input files exist, or bail out if any does not.
check_input_file :: Options -> String -> OptsResult
check_input_file optsR@Options { optInput = files , optDataDir = dataDir } filename = do
test <- liftIO $ filename_check dataDir filename
case test of
True -> return $ (optsR { optInput = filename : files })
False -> left $ "input file "++ filename ++ " not readable"
where
filename_check :: Maybe FilePath -> FilePath -> IO Bool --check file with or without data directory
filename_check (Just datadir) filename = doesFileExist $ combine datadir filename
filename_check Nothing filename = doesFileExist filename
-- The user passed some directory; we make sure this dir exists so files will be matched against it
check_data_dir :: String -> Options -> OptsResult
check_data_dir dir optsR = do
test <- liftIO $ doesDirectoryExist dir
case test of
True -> return $ optsR { optDataDir = Just dir }
False -> left ( "Data directory " ++ dir ++ " does not exist" )
-- check user wants verbosity level increased
check_verbosity :: Options -> OptsResult
check_verbosity optsR = return $ optsR { optVerbose = True }
-- check mode of operation. A list of modules is provided in the options record
check_operation_mode :: String -> Options -> OptsResult
check_operation_mode mode optsR@Options { optModules = modules } = do
return $ optsR { optMode = selectedModule }
where
selectedModule = case (findmodule mode modules) of
Just (_,fun) -> Just fun
Nothing -> Nothing
findmodule :: String -> [(String, (Options-> IO()))] -> Maybe (String,(Options -> IO ()))
findmodule mode = find ((== mode ).fst )
-- dump either options or errors as we get passed through
check_dump_options :: Options -> OptsResult
check_dump_options optsR = do
liftIO $ putStrLn $ "\n\nOptions dumping selected record: \n\t" ++ show optsR ++ "\n"
return $ optsR { optDump =True }
-- | Optional temperature argument taken from the command line
check_temperature :: String -> Options -> OptsResult
check_temperature temp optsR = do
r <- liftIO (try (evaluate (read temp)) :: IO (Either SomeException Double))
case r of
Right val -> if val < 0 then error "Temperature must be a positive number"
else return $ optsR { optTemperature = Just val }
Left _ -> left $ "Wrong temperature value: " ++ temp
| felipeZ/Dynamics | src/OptsCheck.hs | bsd-3-clause | 5,822 | 0 | 14 | 1,272 | 1,123 | 614 | 509 | 77 | 3 |
module Main where
import Tandoori.GHC
import Tandoori.GHC.Parse
import Tandoori.GHC.Scope
import GHC
import Outputable
import IOEnv
import System.Environment
import Tandoori.Typing.Infer
import Tandoori.Typing.Show
typecheckMod mod = runDyn $ do
env <- getSession
(limports, ltydecls, group) <- liftIO $ runScope env mod
return $ infer (map unLoc ltydecls) group
main' [src_filename] = do mod <- parseMod src_filename
(c, errors) <- typecheckMod mod
if not(null errors)
then mapM_ (\ error -> printErrs $ ppr error $ mkErrStyle neverQualify) errors
else return ()
case c of
Just (ctxt, m) -> printCtxt ctxt
Nothing -> return ()
return c
main' _ = error "Usage: tandoori filename.hs"
main = do args <- getArgs
main' args
| bitemyapp/tandoori | src/main.hs | bsd-3-clause | 1,106 | 0 | 13 | 494 | 271 | 137 | 134 | 26 | 3 |
module PigLatin (translate) where
import Data.Char (isLetter)
import Data.List (isSuffixOf)
consonantCluster :: String -> (String, String)
consonantCluster = qu . break (`elem` "aeiou")
where qu (cs, ('u':rest)) | "q" `isSuffixOf` cs = (cs ++ "u", rest)
qu pair = pair
translateWord :: String -> String
translateWord w0 = concat [before, w, cs, "ay", after]
where
(before, w1) = break isLetter w0
(w2, after) = span isLetter w1
(cs, w) = consonantCluster w2
translate :: String -> String
translate = unwords . map translateWord . words
| pminten/xhaskell | pig-latin/example.hs | mit | 569 | 0 | 10 | 118 | 226 | 127 | 99 | 14 | 2 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.Int (module M) where
import "base" GHC.Int as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/GHC/Int.hs | apache-2.0 | 729 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
{-# LANGUAGE Rank2Types #-}
-- | Internal types to the library.
module Stack.Types.Internal where
import Control.Concurrent.MVar
import Control.Monad.Logger (LogLevel)
import Data.Monoid.Extra
import Data.Text (Text)
import Lens.Micro
import Network.HTTP.Client.Conduit (Manager,HasHttpManager(..))
import Stack.Types.Config
-- | Monadic environment.
data Env config =
Env {envConfig :: !config
,envLogLevel :: !LogLevel
,envTerminal :: !Bool
,envReExec :: !Bool
,envManager :: !Manager
,envSticky :: !Sticky
,envSupportsUnicode :: !Bool}
instance HasStackRoot config => HasStackRoot (Env config) where
getStackRoot = getStackRoot . envConfig
instance HasPlatform config => HasPlatform (Env config) where
getPlatform = getPlatform . envConfig
getPlatformVariant = getPlatformVariant . envConfig
instance HasGHCVariant config => HasGHCVariant (Env config) where
getGHCVariant = getGHCVariant . envConfig
instance HasConfig config => HasConfig (Env config) where
getConfig = getConfig . envConfig
instance HasBuildConfig config => HasBuildConfig (Env config) where
getBuildConfig = getBuildConfig . envConfig
instance HasEnvConfig config => HasEnvConfig (Env config) where
getEnvConfig = getEnvConfig . envConfig
instance HasHttpManager (Env config) where
getHttpManager = envManager
class HasLogLevel r where
getLogLevel :: r -> LogLevel
instance HasLogLevel (Env config) where
getLogLevel = envLogLevel
instance HasLogLevel LogLevel where
getLogLevel = id
class HasTerminal r where
getTerminal :: r -> Bool
instance HasTerminal (Env config) where
getTerminal = envTerminal
class HasReExec r where
getReExec :: r -> Bool
instance HasReExec (Env config) where
getReExec = envReExec
class HasSupportsUnicode r where
getSupportsUnicode :: r -> Bool
instance HasSupportsUnicode (Env config) where
getSupportsUnicode = envSupportsUnicode
newtype Sticky = Sticky
{ unSticky :: Maybe (MVar (Maybe Text))
}
class HasSticky r where
getSticky :: r -> Sticky
instance HasSticky (Env config) where
getSticky = envSticky
envEnvConfig :: Lens' (Env EnvConfig) EnvConfig
envEnvConfig = lens (envConfig)
(\s t -> s {envConfig = t})
buildOptsMonoidHaddock :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidHaddock = lens (getFirst . buildMonoidHaddock)
(\buildMonoid t -> buildMonoid {buildMonoidHaddock = First t})
buildOptsMonoidTests :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidTests = lens (getFirst . buildMonoidTests)
(\buildMonoid t -> buildMonoid {buildMonoidTests = First t})
buildOptsMonoidBenchmarks :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidBenchmarks = lens (getFirst . buildMonoidBenchmarks)
(\buildMonoid t -> buildMonoid {buildMonoidBenchmarks = First t})
buildOptsMonoidInstallExes :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidInstallExes =
lens (getFirst . buildMonoidInstallExes)
(\buildMonoid t -> buildMonoid {buildMonoidInstallExes = First t})
buildOptsInstallExes :: Lens' BuildOpts Bool
buildOptsInstallExes =
lens (boptsInstallExes)
(\bopts t -> bopts {boptsInstallExes = t})
envConfigBuildOpts :: Lens' EnvConfig BuildOpts
envConfigBuildOpts =
lens
(\envCfg -> configBuild (bcConfig (envConfigBuildConfig envCfg)))
(\envCfg bopts ->
envCfg
{ envConfigBuildConfig = (envConfigBuildConfig envCfg)
{ bcConfig = (bcConfig (envConfigBuildConfig envCfg))
{ configBuild = bopts
}
}
})
globalOptsBuildOptsMonoid :: Lens' GlobalOpts BuildOptsMonoid
globalOptsBuildOptsMonoid =
lens
(\globalOpts ->
configMonoidBuildOpts
(globalConfigMonoid globalOpts))
(\globalOpts boptsMonoid ->
globalOpts
{ globalConfigMonoid = (globalConfigMonoid globalOpts)
{ configMonoidBuildOpts = boptsMonoid
}
})
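-- Illustrative note (not part of the original module): these lenses compose
-- with the usual "Lens.Micro" operators, for example:
--
-- > bopts & buildOptsInstallExes .~ True
-- > globalOpts ^. globalOptsBuildOptsMonoid . buildOptsMonoidHaddock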
| phadej/stack | src/Stack/Types/Internal.hs | bsd-3-clause | 4,124 | 0 | 16 | 937 | 1,034 | 561 | 473 | 109 | 1 |
module SwapArgsSpec (main, spec) where
import Test.Hspec
-- import qualified GHC as GHC
-- import qualified GhcMonad as GHC
-- import qualified RdrName as GHC
-- import qualified SrcLoc as GHC
import Language.Haskell.Refact.Refactoring.SwapArgs
import TestUtils
import System.Directory
-- ---------------------------------------------------------------------
main :: IO ()
main = do
-- setLogger
hspec spec
spec :: Spec
spec = do
describe "swapArgs" $ do
it "checks for that an identifier is selected" $ do
res <- catchException (ct $ swapArgs defaultTestSettings testOptions ["./SwapArgs/B.hs","4","1"])
-- let res = "foo"
(show res) `shouldBe` "Just \"Incorrect identifier selected!\""
it "swaps arguments for a definition at the top level" $ do
r <- ct $ swapArgs defaultTestSettings testOptions ["./SwapArgs/B.hs","9","1"]
-- r <- ct $ swapArgs logTestSettings testOptions ["./SwapArgs/B.hs","9","1"]
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"SwapArgs/B.hs\"]"
pendingWith "need to complete this"
-- diff <- compareFiles "./test/testdata/SwapArgs/B.refactored.hs"
-- "./test/testdata/SwapArgs/B.hs.expected"
-- diff `shouldBe` []
-- ---------------------------------------------------------------------
-- Helper functions
| RefactoringTools/HaRe | test/SwapArgsSpec.hs | bsd-3-clause | 1,382 | 0 | 18 | 275 | 219 | 119 | 100 | 19 | 1 |
module PackageTests.OrderFlags.Check where
import Test.Tasty.HUnit
import PackageTests.PackageTester
import System.FilePath
import Control.Exception
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding (catch)
#endif
suite :: FilePath -> Assertion
suite ghcPath = do
let spec = PackageSpec
{ directory = "PackageTests" </> "OrderFlags"
, configOpts = []
, distPref = Nothing
}
result <- cabal_build spec ghcPath
do
assertEqual "cabal build should succeed - see test-log.txt" True (successful result)
`catch` \exc -> do
putStrLn $ "Cabal result was "++show result
throwIO (exc :: SomeException)
| Helkafen/cabal | Cabal/tests/PackageTests/OrderFlags/Check.hs | bsd-3-clause | 685 | 0 | 12 | 171 | 159 | 87 | 72 | 18 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Core.Runtime.Tests
( tests
) where
--------------------------------------------------------------------------------
import qualified Data.ByteString as B
import System.FilePath ((</>))
import Test.Framework (Test, testGroup)
import Test.HUnit (Assertion, (@?=))
--------------------------------------------------------------------------------
import Hakyll
import qualified Hakyll.Core.Logger as Logger
import Hakyll.Core.Runtime
import TestSuite.Util
--------------------------------------------------------------------------------
tests :: Test
tests = testGroup "Hakyll.Core.Runtime.Tests" $
fromAssertions "run" [case01, case02]
--------------------------------------------------------------------------------
case01 :: Assertion
case01 = do
logger <- Logger.new Logger.Error
_ <- run testConfiguration logger $ do
match "images/*" $ do
route idRoute
compile copyFileCompiler
match "*.md" $ do
route $ setExtension "html"
compile $ do
getResourceBody
>>= saveSnapshot "raw"
>>= renderPandoc
create ["bodies.txt"] $ do
route idRoute
compile $ do
items <- loadAllSnapshots "*.md" "raw"
makeItem $ concat $ map itemBody (items :: [Item String])
favicon <- B.readFile $
providerDirectory testConfiguration </> "images/favicon.ico"
favicon' <- B.readFile $
destinationDirectory testConfiguration </> "images/favicon.ico"
favicon @?= favicon'
example <- readFile $
destinationDirectory testConfiguration </> "example.html"
lines example @?= ["<p>This is an example.</p>"]
bodies <- readFile $ destinationDirectory testConfiguration </> "bodies.txt"
head (lines bodies) @?= "This is an example."
cleanTestEnv
--------------------------------------------------------------------------------
case02 :: Assertion
case02 = do
logger <- Logger.new Logger.Error
_ <- run testConfiguration logger $ do
match "images/favicon.ico" $ do
route $ gsubRoute "images/" (const "")
compile $ makeItem ("Test" :: String)
match "images/**" $ do
route idRoute
compile copyFileCompiler
favicon <- readFile $
destinationDirectory testConfiguration </> "favicon.ico"
favicon @?= "Test"
cleanTestEnv
| Minoru/hakyll | tests/Hakyll/Core/Runtime/Tests.hs | bsd-3-clause | 2,676 | 0 | 21 | 697 | 537 | 263 | 274 | 57 | 1 |
module SvgTests.BuilderTests where
import Svg.Builder
import Test.QuickCheck
prop1_sanitizeId str =
let sanitized = sanitizeId str
illegals = map (\c -> not $ elem c ",()/<>% ") sanitized
allTrue = and illegals
in allTrue
| Ian-Stewart-Binks/courseography | hs/SvgTests/BuilderTests.hs | gpl-3.0 | 250 | 0 | 13 | 61 | 73 | 38 | 35 | 8 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>Forced Browse Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Sadržaj</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Traži</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriti</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/bruteforce/src/main/javahelp/org/zaproxy/zap/extension/bruteforce/resources/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 965 | 77 | 67 | 157 | 419 | 211 | 208 | -1 | -1 |
module Test1 where
f = e1 + e2
where
e1 = 1 + 2
e2 = 1 - 2
| kmate/HaRe | old/testing/refacSlicing/Test1.hs | bsd-3-clause | 94 | 0 | 7 | 54 | 33 | 19 | 14 | 4 | 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Accordion
-- Copyright : (c) glasser@mit.edu
-- License : BSD
--
-- Maintainer : glasser@mit.edu
-- Stability : stable
-- Portability : unportable
--
-- LayoutClass that puts non-focused windows in ribbons at the top and bottom
-- of the screen.
-----------------------------------------------------------------------------
module XMonad.Layout.Accordion (
-- * Usage
-- $usage
Accordion(Accordion)) where
import XMonad
import qualified XMonad.StackSet as W
import Data.Ratio
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.Accordion
--
-- Then edit your @layoutHook@ by adding the Accordion layout:
--
-- > myLayout = Accordion ||| Full ||| etc..
-- > main = xmonad defaultConfig { layoutHook = myLayout }
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
data Accordion a = Accordion deriving ( Read, Show )
instance LayoutClass Accordion Window where
pureLayout _ sc ws = zip ups tops ++ [(W.focus ws, mainPane)] ++ zip dns bottoms
where
ups = reverse $ W.up ws
dns = W.down ws
(top, allButTop) = splitVerticallyBy (1%8 :: Ratio Int) sc
(center, bottom) = splitVerticallyBy (6%7 :: Ratio Int) allButTop
(allButBottom, _) = splitVerticallyBy (7%8 :: Ratio Int) sc
mainPane | ups /= [] && dns /= [] = center
| ups /= [] = allButTop
| dns /= [] = allButBottom
| otherwise = sc
tops = if ups /= [] then splitVertically (length ups) top else []
bottoms = if dns /= [] then splitVertically (length dns) bottom else []
| adinapoli/xmonad-contrib | XMonad/Layout/Accordion.hs | bsd-3-clause | 1,949 | 0 | 14 | 464 | 380 | 215 | 165 | 23 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.JSON where
import Haste
import Haste.JSON
import Haste.Serialize
import Control.Applicative
data ABC = A | B | C
deriving (Show, Read)
data Foo = Foo {
string :: String,
number :: Int,
bool :: Bool,
nested :: Either String Foo,
enum :: ABC
} deriving Show
instance Serialize ABC where
toJSON = Str . toJSString . show
parseJSON (Str s) = case fromJSString s of Just s' -> return $ read s'
instance Serialize Foo where
toJSON (Foo s n b nest e) = Dict [
("string", toJSON s),
("number", toJSON n),
("bool", toJSON b),
("nested", toJSON nest),
("enum", toJSON e)
]
parseJSON j = Foo <$> j .: "string"
<*> j .: "number"
<*> j .: "bool"
<*> j .: "nested"
<*> j .: "enum"
foo :: Foo
foo = Foo {
string = "\"hälsena\"",
number = -42,
bool = False,
nested = Right (foo {nested = Left "nope"}),
enum = B
}
fooStr :: JSString
fooStr = encodeJSON $ toJSON foo
fooAgain :: Either String Foo
fooAgain = decodeJSON fooStr >>= fromJSON
runTest :: IO ()
runTest = do
print foo
putStrLn ""
print fooStr
print $ decodeJSON fooStr
putStrLn ""
print fooAgain
| beni55/haste-compiler | Tests/JSON.hs | bsd-3-clause | 1,304 | 0 | 15 | 415 | 450 | 241 | 209 | 49 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>AJAX Spider | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | msrader/zap-extensions | src/org/zaproxy/zap/extension/spiderAjax/resources/help_hr_HR/helpset_hr_HR.hs | apache-2.0 | 974 | 80 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
-- This is actually perfectly ok!
module NonLinearSigErr where
type family E a b
type instance E a (a :: *) = [a]
| urbanslug/ghc | testsuite/tests/indexed-types/should_fail/NonLinearSigErr.hs | bsd-3-clause | 147 | 0 | 6 | 30 | 34 | 23 | 11 | 4 | 0 |
-- !!! Test exceptions in a foreign import "wrapper"
import Foreign
import Foreign.C
import System.Exit
type IOF = IO ()
foreign import ccall "wrapper" wrap_f_io :: IOF -> IO (FunPtr IOF)
foreign import ccall "dynamic" call_io :: FunPtr IOF -> IOF
mk_error = error "this is an error"
main = do f <- wrap_f_io mk_error; call_io f
| holzensp/ghc | testsuite/tests/ffi/should_run/ffi008.hs | bsd-3-clause | 334 | 0 | 9 | 62 | 100 | 52 | 48 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Y2015.D12Spec (spec) where
import Y2015
import Test.Hspec
import Data.ByteString.Lazy.Char8 (pack)
spec :: Spec
spec = parallel $ do
describe "Day 12" $ do
describe "jsonSum" $ do
it "sums three-member lists" $
jsonSum (pack "[1,2,3]") `shouldBe` 6
it "sums two-member objects" $
jsonSum (pack "{\"a\":2,\"b\":4}") `shouldBe` 6
it "sums nested lists" $
jsonSum (pack "[[[3]]]") `shouldBe` 3
it "sums nested objects" $
jsonSum (pack "{\"a\":{\"b\":4},\"c\":-1}") `shouldBe` 3
it "sums mixed objects" $
jsonSum (pack "{\"a\":[-1,1]}") `shouldBe` 0
it "sums mixed lists" $
jsonSum (pack "[-1,{\"a\":1}]") `shouldBe` 0
it "sums empty lists" $
jsonSum (pack "[]") `shouldBe` 0
it "sums empty objects" $
jsonSum (pack "{}") `shouldBe` 0
describe "jsonSumFixed" $ do
it "sums three-member lists" $
jsonSumFixed (pack "[1,2,3]") `shouldBe` 6
it "ignores red in nested objects" $
jsonSumFixed (pack "[1,{\"c\":\"red\",\"b\":2},3]") `shouldBe` 4
it "ignores red objects" $
jsonSumFixed (pack "{\"d\":\"red\",\"e\":[1,2,3,4],\"f\":5}") `shouldBe` 0
it "ignores red array elements" $
jsonSumFixed (pack "[1,\"red\",5]") `shouldBe` 6
| tylerjl/adventofcode | test/Y2015/D12Spec.hs | mit | 1,522 | 0 | 18 | 518 | 376 | 185 | 191 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provides a dummy authentication module that simply lets a user specify
-- their identifier. This is not intended for real world use, just for
-- testing. This plugin supports form submissions via JSON (since 1.6.8).
--
-- = Using the JSON Login Endpoint
--
-- We are assuming that you have declared `authRoute` as follows
--
-- @
-- Just $ AuthR LoginR
-- @
--
-- If you are using a different one, then you have to adjust the
-- endpoint accordingly.
--
-- @
-- Endpoint: \/auth\/page\/dummy
-- Method: POST
-- JSON Data: {
-- "ident": "my identifier"
-- }
-- @
--
-- Remember to add the following headers:
--
-- - Accept: application\/json
-- - Content-Type: application\/json
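--
-- A client-side sketch using the @http-client@ package (illustrative; the
-- host, port and exact login path shown here are assumptions):
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Network.HTTP.Client
-- >
-- > loginDummy :: IO ()
-- > loginDummy = do
-- >   mgr  <- newManager defaultManagerSettings
-- >   req0 <- parseRequest "http://localhost:3000/auth/page/dummy"
-- >   let req = req0 { method = "POST"
-- >                  , requestHeaders = [ ("Accept", "application/json")
-- >                                     , ("Content-Type", "application/json") ]
-- >                  , requestBody = RequestBodyLBS "{\"ident\": \"my identifier\"}" }
-- >   _ <- httpLbs req mgr
-- >   return ()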
module Yesod.Auth.Dummy
( authDummy
) where
import Yesod.Auth
import Yesod.Form (runInputPost, textField, ireq)
import Yesod.Core
import Data.Text (Text)
import Data.Aeson.Types (Result(..), Parser)
import qualified Data.Aeson.Types as A (parseEither, withObject)
identParser :: Value -> Parser Text
identParser = A.withObject "Ident" (.: "ident")
authDummy :: YesodAuth m => AuthPlugin m
authDummy =
AuthPlugin "dummy" dispatch login
where
dispatch "POST" [] = do
(jsonResult :: Result Value) <- parseCheckJsonBody
eIdent <- case jsonResult of
Success val -> return $ A.parseEither identParser val
Error err -> return $ Left err
case eIdent of
Right ident ->
setCredsRedirect $ Creds "dummy" ident []
Left _ -> do
ident <- runInputPost $ ireq textField "ident"
setCredsRedirect $ Creds "dummy" ident []
dispatch _ _ = notFound
url = PluginR "dummy" []
login authToMaster = do
request <- getRequest
toWidget [hamlet|
$newline never
<form method="post" action="@{authToMaster url}">
$maybe t <- reqToken request
<input type=hidden name=#{defaultCsrfParamName} value=#{t}>
Your new identifier is: #
<input type="text" name="ident">
<input type="submit" value="Dummy Login">
|]
| geraldus/yesod | yesod-auth/Yesod/Auth/Dummy.hs | mit | 2,294 | 0 | 16 | 576 | 365 | 205 | 160 | 34 | 4 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Common where
import Control.Arrow ((>>>), first, left)
import Control.Applicative
import Control.Concurrent (forkIO, killThread, threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Control.Monad.Error.Class
import Control.Monad.Trans.Maybe
import Control.Monad.IO.Class
import Data.Foldable
import Data.Function ((&))
import Data.IntCast
import Data.Int (Int64)
import Data.Map.Strict (Map)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Sequence (Seq, ViewL((:<)), (|>))
import Data.Serialize
import Data.Tuple (swap)
import Data.Typeable (Typeable, typeRep)
import Data.Word (Word64, Word32)
import Foreign (Storable, sizeOf)
import System.IO
import qualified Data.Map.Strict as Map
import qualified Data.Sequence as Seq
{- DEBUG -}
import qualified System.GlobalLock as GLock
atomicPrintStderr :: String -> IO ()
atomicPrintStderr msg = GLock.lock $ do
hPutStrLn stderr msg
hFlush stderr
traceIO :: Show b => String -> b -> IO a -> IO a
traceIO name ctx action = do
atomicPrintStderr ("++" <> name <> " " <> show ctx)
result <- try action
case result of
Left e -> do
atomicPrintStderr ("!!" <> name <> " " <> show ctx <>
": " <> show (e :: SomeException))
throwIO e
Right x -> do
atomicPrintStderr ("--" <> name <> " " <> show ctx)
pure x
-- END DEBUG -}
modifyMVarPure :: MVar a -> (a -> a) -> IO ()
modifyMVarPure v f = modifyMVar_ v (pure . f)
mapConcurrently_ :: Foldable t => (a -> IO ()) -> t a -> IO ()
mapConcurrently_ f = runConcurrently . traverse_ (Concurrently . f)
concurrently_ :: IO () -> IO () -> IO ()
concurrently_ x y = void (concurrently x y)
standby :: IO a
standby = forever (threadDelay 1000000000)
-- | Automatically restart if the action fails (after the given delay).
autorestart :: Int -> IO a -> IO a
-- this type signature is not ideal ^
autorestart delay action = do
result <- tryAny action
case result of
Right x -> pure x
Left e -> do
hPutStrLn stderr (show e)
hFlush stderr
threadDelay delay
autorestart delay action
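-- Illustrative usage (not part of the original module); 'connectToSomething'
-- is a hypothetical action that may throw:
--
-- > main = autorestart 1000000 $ do
-- >   putStrLn "connecting..."
-- >   connectToSomething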
forkIO_ :: IO () -> IO ()
forkIO_ = void . forkIO
tryAny :: IO a -> IO (Either SomeException a)
tryAny action = do
result <- newEmptyMVar
mask $ \ unmask -> do
thread <- forkIO (try (unmask action) >>= putMVar result)
unmask (readMVar result) `onException` killThread thread
fromJustIO1 :: Exception e => (a -> e) -> (a -> Maybe b) -> a -> IO b
fromJustIO1 e f x = fromJustIO (e x) (f x)
fromJustIO :: Exception e => e -> Maybe a -> IO a
fromJustIO _ (Just x) = pure x
fromJustIO e Nothing = throwIO e
fromJustE :: String -> Maybe a -> a
fromJustE e = fromMaybe (error e)
maybeToMonadError :: MonadError e m => e -> Maybe a -> m a
maybeToMonadError e = maybeToEither e >>> eitherToMonadError
eitherToMonadError :: MonadError e m => Either e a -> m a
eitherToMonadError (Left e) = throwError e
eitherToMonadError (Right x) = pure x
eitherToMaybe :: Either e a -> Maybe a
eitherToMaybe (Right e) = Just e
eitherToMaybe (Left _) = Nothing
maybeToAlternative :: Alternative f => Maybe a -> f a
maybeToAlternative Nothing = empty
maybeToAlternative (Just x) = pure x
maybeToEither :: e -> Maybe a -> Either e a
maybeToEither e Nothing = Left e
maybeToEither _ (Just x) = Right x
generateBidiMap :: (Ord a, Ord b) => [(a, b)] -> (Map a b, Map b a)
generateBidiMap t = (Map.fromList t, Map.fromList (swap <$> t))
popMap :: Ord k => k -> Map k a -> Maybe (a, Map k a)
popMap k m = Map.lookup k m <&> \ x -> (x, Map.delete k m)
-- | Same as '<$>' but flips the order of the arguments.
{-# INLINE (<&>) #-}
infixl 1 <&>
(<&>) :: Functor f => f a -> (a -> b) -> f b
m <&> f = fmap f m
-- | Similar to 'sizeOf' but acts on a proxy value.
{-# INLINE sizeOfProxy #-}
sizeOfProxy :: Storable a => proxy a -> Int
sizeOfProxy = sizeOf . unproxy
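-- For example (illustrative), any proxy-shaped value will do:
--
-- > sizeOfProxy (Nothing :: Maybe Word32) == 4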
-- | Restrict the type of the first argument based on a proxy value.
-- It otherwise behaves identical to 'const'.
{-# INLINE asTypeOfProxy #-}
asTypeOfProxy :: a -> proxy a -> a
asTypeOfProxy = const
-- | Construct a dummy value based on the type of a proxy value.
-- The dummy value must not be evaluated.
unproxy :: proxy a -> a
unproxy _ = error "unproxy: dummy value is not meant to be evaluated"
-- | A dummy value that must not be evaluated.
__ :: a
__ = error "__: dummy value is not meant to be evaluated"
| Rufflewind/conplex | Common.hs | mit | 4,561 | 0 | 18 | 933 | 1,620 | 829 | 791 | 112 | 2 |
-- makeBackronym
-- http://www.codewars.com/kata/55805ab490c73741b7000064/
module Codewars.Exercise.Backronym where
import Codewars.Exercise.Backronym.Dictionary (dict)
import Data.Char (toUpper)
makeBackronym :: String -> String
makeBackronym = unwords . map (dict . toUpper)
| gafiatulin/codewars | src/7 kyu/Backronym.hs | mit | 280 | 0 | 8 | 29 | 59 | 36 | 23 | 5 | 1 |
module Tamari where
import Data.List
import Data.Maybe
import Catalan
import Bijections
rotR1 :: Tree -> [Tree]
rotR1 (B (t1 @ (B t11 t12)) t2) =
B t11 (B t12 t2) : [B t1' t2 | t1' <- rotR1 t1] ++ [B t1 t2' | t2' <- rotR1 t2]
rotR1 (B L t2) = [B L t2' | t2' <- rotR1 t2]
rotR1 _ = []
rotL1 :: Tree -> [Tree]
rotL1 (B t1 (t2 @ (B t21 t22))) =
B (B t1 t21) t22 : [B t1' t2 | t1' <- rotL1 t1] ++ [B t1 t2' | t2' <- rotL1 t2]
rotL1 (B t1 L) = [B t1' L | t1' <- rotL1 t1]
rotL1 _ = []
tamari_up :: Tree -> [Tree]
tamari_up t = t : foldr union [] [tamari_up t' | t' <- rotR1 t]
tamari_down :: Tree -> [Tree]
tamari_down t = t : foldr union [] [tamari_down t' | t' <- rotL1 t]
tamari_order :: Tree -> Tree -> Bool
tamari_order t1 t2 = elem t2 (tamari_up t1)
kreweras_order :: Tree -> Tree -> Bool
kreweras_order L L = True
kreweras_order (B t1 t2) (B t1' t2') =
(kreweras_order t1 t1' && kreweras_order t2 t2') ||
case t1 of
B t11 t12 -> kreweras_order (B t11 (B t12 t2)) (B t1' t2')
L -> False
kreweras_order _ _ = False
tamari :: Int -> [(Tree,Tree)]
tamari n = [(t1,t2) | t1 <- binary_trees n, t2 <- tamari_up t1]
-- [length $ tamari n | n <- [0..]] == [1,1,3,13,68,399,2530,...]
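-- A quick way to reproduce the counts above (illustrative helper, not part
-- of the original module): the first six values should be 1,1,3,13,68,399.
demoTamariCounts :: IO ()
demoTamariCounts = mapM_ (print . length . tamari) [0..5]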
kreweras :: Int -> [(Tree,Tree)]
kreweras n = [(t1,t2) | t1 <- binary_trees n, t2 <- binary_trees n, kreweras_order t1 t2]
tamari_parts :: Int -> [Int]
tamari_parts n = [length $ tamari_down t | t <- binary_trees n]
-- some properties of the Tamari lattice
-- If t<=u in the Tamari order, then the left-branching spine of t is at
-- least as long as the left-branching spine of u.
-- verified for n<=6
prop1 :: Int -> Bool
prop1 n =
flip all (tamari n) $ \(t1,t2) ->
length (tree2spine t1) >= length (tree2spine t2)
-- sequent-style decision procedure for Tamari order
tamari_seq :: [Tree] -> Tree -> Tree -> Bool
tamari_seq g (B t1 t2) u = tamari_seq (t2:g) t1 u
tamari_seq g L L = g == []
tamari_seq g L (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,t2:g2) -> tamari_seq (reverse g1) L u1 && tamari_seq g2 t2 u2
Just (g1,[]) -> False
-- claim: tamari_seq agrees with tamari_order
-- verified for n<=6
prop2 :: Int -> Bool
prop2 n =
flip all (binary_trees n) $ \t1 ->
flip all (binary_trees n) $ \t2 ->
tamari_order t1 t2 == tamari_seq [] t1 t2
-- focused sequent calculus
tamari_linv :: Tree -> [Tree] -> Tree -> Bool
tamari_neu :: [Tree] -> Tree -> Bool
tamari_linv t g u = let ts = tree2spine t in tamari_neu (reverse ts ++ g) u
tamari_neu g L = g == []
tamari_neu g (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,t2:g2) -> tamari_neu (reverse g1) u1 && tamari_linv t2 g2 u2
Just (g1,[]) -> False
-- verified for n<=7
prop3 :: Int -> Bool
prop3 n =
flip all (binary_trees n) $ \t1 ->
flip all (binary_trees n) $ \t2 ->
tamari_linv t1 [] t2 == tamari_seq [] t1 t2
shuffle_linv :: Tree -> [Tree] -> Tree -> Bool
shuffle_neu :: [Tree] -> Tree -> Bool
shuffle_linv t g u =
let ts = tree2spine t in
-- flip any (permutations ts) $ \ts' ->
flip any (shuffle ts g) $ \g' ->
shuffle_neu g' u
shuffle_neu g L = g == []
shuffle_neu g (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,g2) ->
flip any (remove g2) $ \(t2,g2') ->
shuffle_neu (reverse g1) u1 && shuffle_linv t2 g2' u2
-- lattice structure
tamari_meetc :: [Tree] -> [Tree] -> [Tree]
tamari_meetc [] [] = []
tamari_meetc (B t1 t2:g) d = tamari_meetc (t1:t2:g) d
tamari_meetc g (B t1 t2:d) = tamari_meetc g (t1:t2:d)
tamari_meetc (L:g) (L:d) =
let match = map fst $ fst $ break (uncurry (/=)) (zip g d) in
let g' = fromJust $ stripPrefix match g in
let d' = fromJust $ stripPrefix match d in
L:match ++ tamari_meetc g' d'
-- tamari_join :: Tree -> Tree -> Tree
-- tamari_join t1 t2 =
-- if tamari_linv t1 [] t2 then t2 else
-- if tamari_linv t2 [] t1 then t1 else
-- let (cur1:g1) = tree2spine t1 in
-- let (cur2:g2) = tree2spine t2 in
-- match_and_join g1 g2 (leaves cur1,[cur1]) (leaves cur2,[cur2])
-- where
-- match_and_join :: [Tree] -> [Tree] -> (Int,[Tree]) -> (Int,[Tree]) -> Tree
-- match_and_join g1 g2 (k1,cur1) (k2,cur2) =
-- if k1 == k2 then
-- let j = tamari_join (tpsi cur1) (tpsi cur2) in
-- if g1 == g2 then j else let (t1:g1') = g1 in let (t2:g2') = g2 in B j (match_and_join g1' g2' (leaves t1,[t1]) (leaves t2,[t2]))
-- else if k1 < k2 then
-- let (t1:g1') = g1 in
-- match_and_join g1' g2 (k1+leaves t1,t1:cur1) (k2,cur2)
-- else
-- let (t2:g2') = g2 in
-- match_and_join g1 g2' (k1,cur1) (k2+leaves t2,t2:cur2)
-- tpsi :: [Tree] -> Tree
-- tpsi [t] = t
-- tpsi (t:ts) = foldl B t ts
tree_type :: Tree -> [Bool]
tree_type t = pol False t
where
pol :: Bool -> Tree -> [Bool]
pol b L = [b]
pol b (B t1 t2) = pol False t1 ++ pol True t2
| noamz/linlam-gos | src/Tamari.hs | mit | 5,691 | 2 | 20 | 1,576 | 2,305 | 1,178 | 1,127 | 117 | 6 |
--Task 1
--sumProducts [[1,2,3], [4,5], [], [-2,3,0,5,1]] -> 27 -- 27 = 6 + 20 + 1 + 0
-- product [] == 1, so the empty list contributes 1 to the sum, matching the example above
sumProducts m = sum (map product m)
sumProducts2 m = sum products
where products = map ( \ lst -> (foldr ( \ x res -> x*res ) 1 lst) ) m
--Task 2
--occurrences [1..6] [1,3,4,3,2,3,3,0,5,3,1] -> [2,1,5,1,1,0]
occurrences lst1 lst2 = [ count elem lst2 | elem<-lst1 ]
where count elem lst = length (filter ( \ x -> x == elem) lst)
--Task 6
--matchLengths [[1..4],[0..3],[5,4,8,10]] -> True
--matchLengths [[1..4],[0..3],[],[5,4,8,10]] -> False
matchLengths lst = allEquals (map length lst)
where allEquals l = all ( \ x -> x == head l ) l
--Task 7
--setUnion [1,2,3,5] [2,4,5,6,7] -> [1,2,3,4,5,6,7]
--setIntersect [1,2,3,5] [2,4,5,6,7] -> [2,5]
--setDiff [1,2,3,5] [2,4,5,6,7] -> [1,3]
--setDiff [2,4,5,6,7] [1,2,3,5] -> [4,6,7]
setUnion s1 [] = s1
setUnion [] s2 = s2
setUnion (x:xs) (y:ys)
| x < y = x : setUnion xs (y:ys)
| x > y = y : setUnion (x:xs) ys
| x == y = x : setUnion xs ys
setIntersect s1 [] = []
setIntersect [] s2 = []
setIntersect (x:xs) (y:ys)
| x < y = setIntersect xs (y:ys)
| x > y = setIntersect (x:xs) ys
| x == y = x : setIntersect xs ys
setDiff s1 [] = s1
setDiff [] s2 = []
setDiff (x:xs) (y:ys)
| x == y = setDiff xs ys
| x < y = x : setDiff xs (y:ys)
| x > y = setDiff (x:xs) ys
setSumDiff s1 [] = s1
setSumDiff [] s2 = s2
setSumDiff s1 s2 = setUnion (setDiff s1 s2) (setDiff s2 s1)
setSum s1 [] = s1
setSum [] s2 = s2
setSum (x:xs) (y:ys)
| x == y = [x,y] ++ setSum xs ys
| x < y = x : setSum xs (y:ys)
| x > y = y : setSum (x:xs) ys
| pepincho/Functional-Programming | haskell/ex-4.hs | mit | 1,678 | 8 | 12 | 398 | 779 | 390 | 389 | 35 | 1 |
-- FIXME: Depend on the not-yet-released project-template library.
{-# LANGUAGE OverloadedStrings #-}
module MultiFile where
import Control.Monad (unless)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Resource (runExceptionT)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64 as B64
import Data.Conduit (Conduit, MonadResource, Sink,
await, awaitForever, leftover,
yield, ($$), (=$))
import Data.Conduit.Binary (sinkFile)
import Data.Conduit.List (sinkNull)
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import Data.Functor.Identity (runIdentity)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Filesystem (createTree)
import Filesystem.Path.CurrentOS (FilePath, directory, encode,
encodeString, fromText, (</>))
import Prelude hiding (FilePath)
unpackMultiFile
:: MonadResource m
=> FilePath -- ^ output folder
-> (Text -> Text) -- ^ fix each input line, good for variables
-> Sink S.ByteString m ()
unpackMultiFile root fixLine =
CT.decode CT.utf8 =$ CT.lines =$ CL.map fixLine =$ start
where
start =
await >>= maybe (return ()) go
where
go t =
case getFileName t of
Nothing -> error $ "Invalid input: " ++ show t
Just (fp', isBinary) -> do
let fp = root </> fromText fp'
liftIO $ createTree $ directory fp
let src
| isBinary = binaryLoop
| otherwise = textLoop
src =$ sinkFile (encodeString fp)
start
binaryLoop = do
await >>= maybe (error "binaryLoop needs 1 line") go
where
go = yield . B64.decodeLenient . encodeUtf8
textLoop =
await >>= maybe (return ()) go
where
go t =
case getFileName t of
Just{} -> leftover t
Nothing -> do
yield $ encodeUtf8 t
yield "\n"
textLoop
getFileName t =
case T.words t of
["{-#", "START_FILE", fn, "#-}"] -> Just (fn, False)
["{-#", "START_FILE", "BASE64", fn, "#-}"] -> Just (fn, True)
_ -> Nothing
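-- For reference, the stream consumed above is expected to look roughly like
-- this (file names are illustrative):
--
-- > {-# START_FILE src/Main.hs #-}
-- > main = putStrLn "hello"
-- > {-# START_FILE BASE64 static/logo.png #-}
-- > iVBORw0KGgoAAAANSUhEUg...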
createMultiFile
:: MonadIO m
=> FilePath -- ^ folder containing the files
-> Conduit FilePath m S.ByteString -- ^ FilePath is relative to containing folder
createMultiFile root = do
awaitForever handleFile
where
handleFile fp' = do
bs <- liftIO $ S.readFile $ encodeString fp
case runIdentity $ runExceptionT $ yield bs $$ CT.decode CT.utf8 =$ sinkNull of
Left{} -> do
yield "{-# START_FILE BASE64 "
yield $ encode fp'
yield " #-}\n"
yield $ B64.encode bs
yield "\n"
Right{} -> do
yield "{-# START_FILE "
yield $ encode fp'
yield " #-}\n"
yield bs
unless ("\n" `S.isSuffixOf` bs) $ yield "\n"
where
fp = root </> fp'
| piyush-kurur/yesod | yesod/MultiFile.hs | mit | 3,652 | 0 | 19 | 1,527 | 869 | 459 | 410 | 81 | 5 |
{-# LANGUAGE MultiWayIf #-}
module Mealy where
import Data.Maybe (isJust)
import FRP.Yampa
import qualified Graphics.Gloss.Interface.IO.Game as G
import Buttons
leftmost p = (p>90)
rightmost p = (p<(-90))
centermost p = (p>(-10) && p <10)
isClick = isEvent. filterE (isJust. toYampaEvent)
moveLeft = (-)
moveRight = (+)
updateVel = id
updateScore = id
stateTrans :: (Int,Int,Event G.Event,Int,Int) -> (Int,Int,Int,Int)
stateTrans (p,v,c,s,d) =
if
| p1(p,v,c,s,d) -> c1 (p,v,c,s,d)
-- | p2(p,v,c,s,d) -> c2 (p,v,c,s,d)
p1 (p,v,c,s,d) =
rightmost p
&& (not.centermost) p
&& (not. isClick) c
&& (not. leftmost) p
&& (d==0)
c1 (p,v,c,s,d) = (p',v',s',d')
where
p' = moveLeft p v
v' = updateVel 1
s' = updateScore 0
d' = id d
| santolucito/Euterpea_Projects | QuantumArt/Mealy.hs | mit | 801 | 0 | 11 | 193 | 398 | 231 | 167 | 29 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.MutationObserver
(js_newMutationObserver, newMutationObserver, js_observe, observe,
js_takeRecords, takeRecords, js_disconnect, disconnect,
MutationObserver, castToMutationObserver, gTypeMutationObserver)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe
"new window[\"MutationObserver\"]($1)" js_newMutationObserver ::
JSRef MutationCallback -> IO (JSRef MutationObserver)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver Mozilla MutationObserver documentation>
newMutationObserver ::
(MonadIO m, IsMutationCallback callback) =>
Maybe callback -> m MutationObserver
newMutationObserver callback
= liftIO
(js_newMutationObserver
(maybe jsNull (unMutationCallback . toMutationCallback) callback)
>>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"observe\"]($2, $3)"
js_observe ::
JSRef MutationObserver -> JSRef Node -> JSRef Dictionary -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.observe Mozilla MutationObserver.observe documentation>
observe ::
(MonadIO m, IsNode target, IsDictionary options) =>
MutationObserver -> Maybe target -> Maybe options -> m ()
observe self target options
= liftIO
(js_observe (unMutationObserver self)
(maybe jsNull (unNode . toNode) target)
(maybe jsNull (unDictionary . toDictionary) options))
foreign import javascript unsafe "$1[\"takeRecords\"]()"
js_takeRecords ::
JSRef MutationObserver -> IO (JSRef [Maybe MutationRecord])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.takeRecords Mozilla MutationObserver.takeRecords documentation>
takeRecords ::
(MonadIO m) => MutationObserver -> m [Maybe MutationRecord]
takeRecords self
= liftIO
((js_takeRecords (unMutationObserver self)) >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"disconnect\"]()"
js_disconnect :: JSRef MutationObserver -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.disconnect Mozilla MutationObserver.disconnect documentation>
disconnect :: (MonadIO m) => MutationObserver -> m ()
disconnect self = liftIO (js_disconnect (unMutationObserver self)) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/MutationObserver.hs | mit | 3,169 | 28 | 12 | 505 | 723 | 417 | 306 | 53 | 1 |
import Data.Array
minfree :: [Int] -> Int
minfree = findFalse . buildList
findFalse :: Array Int Bool -> Int
findFalse = length . takeWhile id . elems
buildList :: [Int] -> Array Int Bool
buildList xs = accumArray (||) False (0, n) $
zip [x | x <- xs, x <= n] $ repeat True
where
n = length xs
| hajifkd/pearlsfd | chapter1/sol2.hs | gpl-3.0 | 346 | 0 | 10 | 112 | 140 | 73 | 67 | 9 | 1 |
module DocGraph.Document where
import Control.Arrow ((>>>))
import Data.Char (isSpace)
import Data.Either (rights)
import Data.List (dropWhile, dropWhileEnd)
import qualified Data.Tree as DT
import DocGraph.Types
import Text.Parsec hiding (token)
traverseDocumentIO :: FilePath -> IO DocGraph
traverseDocumentIO path = do
contents <- readFile path
return $ DT.Node (newItem path) (build $ itemize $ tokenize contents)
data Token = TNode Int String
| TLink String
deriving (Show)
tokenize :: String -> [Token]
tokenize s = rights $ map (parse' tok) (lines s)
where tok = choice [node, link]
strip :: Label -> Label
strip = dropWhile isSpace >>> dropWhileEnd isSpace
type Parser = Parsec String ()
parse' :: Parser a -> String -> Either ParseError a
parse' p = parse p ""
node :: Parser Token
node = do hs <- many1 (char '#')
skipMany space
s <- many1 (noneOf "#\n")
let lvl = length hs
return $ TNode lvl (strip s)
link :: Parser Token
link = do string "->"
skipMany space
s <- manyTill anyChar eof
return $ TLink (strip s)
itemize :: [Token] -> [(Int, Item)]
itemize [] = []
itemize (TNode l s : ts) = (l, itm) : itemize ts'
where (subs, ts') = break atNode ts
itm = newItem s `linkToAll` links
links = map asStr $ filter isLink subs
atNode (TNode {}) = True
atNode _ = False
isLink (TLink {}) = True
isLink _ = False
asStr (TLink s') = s'
asStr _ = error "asStr should only be called on TLinks"
itemize (TLink {}:_) = error "TLink encountered. Should have been filtered out."
build :: [(Int, Item)] -> [DocGraph]
build [] = []
build ((n, itm):rest) = (DT.Node itm (build children)) : build siblings
where (children, siblings) = span ((>n).fst) rest
| wldmr/haskell-docgraph | src/DocGraph/Document.hs | gpl-3.0 | 1,878 | 0 | 11 | 514 | 738 | 382 | 356 | 51 | 4 |
module Stlc.Types
( Type (Tvar, Arrow, Times, Union, Unitty, Bottom)
, typeinference
, typeinfer
, unify
, applyctx
, emptyctx
, incrementindices
, variables
, normalizeTemplate
, applynormalization
, Top (Top)
, Context
, Variable
, Substitution
)
where
import Control.Monad
import Lambda
import qualified Data.Map as Map
-- | A type context is a map from deBruijn indices to types. Given
-- any lambda variable as a deBruijn index, it returns its type.
type Context = Map.Map Integer Type
-- | A type variable is an integer.
type Variable = Integer
-- | A type substitution is a function that can be applied to any type
-- to get a new one.
type Substitution = Type -> Type
-- | A type is a free type variable, an arrow (function) type between two
-- types, a product, a union, the unit type, or the empty (bottom) type.
data Type = Tvar Variable
| Arrow Type Type
| Times Type Type
| Union Type Type
| Unitty
| Bottom
deriving (Eq)
instance Show Type where
show (Tvar t) = typevariableNames !! fromInteger t
show (Arrow a b) = showparens a ++ " → " ++ show b
show (Times a b) = showparens a ++ " × " ++ showparens b
show (Union a b) = showparens a ++ " + " ++ showparens b
show Unitty = "⊤"
show Bottom = "⊥"
showparens :: Type -> String
showparens (Tvar t) = show (Tvar t)
showparens Unitty = show Unitty
showparens Bottom = show Bottom
showparens m = "(" ++ show m ++ ")"
-- | Creates the substitution given by the change of a variable for
-- the given type.
subs :: Variable -> Type -> Substitution
subs x typ (Tvar y)
| x == y = typ
| otherwise = Tvar y
subs x typ (Arrow a b) = Arrow (subs x typ a) (subs x typ b)
subs x typ (Times a b) = Times (subs x typ a) (subs x typ b)
subs x typ (Union a b) = Union (subs x typ a) (subs x typ b)
subs _ _ Unitty = Unitty
subs _ _ Bottom = Bottom
-- | Returns true if the given variable appears on the type.
occurs :: Variable -> Type -> Bool
occurs x (Tvar y) = x == y
occurs x (Arrow a b) = occurs x a || occurs x b
occurs x (Times a b) = occurs x a || occurs x b
occurs x (Union a b) = occurs x a || occurs x b
occurs _ Unitty = False
occurs _ Bottom = False
-- | Unifies two types with their most general unifier. Returns the substitution
-- that transforms any of the types into the unifier.
unify :: Type -> Type -> Maybe Substitution
unify (Tvar x) (Tvar y)
| x == y = Just id
| otherwise = Just (subs x (Tvar y))
unify (Tvar x) b
| occurs x b = Nothing
| otherwise = Just (subs x b)
unify a (Tvar y)
| occurs y a = Nothing
| otherwise = Just (subs y a)
unify (Arrow a b) (Arrow c d) = unifypair (a,b) (c,d)
unify (Times a b) (Times c d) = unifypair (a,b) (c,d)
unify (Union a b) (Union c d) = unifypair (a, b) (c, d)
unify Unitty Unitty = Just id
unify Bottom Bottom = Just id
unify _ _ = Nothing
-- | Unifies a pair of types
unifypair :: (Type,Type) -> (Type,Type) -> Maybe Substitution
unifypair (a,b) (c,d) = do
p <- unify b d
q <- unify (p a) (p c)
return (q . p)
-- | Apply a substitution to all the types on a type context.
applyctx :: Substitution -> Context -> Context
applyctx = Map.map
-- | The empty context.
emptyctx :: Context
emptyctx = Map.empty
-- | Increments all the indices of a given context. It is useful for
-- adapting the context to a new scope.
incrementindices :: Context -> Context
incrementindices = Map.mapKeys succ
-- | Type inference algorithm. Infers a type from a given context and expression
-- with a set of constraints represented by a unifier type. The result type must
-- be unifiable with this given type.
typeinfer :: [Variable] -- ^ List of fresh variables
-> Context -- ^ Type context
-> Exp -- ^ Lambda expression whose type has to be inferred
-> Type -- ^ Constraint
-> Maybe Substitution
typeinfer [] _ _ _ = Nothing
typeinfer [_] _ _ _ = Nothing
typeinfer _ ctx (Var n) b
| Map.member n ctx = do
var <- Map.lookup n ctx
unify var b
| otherwise = Nothing
typeinfer (x:vars) ctx (App p q) b = do
sigma <- typeinfer (evens vars) ctx p (Arrow (Tvar x) b)
tau <- typeinfer (odds vars) (applyctx sigma ctx) q (sigma (Tvar x))
return (tau . sigma)
where
odds [] = []
odds [_] = []
odds (_:e:xs) = e : odds xs
evens [] = []
evens [e] = [e]
evens (e:_:xs) = e : evens xs
typeinfer (a:x:vars) ctx (Lambda p) b = do
sigma <- unify b (Arrow (Tvar a) (Tvar x))
let nctx = applyctx sigma (Map.insert 1 (sigma $ Tvar a) (incrementindices ctx))
tau <- typeinfer vars nctx p (sigma $ Tvar x)
return (tau . sigma)
typeinfer (x:y:vars) ctx (Pair m n) a = do
sigma <- unify a (Times (Tvar x) (Tvar y))
tau <- typeinfer (evens vars) (applyctx sigma ctx) m (sigma (Tvar x))
rho <- typeinfer (odds vars) (applyctx (tau . sigma) ctx) n (tau (sigma (Tvar y)))
return (rho . tau . sigma)
where
odds [] = []
odds [_] = []
odds (_:e:xs) = e : odds xs
evens [] = []
evens [e] = [e]
evens (e:_:xs) = e : evens xs
typeinfer (y:vars) ctx (Pi1 m) a = typeinfer vars ctx m (Times a (Tvar y))
typeinfer (x:vars) ctx (Pi2 m) b = typeinfer vars ctx m (Times (Tvar x) b)
typeinfer (x:y:vars) ctx (Inl m) a = do
sigma <- unify a (Union (Tvar x) (Tvar y))
tau <- typeinfer vars (applyctx sigma ctx) m (sigma (Tvar x))
return (tau . sigma)
typeinfer (x:y:vars) ctx (Inr m) a = do
sigma <- unify a (Union (Tvar x) (Tvar y))
tau <- typeinfer vars (applyctx sigma ctx) m (sigma (Tvar y))
return (tau . sigma)
typeinfer (x:y:vars) ctx (Caseof m f g) a = do
sigma <- typeinfer (third1 vars) ctx f (Arrow (Tvar x) a)
tau <- typeinfer (third2 vars) (applyctx sigma ctx) g (Arrow (sigma $ Tvar y) (sigma a))
rho <- typeinfer (third3 vars) (applyctx (tau . sigma) ctx) m (Union (tau . sigma $ Tvar x) (tau . sigma $ Tvar y))
return (rho . tau . sigma)
where
third1 [] = []
third1 [_] = []
third1 [_,_] = []
third1 (_:_:e:xs) = e : third1 xs
third2 [] = []
third2 [_] = []
third2 [_,e] = [e]
third2 (_:e:_:xs) = e : third2 xs
third3 [] = []
third3 [e] = [e]
third3 [e,_] = [e]
third3 (e:_:_:xs) = e : third3 xs
typeinfer _ _ Unit a = unify Unitty a
typeinfer vars ctx (Abort m) _ = typeinfer vars ctx m Bottom
typeinfer vars ctx (Absurd m) a = do
sigma <- unify Bottom a
tau <- typeinfer vars (applyctx sigma ctx) m Bottom
return (tau . sigma)
-- | Type inference of a lambda expression.
typeinference :: Exp -> Maybe Type
typeinference e = normalize <$> (typeinfer variables emptyctx e (Tvar 0) <*> pure (Tvar 0))
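-- For instance (illustrative, relying on the 1-based deBruijn indices used
-- above), the identity term should infer a polymorphic arrow type:
--
-- > typeinference (Lambda (Var 1)) == Just (Arrow (Tvar 0) (Tvar 0)) -- shown as "A → A"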
-- | List of possible variable names.
typevariableNames :: [String]
typevariableNames = concatMap (`replicateM` ['A'..'Z']) [1..]
-- | Infinite list of variables.
variables :: [Variable]
variables = [1..]
-- | Substitutes a set of type variables on a type template for the smallest
-- possible ones.
normalizeTemplate :: Map.Map Integer Integer -> Integer -> Type -> (Map.Map Integer Integer, Integer)
normalizeTemplate sub n (Tvar m) = case Map.lookup m sub of
Just _ -> (sub, n)
Nothing -> (Map.insert m n sub, succ n)
normalizeTemplate sub n (Arrow a b) =
let (nsub, nn) = normalizeTemplate sub n a in normalizeTemplate nsub nn b
normalizeTemplate sub n (Times a b) =
let (nsub, nn) = normalizeTemplate sub n a in normalizeTemplate nsub nn b
normalizeTemplate sub n (Union a b) =
let (nsub, nn) = normalizeTemplate sub n a in normalizeTemplate nsub nn b
normalizeTemplate sub n Unitty = (sub, n)
normalizeTemplate sub n Bottom = (sub, n)
-- | Applies a set of variable substitutions to a type to normalize it.
applynormalization :: Map.Map Integer Integer -> Type -> Type
applynormalization sub (Tvar m) = case Map.lookup m sub of
Just n -> Tvar n
Nothing -> Tvar m
applynormalization sub (Arrow a b) = Arrow (applynormalization sub a) (applynormalization sub b)
applynormalization sub (Times a b) = Times (applynormalization sub a) (applynormalization sub b)
applynormalization sub (Union a b) = Union (applynormalization sub a) (applynormalization sub b)
applynormalization _ Unitty = Unitty
applynormalization _ Bottom = Bottom
-- | Normalizes a type, that is, substitutes the set of type variables for
-- the smallest possible ones.
normalize :: Type -> Type
normalize t = applynormalization (fst $ normalizeTemplate Map.empty 0 t) t
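-- For example, normalize (Arrow (Tvar 5) (Tvar 2)) == Arrow (Tvar 0) (Tvar 1):
-- variables are renumbered in order of first appearance.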
-- This is definitely not an easter egg
newtype Top = Top Type
instance Show Top where
show (Top (Tvar t)) = typevariableNames !! fromInteger t
show (Top (Arrow a Bottom)) = "(Ω∖" ++ showparensT (Top a) ++ ")ᴼ"
show (Top (Arrow a b)) = "((Ω∖" ++ showparensT (Top a) ++ ") ∪ " ++ showparensT (Top b) ++ ")ᴼ"
show (Top (Times a b)) = showparensT (Top a) ++ " ∩ " ++ showparensT (Top b)
show (Top (Union a b)) = showparensT (Top a) ++ " ∪ " ++ showparensT (Top b)
show (Top Unitty) = "Ω"
show (Top Bottom) = "Ø"
showparensT :: Top -> String
showparensT (Top (Tvar t)) = show (Top (Tvar t))
showparensT (Top Unitty) = show (Top Unitty)
showparensT (Top Bottom) = show (Top Bottom)
showparensT (Top m) = "(" ++ show (Top m) ++ ")"
| M42/mikrokosmos | source/Stlc/Types.hs | gpl-3.0 | 9,417 | 0 | 15 | 2,394 | 3,921 | 1,976 | 1,945 | 201 | 18 |
{-#LANGUAGE TypeSynonymInstances, UndecidableInstances, FlexibleInstances, MultiParamTypeClasses, GADTs, DataKinds, PolyKinds, TypeOperators, ViewPatterns, PatternSynonyms, RankNTypes, FlexibleContexts, AutoDeriveTypeable #-}
module Carnap.Languages.Util.GenericConstructors where
import Carnap.Core.Data.Types
import Carnap.Core.Data.Optics
import Carnap.Core.Data.Classes
import Data.List (intercalate)
-----------------------
-- 1. Propositions --
-----------------------
data IntProp b a where
Prop :: Int -> IntProp b (Form b)
instance Schematizable (IntProp b) where
schematize (Prop n) _
| n >= 26 = (['A' .. 'Z'] !! (n `mod` 26)) : '_' : show (n `div` 26)
| n >= 0 = [['A' .. 'Z'] !! n]
| otherwise = "P_" ++ show n
instance UniformlyEq (IntProp b) where
(Prop n) =* (Prop m) = n == m
instance FirstOrderLex (IntProp b)
data SchematicIntProp b a where
SProp :: Int -> SchematicIntProp b (Form b)
instance Schematizable (SchematicIntProp b) where
schematize (SProp n) _
            | n >= 14 && n <= 20 = ["ζφψχθγξ" !! (n - 14)]
| n >= 0 = ("ζφψχθγξ" !! (n `mod` 7)) : '_' : show (n `div` 7)
| otherwise = "φ_" ++ show n
instance UniformlyEq (SchematicIntProp b) where
(SProp n) =* (SProp m) = n == m
instance FirstOrderLex (SchematicIntProp b) where
isVarLex _ = True
instance Evaluable (SchematicIntProp b) where
eval = error "You should not be able to evaluate schemata"
instance Modelable m (SchematicIntProp b) where
satisfies = const eval
---------------------
-- 2. Predicates --
---------------------
data IntPred b c a where
Pred :: Arity (Term c) (Form b) ret -> Int -> IntPred b c ret
instance Schematizable (IntPred b c) where
schematize (Pred a n) xs = pred ++ tail
where arity = read $ show a
args = take arity $ xs ++ repeat "_"
pred
| n >= 26 = (['A' .. 'Z'] !! (n `mod` 26)) : '_' : show (n `div` 26)
| n >= 0 = [['A' .. 'Z'] !! n]
| otherwise = "F_" ++ show n
tail
| arity == 0 = ""
| otherwise = "(" ++ intercalate "," args ++ ")"
instance UniformlyEq (IntPred b c) where
(Pred a n) =* (Pred a' m) = show a == show a' && n == m
instance FirstOrderLex (IntPred b c)
data SchematicIntPred b c a where
SPred :: Arity (Term c) (Form b) ret -> Int -> SchematicIntPred b c ret
instance Schematizable (SchematicIntPred b c) where
schematize (SPred a n) xs = pred ++ tail
where arity = read $ show a
args = take arity $ xs ++ repeat "_"
pred
| n >= 0 && n <= 7 = ["ξθγζϚφψχ" !! n]
| n >= 0 = ("ξθγζϚφψχ" !! (n `mod` 8)) : '_' : show (n `div` 8)
| otherwise = "φ_" ++ show n
tail
| arity == 0 = ""
| otherwise = "(" ++ intercalate "," args ++ ")"
instance UniformlyEq (SchematicIntPred b c) where
(SPred a n) =* (SPred a' m) = show a == show a' && n == m
instance FirstOrderLex (SchematicIntPred b c) where
isVarLex _ = True
instance Evaluable (SchematicIntPred b c) where
eval = error "You should not be able to evaluate schemata"
instance Modelable m (SchematicIntPred b c) where
satisfies = const eval
data TermEq c b a where
TermEq :: TermEq c b (Term b -> Term b -> Form c)
instance Schematizable (TermEq c b) where
schematize TermEq = \(t1:t2:_) -> t1 ++ "=" ++ t2
instance UniformlyEq (TermEq c b) where
_ =* _ = True
instance FirstOrderLex (TermEq c b)
data TermElem c b a where
TermElem :: TermElem c b (Term b -> Term b -> Form c)
instance Schematizable (TermElem c b) where
schematize TermElem = \(t1:t2:_) -> t1 ++ "∈" ++ t2
instance UniformlyEq (TermElem c b) where
_ =* _ = True
instance FirstOrderLex (TermElem c b)
data TermSubset c b a where
TermSubset :: TermSubset c b (Term b -> Term b -> Form c)
instance Schematizable (TermSubset c b) where
schematize TermSubset = \(t1:t2:_) -> t1 ++ "⊆" ++ t2
instance UniformlyEq (TermSubset c b) where
_ =* _ = True
instance FirstOrderLex (TermSubset c b)
---------------------------
-- 3. Function Symbols --
---------------------------
data IntFunc c b a where
Func :: Arity (Term c) (Term b) ret -> Int -> IntFunc b c ret
instance Schematizable (IntFunc b c) where
schematize (Func a n) xs = pred ++ tail
where arity = read $ show a
args = take arity $ xs ++ repeat "_"
pred
| n >= 26 = (['a' .. 'z'] !! (n `mod` 26)) : '_' : show (n `div` 26)
| n >= 0 = [['a' .. 'z'] !! n]
| otherwise = "f_" ++ show n
tail
| arity == 0 = ""
| otherwise = "(" ++ intercalate "," args ++ ")"
instance UniformlyEq (IntFunc b c) where
(Func a n) =* (Func a' m) = show a == show a' && n == m
instance FirstOrderLex (IntFunc b c)
data SchematicIntFunc c b a where
SFunc :: Arity (Term c) (Term b) ret -> Int -> SchematicIntFunc b c ret
instance Schematizable (SchematicIntFunc b c) where
schematize (SFunc a n) xs = pred ++ tail
where arity = read $ show a
args = take arity $ xs ++ repeat "_"
pred
| arity == 0 && n >= 3 = ("τπμ" !! (n `mod` 3)) : '_' : show (n `div` 3)
| arity == 0 && n >= 0 = ["τπμ" !! n]
| n >= 5 && n <= 9 = ["τνυρκ" !! (n - 5)]
| n >= 0 = ("τνυρκ" !! (n `mod` 5)) : '_' : show (n `div` 5)
| otherwise = "τ_" ++ show n
tail
| arity == 0 = ""
| otherwise = "(" ++ intercalate "," args ++ ")"
instance UniformlyEq (SchematicIntFunc b c) where
(SFunc a n) =* (SFunc a' m) = show a == show a' && n == m
instance FirstOrderLex (SchematicIntFunc b c) where
isVarLex _ = True
data ElementarySetOperations b a where
Intersection :: ElementarySetOperations b (Term b -> Term b -> Term b)
Union :: ElementarySetOperations b (Term b -> Term b -> Term b)
RelComplement :: ElementarySetOperations b (Term b -> Term b -> Term b)
Powerset :: ElementarySetOperations b (Term b -> Term b)
instance Schematizable (ElementarySetOperations b) where
schematize Intersection (x:y:_) = "(" ++ x ++ "∩" ++ y ++ ")"
schematize Intersection _ = "∩"
schematize Union (x:y:_) = "(" ++ x ++ "∪" ++ y ++ ")"
schematize Union _ = "∪"
schematize RelComplement (x:y:_) = "(" ++ x ++ "/" ++ y ++ ")"
schematize RelComplement _ = "/"
schematize Powerset (x:_) = "Pow(" ++ x ++ ")"
schematize Powerset _ = "Pow"
instance UniformlyEq (ElementarySetOperations b) where
Intersection =* Intersection = True
Union =* Union = True
RelComplement =* RelComplement = True
Powerset =* Powerset = True
_ =* _ = False
instance FirstOrderLex (ElementarySetOperations b)
----------------------
-- 4. Connectives --
----------------------
data BooleanConn b a where
And :: BooleanConn b (Form b -> Form b -> Form b)
Or :: BooleanConn b (Form b -> Form b -> Form b)
If :: BooleanConn b (Form b -> Form b -> Form b)
Iff :: BooleanConn b (Form b -> Form b -> Form b)
Not :: BooleanConn b (Form b -> Form b)
instance Schematizable (BooleanConn b) where
schematize Iff (x:y:_) = "(" ++ x ++ " ↔ " ++ y ++ ")"
schematize Iff [] = "↔"
schematize If (x:y:_) = "(" ++ x ++ " → " ++ y ++ ")"
schematize If [] = "→"
schematize Or (x:y:_) = "(" ++ x ++ " ∨ " ++ y ++ ")"
schematize Or [] = "∨"
schematize And (x:y:_) = "(" ++ x ++ " ∧ " ++ y ++ ")"
schematize And [] = "∧"
schematize Not (x:_) = "¬" ++ x
schematize Not [] = "¬"
instance UniformlyEq (BooleanConn b) where
And =* And = True
Or =* Or = True
If =* If = True
Iff =* Iff = True
Not =* Not = True
_ =* _ = False
instance FirstOrderLex (BooleanConn b)
data BooleanConst b a where
Verum :: BooleanConst b (Form b)
Falsum :: BooleanConst b (Form b)
instance Schematizable (BooleanConst b) where
schematize Verum _ = "⊤"
schematize Falsum _ = "⊥"
instance UniformlyEq (BooleanConst b) where
Verum =* Verum = True
Falsum =* Falsum = True
_ =* _ = False
instance FirstOrderLex (BooleanConst b)
data Modality b a where
Box :: Modality b (Form b -> Form b)
Diamond :: Modality b (Form b -> Form b)
instance Schematizable (Modality b) where
schematize Box = \(x:_) -> "□" ++ x
schematize Diamond = \(x:_) -> "◇" ++ x
instance UniformlyEq (Modality b) where
Box =* Box = True
Diamond =* Diamond = True
_ =* _ = False
instance FirstOrderLex (Modality b)
data GenericContext b c a where
Context :: Int -> GenericContext b c (Form b -> Form c)
instance Schematizable (GenericContext b c) where
schematize (Context n) (x:_) = "Φ_" ++ show n ++ "(" ++ x ++ ")"
instance UniformlyEq (GenericContext b c) where
(Context n) =* (Context m) = n == m
instance FirstOrderLex (GenericContext b c) where
isVarLex _ = True
type PropositionalContext b = GenericContext b b
----------------
-- 5. Terms --
----------------
data IntConst b a where
Constant :: Int -> IntConst b (Term b)
instance Schematizable (IntConst b) where
schematize (Constant n) _
| n >= 26 = (['a' .. 'z'] !! (n `mod` 26)) : '_' : show (n `div` 26)
| n >= 0 = [['a' .. 'z'] !! n]
| otherwise = "c_" ++ show n
instance UniformlyEq (IntConst b) where
(Constant n) =* (Constant m) = n == m
instance FirstOrderLex (IntConst b)
data StandardVar b a where
Var :: String -> StandardVar b (Term b)
instance Schematizable (StandardVar b) where
schematize (Var s) = const s
instance UniformlyEq (StandardVar b) where
(Var n) =* (Var m) = n == m
-- XXX Note: standard variables are not schematic variables
instance FirstOrderLex (StandardVar b)
data IntIndex b a where
Index :: Int -> IntIndex b (Term b)
instance Schematizable (IntIndex b) where
schematize (Index n) _ = show n
instance UniformlyEq (IntIndex b) where
(Index n) =* (Index m) = n == m
instance FirstOrderLex (IntIndex b)
data PolyVar c b a where
PolyVar :: String -> Arity c b t -> PolyVar c b (Form t)
instance Schematizable (PolyVar c b) where
schematize (PolyVar s a) = const s
instance UniformlyEq (PolyVar c b) where
(PolyVar n a) =* (PolyVar m a') = n == m && show a == show a'
instance FirstOrderLex (PolyVar c b)
----------------------
-- 6. Binders --
----------------------
data RescopingOperator f g b c :: (* -> *) -> * -> * where
Rescope :: String -> RescopingOperator f g b c lang (f b -> (f b -> g c) -> g c)
instance UniformlyEq (RescopingOperator f g b c lang) where
(Rescope _) =* (Rescope _) = True
instance FirstOrderLex (RescopingOperator f g b c lang)
instance Schematizable (RescopingOperator f g b c lang) where
schematize (Rescope v) = \(t:f:_) -> "(" ++ t ++ "/" ++ v ++ ")" ++ f
type ScopedTermOperator = RescopingOperator Term Form
data DefiniteDescription b c a where
DefinDesc :: String -> DefiniteDescription b c ((Term c -> Form b) -> Term c)
instance Schematizable (DefiniteDescription b c) where
schematize (DefinDesc v) = \(x:_) -> "℩" ++ v ++ x
instance UniformlyEq (DefiniteDescription b c) where
(DefinDesc _) =* (DefinDesc _) = True
instance FirstOrderLex (DefiniteDescription b c)
data GenericTypedLambda f g b a where
TypedLambda :: String -> GenericTypedLambda f g b ((f b -> g c) -> g (b -> c))
instance UniformlyEq (GenericTypedLambda f g b) where
(TypedLambda _) =* (TypedLambda _) = True
instance FirstOrderLex (GenericTypedLambda f g b)
instance Schematizable (GenericTypedLambda f g b) where
schematize (TypedLambda v) = \(x:_) -> if last x == ']' then "λ" ++ v ++ x
else "λ" ++ v ++ "[" ++ x ++ "]"
type SOLambda = GenericTypedLambda Term Form Int
data GenericQuant f g b c a where
All :: String -> GenericQuant f g b c ((f c -> g b) -> g b)
Some :: String -> GenericQuant f g b c ((f c -> g b) -> g b)
type StandardQuant = GenericQuant Term Form
instance Schematizable (GenericQuant f g b c) where
schematize (All v) (x:_) = "∀" ++ v ++ x
schematize (All v) [] = "∀" ++ v
schematize (Some v) (x:_) = "∃" ++ v ++ x
schematize (Some v) [] = "∃" ++ v
instance UniformlyEq (GenericQuant f g b c) where
(All _) =* (All _) = True
(Some _) =* (Some _) = True
_ =* _ = False
instance FirstOrderLex (GenericQuant f g b c)
data QuantifiedContext b c :: (* -> *) -> * -> * where
QuantContext :: Int -> Arity (Term c) (Form b) ret -> QuantifiedContext b c lang (ret -> Form b)
instance Schematizable (QuantifiedContext b c lang) where
schematize (QuantContext n a) (x:_) = "Ψ^" ++ show a ++ "_" ++ show n ++ "(" ++ x ++ ")"
schematize (QuantContext n a) [] = "Ψ^" ++ show a ++ "_" ++ show n
instance UniformlyEq (QuantifiedContext b c lang) where
(QuantContext n a) =* (QuantContext m a') = n == m && show a == show a'
instance ReLex (QuantifiedContext b c) where
relex (QuantContext n a) = QuantContext n a
instance FirstOrderLex (QuantifiedContext b c lang) where
isVarLex _ = True
------------------
-- 7. Exotica --
------------------
data Indexer a b c :: (* -> *) -> * -> * where
AtIndex :: Indexer a b c lang (Form b -> Term a -> Form c)
instance FirstOrderLex (Indexer a b c lang)
instance UniformlyEq (Indexer a b c lang) where
AtIndex =* AtIndex = True
instance Schematizable (Indexer a b c lang) where
schematize AtIndex = \(x:y:_) -> "(" ++ x ++ "/" ++ y ++ ")"
instance ReLex (Indexer a b c) where
relex AtIndex = AtIndex
data Cons b a where
Cons :: Cons b (Term b -> Term b -> Term b)
instance Schematizable (Cons b) where
schematize Cons = \(x:y:_) -> x ++ "-" ++ y
instance FirstOrderLex (Cons b)
instance UniformlyEq (Cons b) where
Cons =* Cons = True
data Accessor c b a where
Accesses :: Accessor c b (Term b -> Term b -> Form c)
instance Schematizable (Accessor c b) where
schematize Accesses = \(t1:t2:_) -> t1 ++ "≺" ++ t2
instance UniformlyEq (Accessor c b) where
_ =* _ = True
instance FirstOrderLex (Accessor c b)
data Separation b c :: (* -> *) -> * -> * where
Separation :: String -> Separation b c lang (Term b -> (Term b -> Form c) -> Term b)
instance Schematizable (Separation b c lang) where
schematize (Separation v) (t:f:xs) = concat ["{",v,"∈",t,"|", f,"}"]
-- XXX Quick and dirty fix for display issue. Should
-- actually make this dependent on the presence of
-- some kind of variable constructor in the language
schematize (Separation v) _ = "{|}"
instance UniformlyEq (Separation b c lang) where
_ =* _ = True
instance FirstOrderLex (Separation b c lang)
instance ReLex (Separation b c) where
relex (Separation v) = (Separation v)
| gleachkr/Carnap | Carnap/src/Carnap/Languages/Util/GenericConstructors.hs | gpl-3.0 | 16,035 | 0 | 14 | 4,924 | 6,232 | 3,176 | 3,056 | 314 | 0 |
{-# LANGUAGE EmptyDataDecls,
MultiParamTypeClasses,
GADTs,
RankNTypes,
FlexibleInstances #-}
----------------
---------- Old, deprecated Csound interface!
---------- See CsoundExp.hs for better interface (incl. realtime support)
----------------
module Csound where
import Music (Note3, AbstractNote(..), AbstractPitch3(..), AbstractInt3(..), AbstractDur3(..), absolute, AbstractPhrase(..), Note(..), Music(..), explodeVoices)
import Util (log2)
import System.Process
import GHC.IO.Exception
-- import Codec.Midi
-- type MidiPitch = Double
-- freqToMidi :: Freq -> MidiPitch
-- freqToMidi f = 69 + 12*(log2 (f / 440))
-- midiToFreq :: MidiPitch -> Freq
-- midiToFreq d = (440*) $ 2 ** ((d - 69)/12)
csoundHeader = "f1 0 8192 10 1 .02 .01\n\nt 0 30"
testFreqList [] _ = ""
testFreqList (f:fs) n = "i1 " ++ (show n) ++ " 1 200 " ++ f ++ "\n\n"
++ (testFreqList fs (n+1))
csoundFreqs freqs = csoundHeader ++ "\n"
++ (testFreqList (map (\(AbstractPitch3 f) -> show f) freqs) 0)
csoundLine :: Double -> Double -> Double -> String
csoundLine time dur pitch = "i1 " ++ (show time) ++ " " ++ (show dur) ++ " 200 " ++ (show pitch) ++ "\n\n"
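-- For example, csoundLine 0 1.5 440 yields "i1 0.0 1.5 200 440.0\n\n":
-- instrument 1, start time, duration, a fixed amplitude of 200, and the pitch.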
phraseToList :: (Note p i d) => AbstractPhrase (AbstractNote p i d) -> [AbstractNote p i d]
phraseToList (AbstractPhrase ns) = ns
csound :: AbstractPhrase Note3 -> String
csound notes = csoundHeader ++ "\n" ++ csoundPhrase 0 notes'
-- where notes' = phraseToList $ absolute notes -- absolute is currently buggy
where notes' = phraseToList notes
csounds :: [AbstractPhrase Note3] -> String
csounds notes = csoundHeader ++ "\n" ++ foldl (++) "" (map (csoundPhrase 0) notes')
-- where notes' = map (phraseToList . absolute) notes -- absolute is currently buggy
where notes' = map phraseToList notes
csoundPhrase _ [] = ""
csoundPhrase n ((AbstractPitch (AbstractPitch3 p) (AbstractDur3 d)):notes) = (csoundLine n (d/1000) p) ++ (csoundPhrase (n + (d/1000)) notes)
csoundPhrase n ((Rest (AbstractDur3 d)):notes) = (csoundLine n (d/1000) 0) ++ (csoundPhrase (n + (d/1000)) notes)
csoundPhrase n (d:notes) = csoundPhrase n notes
testFreqs freqs = createProcess (proc "tones" (map (\(AbstractPitch3 f) -> show f) freqs))
playCsound notes = do writeFile "./csound-test.sco" $ csound notes;
readProcess "csound" ["-d", "csound-test/organ.orc", "csound-test.sco"] "";
system "mplayer -volume 48 test.wav"
playCsounds :: Music Note3 -> IO ExitCode
playCsounds (Start phrase) = playCsounds $ explodeVoices (Start phrase)
playCsounds (Voices phrases) = do writeFile "./csound-test.sco" $ csounds phrases;
readProcess "csound" ["-d", "csound-test/organ.orc", "csound-test.sco"] "";
system "mplayer test.wav"
| ejlilley/AbstractMusic | Csound.hs | gpl-3.0 | 2,900 | 0 | 13 | 659 | 836 | 445 | 391 | 39 | 1 |
-- Utility functions specific to Aura
{-
Copyright 2012, 2013, 2014 Colin Woodbury <colingw@gmail.com>
This file is part of Aura.
Aura is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Aura is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Aura. If not, see <http://www.gnu.org/licenses/>.
-}
module Aura.Utils where
import System.IO.Temp (withTempDirectory)
import Text.Regex.PCRE ((=~))
import System.IO (stdout, hFlush)
import Data.List (sortBy,intercalate)
import Aura.Colour.Text
import Aura.Languages (Language,whitespace)
import Aura.Monad.Aura
import Aura.Settings.Base
import Aura.Utils.Numbers
import Utilities (inDir,postPad)
import Shell (pwd)
---
----------------
-- CUSTOM OUTPUT
----------------
putStrLnA :: Colouror -> String -> Aura ()
putStrLnA colour s = putStrA colour $ s ++ "\n"
putStrLnA' :: Colouror -> String -> String
putStrLnA' colour s = putStrA' colour s ++ "\n"
-- An alternative definition below (commented out) adds `hFlush`, because some output appears to lag sometimes.
putStrA :: Colouror -> String -> Aura ()
putStrA colour = liftIO . putStr . putStrA' colour
--putStrA colour s = liftIO (putStr (putStrA' colour s) >> hFlush stdout)
putStrA' :: Colouror -> String -> String
putStrA' colour s = "aura >>= " ++ colour s
printList :: Colouror -> Colouror -> String -> [String] -> Aura ()
printList _ _ _ [] = return ()
printList tc ic msg items = liftIO . putStrLn . printList' tc ic msg $ items
printList' :: Colouror -> Colouror -> String -> [String] -> String
printList' tc ic m is = putStrLnA' tc m ++ colouredItems
where colouredItems = is >>= \i -> ic i ++ "\n"
scoldAndFail :: (Language -> String) -> Aura a
scoldAndFail msg = asks langOf >>= failure . putStrA' red . msg
----------
-- PROMPTS
----------
-- Prompts with the given message and accepts any answer matching a hardcoded regex of valid patterns.
yesNoPrompt :: (Language -> String) -> Aura Bool
yesNoPrompt msg = asks langOf >>= \lang -> do
putStrA yellow $ msg lang ++ " [Y/n] "
liftIO $ hFlush stdout
response <- liftIO getLine
return $ response =~ "y|Y|\\B"
-- | Doesn't prompt when `--noconfirm` is used.
optionalPrompt :: (Language -> String) -> Aura Bool
optionalPrompt msg = ask >>= check
where check ss | mustConfirm ss = yesNoPrompt msg
| otherwise = return True
-------
-- MISC
-------
withTempDir :: FilePath -> Aura a -> Aura a
withTempDir name action = ask >>= \ss -> do
curr <- liftIO pwd
let inTemp = withTempDirectory curr name
result <- liftIO $ inTemp (\dir -> inDir dir (runAura action ss))
wrap result
splitNameAndVer :: String -> (String,String)
splitNameAndVer pkg = (before,after)
where (before,_,after) = pkg =~ "[<>=]+" :: (String,String,String)
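-- For example, splitNameAndVer "gcc>=4.7.0" == ("gcc","4.7.0"); the comparison
-- operator itself is the (ignored) middle component of the regex match.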
splitName :: String -> String
splitName = fst . splitNameAndVer
splitVer :: String -> String
splitVer = snd . splitNameAndVer
groupPkgs :: [([a],[b],[c])] -> ([a],[b],[c])
groupPkgs = foldl groupPkgs' ([],[],[])
groupPkgs' :: ([a],[b],[c]) -> ([a],[b],[c]) -> ([a],[b],[c])
groupPkgs' (ps,as,os) (p,a,o) = (p ++ ps, a ++ as, o ++ os)
sortPkgs :: [String] -> [String]
sortPkgs = sortBy verNums
where verNums a b | name a /= name b = compare a b -- Different pkgs
| otherwise = compare (ver a) (ver b)
name = fst . pkgFileNameAndVer
ver = snd . pkgFileNameAndVer
-- Test on:
-- linux-3.2.14-1-x86_64.pkg.tar.xz
-- wine-1.4rc6-1-x86_64.pkg.tar.xz
-- ruby-1.9.3_p125-4-x86_64.pkg.tar.xz
-- NOTE: regex stuff is a little sloppy here.
pkgFileNameAndVer :: String -> (String, Maybe Version)
pkgFileNameAndVer p = (name,verNum')
where (name,_,_) = p =~ "-[0-9]+" :: (String,String,String)
verNum = p =~ ("[0-9][-0-9a-z._]+-" ++ archs) :: String
archs = "(a|x|i)" -- Representing "(any|x86_64|i686)"
verNum' = version verNum
-- Format two lists into nicely aligned rows a la `-Qi` or `-Si`.
entrify :: Settings -> [String] -> [String] -> String
entrify ss fs es = intercalate "\n" fsEs
where fsEs = zipWith combine fs' es
fs' = padding ss fs
combine f e = f ++ " : " ++ e
-- Right-pads strings according to the longest string in the group.
padding :: Settings -> [String] -> [String]
padding ss fs = map (\x -> postPad x ws longest) fs
where ws = whitespace $ langOf ss
longest = maximum $ map length fs
| joehillen/aura | src/Aura/Utils.hs | gpl-3.0 | 4,801 | 0 | 16 | 1,032 | 1,371 | 743 | 628 | 76 | 1 |
module Handler.LibraryItemToggleStatus where
import Import
-- | Toggle the 'inProgress' attribute of a 'LibraryItem'.
getLibraryItemToggleStatusR :: LibraryItemId -> Handler Html
getLibraryItemToggleStatusR libraryItemId = do
item <- runDB $ get404 libraryItemId
toggleInProgressStatus $ Entity libraryItemId item
redirect LibraryR
| prikhi/MyBookList | Handler/LibraryItemToggleStatus.hs | gpl-3.0 | 357 | 0 | 9 | 62 | 62 | 30 | 32 | 7 | 1 |
-- | These tools are useful when you actually want to *implement* a grammar in
-- Haskell. They mostly provide "lifting" functions from the single- to the
-- multi-tape case.
--
-- NOTE Consider copying this code if you want to use it stand-alone, as the
-- grammar products library pulls in many additional dependencies.
module BioInf.GrammarProducts.Tools where
| choener/GrammarProducts | old-BioInf/GrammarProducts/Tools.hs | gpl-3.0 | 369 | 0 | 3 | 59 | 13 | 11 | 2 | 1 | 0 |
{-|
Module : Emyrald
Description : The Emyrald Language v0.1.0.0
Copyright : © 2015 Dathan Ault-McCoy
License : GPL-3
Stability : experimental
Portability : POSIX
Emyrald is a lightweight, functional, symbolic, and homoiconic
programming language inspired by Scheme and Haskell. Its goal
is to create the first completely functional homoiconic language.
This library defines a set of data types and functions for the
parsing and evaluation of Emyrald expressions and programs. It has
been divided into five sub-modules:
* Error: defines the 'Error' data type for errors in evaluation
* Expr: defines the 'Expr' data type for representing Emyrald
data types in Haskell, as well as functions for working with it.
* Eval: provides functions for evaluating 'Expr's
* Builtins: defines all Emyrald builtin functions, exported as
'baseEnv'.
* Parse: provides functions for parsing a string into 'Expr's.
=Background
Not long after I first began to program, I was introduced to the
family of languages collectively known as Scheme. I was immediately
taken by its elegance and simplicity. I was fascinated by its
functionality and homoiconicity in particular. It quickly became
my favorite language and I devoted much of my time spent programming
to exploring it. Several years later I was introduced to Haskell.
Once again, I was taken. I became mesmerized by curried functions
and monads, and the intricacies of Haskell's typing system. Slowly
but surely, I began to shift my focus from Scheme to this new and
exciting language.
Between my introduction to Haskell and several articles that I ran
across online, I began to see some of Scheme's faults. Obvious
issues, like the ease with which you can evaluate an expression
"too much" or "too little", and subtle things, like how numbers
and strings are self-evaluating but lists aren't. I also began to
find myself missing many of the features I had grown used to in
Haskell, such as partial application, function composition, and
monads. What was really the nail in the coffin, though, was that
Scheme isn't /actually/ a functional language; its functions can
have side effects.
But even as I drifted away from Scheme and towards Haskell, through
every function I composed and monad I bound, there was this constant
nagging feeling in the back of my mind. After Scheme's macros and
dynamic types, Haskell's statements felt messy and its complicated
typing system clumsy and unnecessary. And, it wasn't homoiconic. For
all Haskell's elegance, it lacked Scheme's simplicity and symmetry
that were my first love.
I was conflicted, badly so. I just didn't know what to do. Should I
go for Haskell's elegance and brevity? Or should I stick to Scheme's
simplicity and symmetry? Then I slowly began to wonder, why should
I have to choose? Why not both? Is there some physical law that
prevents a unity of the two? No, there isn't! And thus began my
quest to create the perfect language, an ideological unity of Scheme
and Haskell. This is the quest that has led me to create Emyrald.
Although it is an early attempt, I hope it paves the way for future,
more successful attempts.
=Philosophy
Emyrald is an attempt to combine the best of Scheme (homoiconicity,
syntactic simplicity, and a simple type system) with the best of
Haskell (brevity, purely-functional curried functions, lazy
evaluation and computational contexts). This means no statements,
strict typing or complicated "syntactic sugar".
=Types
Emyrald has only three types: integers, symbols and functions. All
other "types" are implemented using structs which are, in turn,
implemented using self-evaluating functions.
===Integers
Integers are arbitrary precision but are otherwise as you would find
them in any other language. They are analogous to Scheme's @Bignum@
or Haskell's @Integer@.
===Symbols
Symbols in Emyrald are similar to symbols in LISP or Scheme. They are
effectively strings with different evaluation rules. Unlike in Scheme,
the @quote@ function has been built into symbols. Where @''foo@ would
parse to @(quote (quote foo))@ in Scheme, it parses to @Sym $ Sym $
SBase "foo"@ in Emyrald. In other words, symbols in Emyrald effectively
take an argument, which can either be another symbol, or an "SBase". A
symbol which has an "SBase" as its argument is a "bottom-level" symbol,
analogous to an unquoted symbol in Scheme. Evaluating @'foo@ yields
@foo@, and evaluating @foo@ returns whatever expression is bound to
@foo@. As such, symbols are used as variables in Emyrald.
===Functions
Functions in Emyrald are curried, so they take only one argument. A
function @f@ has two parts: a symbol @a@ and an expression @e@. When
@f@ is applied to argument @x@, @x@ is bound to @a@ within the scope of
@f@ and @e@ is evaluated. There are convenience functions for constructing
functions of multiple arguments.
===Lists
Lists are implemented using the builtin, two-argument, self-evaluating
@cons@ function. It acts similarly to the @(:)@ constructor in Haskell.
Emyrald supports all of the regular list manipulation functions.
===Characters and Strings
Unlike in most languages, Emyrald has no concept of characters or strings.
Characters are instead represented as their ASCII (or Unicode) code and
strings are lists of characters. Whether an integer represents a number or
a character is inferred from the function that it is being passed to.
===Floats
Floating point numbers are implemented using structs. The builtin function
@float@, like @cons@, takes two arguments and is self-evaluating. To make
operations on floats more efficient, they are converted into Haskell's
@Double@ for arithmetic operations.
=Syntax
Emyrald's syntax is very simple and completely whitespace-insensitive.
===Literals
The syntaxes for literals are:
* Integers: @42@, @-7@
* Symbols: @foo@, @'bar@, @''baz@, etc.
Emyrald also specifies convenience syntaxes for chars, strings, lists
and floats:
* Characters: @&c@, @&"@, @&\\newline@, @&\\space@
* Strings: @"Hello World!"@
* Lists: @(7, "foo", 'symbol)@, @()@
* Floats: @3.7@, @-2.0@
==Function Application
Functions are applied by @f[x]@. Function application is left-associative,
and since Emyrald functions are curried, @f[x][y][z]@ applies a multi-argument
function. For convenience, @f[x][y][z]@ can be written @f[x,y,z]@.
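For example, assuming an addition builtin named @add@ is defined in
'baseEnv' (the names that actually exist are listed in the Builtins
sub-module), the expression

> add[1,2]

is read as @add[1][2]@: @add@ is applied to @1@, and the resulting
partially applied function is then applied to @2@.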
-}
module Emyrald
(
Error,
Expr (..),
Env,
display,
symBase,
nSymBase,
evalExpr,
{-evalProg,-}
baseEnv,
parseExpr,
parseProg
) where
import Emyrald.Error
import Emyrald.Expr
import Emyrald.Eval
import Emyrald.Builtins
import Emyrald.Parse
| Kwarrtz/Emyrald | src/Emyrald.hs | gpl-3.0 | 6,688 | 0 | 5 | 1,194 | 67 | 44 | 23 | 17 | 0 |
module Lib
( runTargets
) where
import System.IO
import System.Exit (die)
import Network.SSH.Client.LibSSH2
import Network.SSH.Client.LibSSH2.Foreign (initialize, exit)
import Control.Concurrent.ParallelIO.Global
import System.Log.Logger (infoM, Priority (INFO, DEBUG), setLevel, updateGlobalLogger, addHandler, rootLoggerName, removeHandler)
import System.Log.Handler.Simple (fileHandler, streamHandler)
import System.Log.Handler (setFormatter)
import System.Log.Formatter (simpleLogFormatter)
import Control.Exception (catch, SomeException)
-- Target = host command
data Target = Target {
targetAddress :: String,
targetCommand :: String
} deriving (Show)
getPassword :: String -> IO String
getPassword prompt = do
putStr prompt
hFlush stdout
oldEchoState <- hGetEcho stdin
hSetEcho stdin False
pass <- getLine
hSetEcho stdin oldEchoState
putStrLn ""
return pass
getLineWithPrompt :: String -> IO String
getLineWithPrompt prompt = do
putStr prompt
hFlush stdout
getLine
readTargetLine :: String -> IO Target
readTargetLine line = return $ Target host (drop (length host) line)
where
host = head $ words line
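-- For example (with a made-up host), readTargetLine "web01 uptime -p" yields
-- Target "web01" " uptime -p"; note that the command keeps its leading space.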
loadTargetsFrom :: FilePath -> IO [Target]
loadTargetsFrom path = do
content <- readFile path
mapM readTargetLine (lines content)
openEnvironment :: IO ()
openEnvironment = do
initialize True
updateGlobalLogger rootLoggerName removeHandler
h' <- fileHandler "clustercute.log" DEBUG
let h = setFormatter h' (simpleLogFormatter "$time $loggername: $msg")
updateGlobalLogger "out" (addHandler h . setLevel INFO)
updateGlobalLogger "exit" (addHandler h . setLevel INFO)
ph' <- streamHandler stdout DEBUG
let ph = setFormatter ph' (simpleLogFormatter "$time $msg")
updateGlobalLogger "progress" (addHandler ph . setLevel INFO)
closeEnvironment :: IO ()
closeEnvironment = do
exit
stopGlobalPool
runOnTargets :: String -> String -> [Target] -> IO ()
runOnTargets username password targets = do
openEnvironment
parallel_ $ map (executeOnTarget username password) targets
closeEnvironment
executeOnTarget :: String -> String -> Target -> IO ()
executeOnTarget username password target = do
infoM "progress" ("start " ++ host)
catch remoteOperation (\e -> infoM "exit" (host ++ " failed " ++ show (e :: SomeException)))
infoM "progress" ("finish " ++ host)
return ()
where
host = targetAddress target
remoteOperation = do
(status, outputs) <- withSSH2User "/dev/null" username password host 22 (\session -> execCommands session [targetCommand target])
mapM_ (\output -> infoM "out" (host ++ ": " ++ show output)) outputs
infoM "exit" (host ++ " with " ++ show status)
runTargets :: String -> IO ()
runTargets targetFilePath = do
username <- getLineWithPrompt "Username: "
password <- getPassword "Password (first time): "
passwordVerify <- getPassword "Password (second time): "
if password == passwordVerify
then do
targets <- loadTargetsFrom targetFilePath
runOnTargets username password targets
else die "Different passwords given, exiting"
| bneijt/clustercute | src/Lib.hs | gpl-3.0 | 3,209 | 0 | 15 | 650 | 944 | 467 | 477 | 79 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.StorageGateway.Types
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.StorageGateway.Types
(
-- * Service Configuration
storageGateway
-- * Errors
, _InvalidGatewayRequestException
, _InternalServerError
-- * CachediSCSIVolume
, CachediSCSIVolume
, cachediSCSIVolume
, cscsivVolumeiSCSIAttributes
, cscsivVolumeStatus
, cscsivSourceSnapshotId
, cscsivVolumeARN
, cscsivVolumeProgress
, cscsivVolumeSizeInBytes
, cscsivVolumeId
, cscsivVolumeType
-- * ChapInfo
, ChapInfo
, chapInfo
, ciTargetARN
, ciSecretToAuthenticateInitiator
, ciInitiatorName
, ciSecretToAuthenticateTarget
-- * DeviceiSCSIAttributes
, DeviceiSCSIAttributes
, deviceiSCSIAttributes
, dscsiaTargetARN
, dscsiaChapEnabled
, dscsiaNetworkInterfaceId
, dscsiaNetworkInterfacePort
-- * Disk
, Disk
, disk
, dDiskAllocationResource
, dDiskAllocationType
, dDiskNode
, dDiskPath
, dDiskSizeInBytes
, dDiskStatus
, dDiskId
-- * GatewayInfo
, GatewayInfo
, gatewayInfo
, giGatewayARN
, giGatewayOperationalState
, giGatewayType
-- * NetworkInterface
, NetworkInterface
, networkInterface
, niIPv6Address
, niMACAddress
, niIPv4Address
-- * StorediSCSIVolume
, StorediSCSIVolume
, storediSCSIVolume
, sscsivVolumeiSCSIAttributes
, sscsivVolumeStatus
, sscsivSourceSnapshotId
, sscsivPreservedExistingData
, sscsivVolumeARN
, sscsivVolumeProgress
, sscsivVolumeSizeInBytes
, sscsivVolumeId
, sscsivVolumeDiskId
, sscsivVolumeType
-- * Tape
, Tape
, tape
, tTapeBarcode
, tTapeStatus
, tTapeARN
, tProgress
, tTapeSizeInBytes
, tVTLDevice
-- * TapeArchive
, TapeArchive
, tapeArchive
, taTapeBarcode
, taTapeStatus
, taTapeARN
, taTapeSizeInBytes
, taCompletionTime
, taRetrievedTo
-- * TapeRecoveryPointInfo
, TapeRecoveryPointInfo
, tapeRecoveryPointInfo
, trpiTapeStatus
, trpiTapeRecoveryPointTime
, trpiTapeARN
, trpiTapeSizeInBytes
-- * VTLDevice
, VTLDevice
, vTLDevice
, vtldDeviceiSCSIAttributes
, vtldVTLDeviceVendor
, vtldVTLDeviceARN
, vtldVTLDeviceType
, vtldVTLDeviceProductIdentifier
-- * VolumeInfo
, VolumeInfo
, volumeInfo
, viVolumeARN
, viVolumeType
-- * VolumeRecoveryPointInfo
, VolumeRecoveryPointInfo
, volumeRecoveryPointInfo
, vrpiVolumeRecoveryPointTime
, vrpiVolumeARN
, vrpiVolumeSizeInBytes
, vrpiVolumeUsageInBytes
-- * VolumeiSCSIAttributes
, VolumeiSCSIAttributes
, volumeiSCSIAttributes
, vscsiaLunNumber
, vscsiaTargetARN
, vscsiaChapEnabled
, vscsiaNetworkInterfaceId
, vscsiaNetworkInterfacePort
) where
import Network.AWS.Prelude
import Network.AWS.Sign.V4
import Network.AWS.StorageGateway.Types.Product
import Network.AWS.StorageGateway.Types.Sum
-- | API version '2013-06-30' of the Amazon Storage Gateway SDK configuration.
storageGateway :: Service
storageGateway =
Service
{ _svcAbbrev = "StorageGateway"
, _svcSigner = v4
, _svcPrefix = "storagegateway"
, _svcVersion = "2013-06-30"
, _svcEndpoint = defaultEndpoint storageGateway
, _svcTimeout = Just 70
, _svcCheck = statusSuccess
, _svcError = parseJSONError
, _svcRetry = retry
}
where
retry =
Exponential
{ _retryBase = 5.0e-2
, _retryGrowth = 2
, _retryAttempts = 5
, _retryCheck = check
}
check e
| has (hasCode "ThrottlingException" . hasStatus 400) e =
Just "throttling_exception"
| has (hasCode "Throttling" . hasStatus 400) e = Just "throttling"
| has (hasStatus 503) e = Just "service_unavailable"
| has (hasStatus 500) e = Just "general_server_error"
| has (hasStatus 509) e = Just "limit_exceeded"
| otherwise = Nothing
-- | An exception occurred because an invalid gateway request was issued to
-- the service. See the error and message fields for more information.
_InvalidGatewayRequestException :: AsError a => Getting (First ServiceError) a ServiceError
_InvalidGatewayRequestException =
_ServiceError . hasStatus 400 . hasCode "InvalidGatewayRequestException"
-- | An internal server error has occurred during the request. See the error
-- and message fields for more information.
_InternalServerError :: AsError a => Getting (First ServiceError) a ServiceError
_InternalServerError =
_ServiceError . hasStatus 500 . hasCode "InternalServerError"
| fmapfmapfmap/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/Types.hs | mpl-2.0 | 5,058 | 0 | 13 | 1,244 | 712 | 433 | 279 | 141 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.FusionTables.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.FusionTables.Types.Product where
import Network.Google.FusionTables.Types.Sum
import Network.Google.Prelude
-- | Represents a list of columns in a table.
--
-- /See:/ 'columnList' smart constructor.
data ColumnList =
ColumnList'
{ _clTotalItems :: !(Maybe (Textual Int32))
, _clNextPageToken :: !(Maybe Text)
, _clKind :: !Text
, _clItems :: !(Maybe [Column])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ColumnList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'clTotalItems'
--
-- * 'clNextPageToken'
--
-- * 'clKind'
--
-- * 'clItems'
columnList
:: ColumnList
columnList =
ColumnList'
{ _clTotalItems = Nothing
, _clNextPageToken = Nothing
, _clKind = "fusiontables#columnList"
, _clItems = Nothing
}
-- | Total number of columns for the table.
clTotalItems :: Lens' ColumnList (Maybe Int32)
clTotalItems
= lens _clTotalItems (\ s a -> s{_clTotalItems = a})
. mapping _Coerce
-- | Token used to access the next page of this result. No token is displayed
-- if there are no more pages left.
clNextPageToken :: Lens' ColumnList (Maybe Text)
clNextPageToken
= lens _clNextPageToken
(\ s a -> s{_clNextPageToken = a})
-- | The kind of item this is. For a column list, this is always
-- fusiontables#columnList.
clKind :: Lens' ColumnList Text
clKind = lens _clKind (\ s a -> s{_clKind = a})
-- | List of all requested columns.
clItems :: Lens' ColumnList [Column]
clItems
= lens _clItems (\ s a -> s{_clItems = a}) . _Default
. _Coerce
instance FromJSON ColumnList where
parseJSON
= withObject "ColumnList"
(\ o ->
ColumnList' <$>
(o .:? "totalItems") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "fusiontables#columnList")
<*> (o .:? "items" .!= mempty))
instance ToJSON ColumnList where
toJSON ColumnList'{..}
= object
(catMaybes
[("totalItems" .=) <$> _clTotalItems,
("nextPageToken" .=) <$> _clNextPageToken,
Just ("kind" .= _clKind), ("items" .=) <$> _clItems])
-- | Represents a list of tables.
--
-- /See:/ 'tableList' smart constructor.
data TableList =
TableList'
{ _tlNextPageToken :: !(Maybe Text)
, _tlKind :: !Text
, _tlItems :: !(Maybe [Table])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TableList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tlNextPageToken'
--
-- * 'tlKind'
--
-- * 'tlItems'
tableList
:: TableList
tableList =
TableList'
{ _tlNextPageToken = Nothing
, _tlKind = "fusiontables#tableList"
, _tlItems = Nothing
}
-- | Token used to access the next page of this result. No token is displayed
-- if there are no more pages left.
tlNextPageToken :: Lens' TableList (Maybe Text)
tlNextPageToken
= lens _tlNextPageToken
(\ s a -> s{_tlNextPageToken = a})
-- | The kind of item this is. For table list, this is always
-- fusiontables#tableList.
tlKind :: Lens' TableList Text
tlKind = lens _tlKind (\ s a -> s{_tlKind = a})
-- | List of all requested tables.
tlItems :: Lens' TableList [Table]
tlItems
= lens _tlItems (\ s a -> s{_tlItems = a}) . _Default
. _Coerce
instance FromJSON TableList where
parseJSON
= withObject "TableList"
(\ o ->
TableList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "fusiontables#tableList")
<*> (o .:? "items" .!= mempty))
instance ToJSON TableList where
toJSON TableList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _tlNextPageToken,
Just ("kind" .= _tlKind), ("items" .=) <$> _tlItems])
-- | Represents a StyleFunction within a StyleSetting
--
-- /See:/ 'styleFunction' smart constructor.
data StyleFunction =
StyleFunction'
{ _sfBuckets :: !(Maybe [Bucket])
, _sfKind :: !(Maybe Text)
, _sfGradient :: !(Maybe StyleFunctionGradient)
, _sfColumnName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleFunction' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sfBuckets'
--
-- * 'sfKind'
--
-- * 'sfGradient'
--
-- * 'sfColumnName'
styleFunction
:: StyleFunction
styleFunction =
StyleFunction'
{ _sfBuckets = Nothing
, _sfKind = Nothing
, _sfGradient = Nothing
, _sfColumnName = Nothing
}
-- | Bucket function that assigns a style based on the range a column value
-- falls into.
sfBuckets :: Lens' StyleFunction [Bucket]
sfBuckets
= lens _sfBuckets (\ s a -> s{_sfBuckets = a}) .
_Default
. _Coerce
-- | Stylers can be one of three kinds: \"fusiontables#fromColumn if the
-- column value is to be used as is, i.e., the column values can have
-- colors in #RRGGBBAA format or integer line widths or icon names;
-- fusiontables#gradient if the styling of the row is to be based on
-- applying the gradient function on the column value; or
-- fusiontables#buckets if the styling is to be based on the bucket into which
-- the column value falls.
sfKind :: Lens' StyleFunction (Maybe Text)
sfKind = lens _sfKind (\ s a -> s{_sfKind = a})
-- | Gradient function that interpolates a range of colors based on column
-- value.
sfGradient :: Lens' StyleFunction (Maybe StyleFunctionGradient)
sfGradient
= lens _sfGradient (\ s a -> s{_sfGradient = a})
-- | Name of the column whose value is used in the style.
sfColumnName :: Lens' StyleFunction (Maybe Text)
sfColumnName
= lens _sfColumnName (\ s a -> s{_sfColumnName = a})
instance FromJSON StyleFunction where
parseJSON
= withObject "StyleFunction"
(\ o ->
StyleFunction' <$>
(o .:? "buckets" .!= mempty) <*> (o .:? "kind") <*>
(o .:? "gradient")
<*> (o .:? "columnName"))
instance ToJSON StyleFunction where
toJSON StyleFunction'{..}
= object
(catMaybes
[("buckets" .=) <$> _sfBuckets,
("kind" .=) <$> _sfKind,
("gradient" .=) <$> _sfGradient,
("columnName" .=) <$> _sfColumnName])
-- | Identifier of the base column. If present, this column is derived from
-- the specified base column.
--
-- /See:/ 'columnBaseColumn' smart constructor.
data ColumnBaseColumn =
ColumnBaseColumn'
{ _cbcTableIndex :: !(Maybe (Textual Int32))
, _cbcColumnId :: !(Maybe (Textual Int32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ColumnBaseColumn' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbcTableIndex'
--
-- * 'cbcColumnId'
columnBaseColumn
:: ColumnBaseColumn
columnBaseColumn =
ColumnBaseColumn' {_cbcTableIndex = Nothing, _cbcColumnId = Nothing}
-- | Offset to the entry in the list of base tables in the table definition.
cbcTableIndex :: Lens' ColumnBaseColumn (Maybe Int32)
cbcTableIndex
= lens _cbcTableIndex
(\ s a -> s{_cbcTableIndex = a})
. mapping _Coerce
-- | The id of the column in the base table from which this column is
-- derived.
cbcColumnId :: Lens' ColumnBaseColumn (Maybe Int32)
cbcColumnId
= lens _cbcColumnId (\ s a -> s{_cbcColumnId = a}) .
mapping _Coerce
instance FromJSON ColumnBaseColumn where
parseJSON
= withObject "ColumnBaseColumn"
(\ o ->
ColumnBaseColumn' <$>
(o .:? "tableIndex") <*> (o .:? "columnId"))
instance ToJSON ColumnBaseColumn where
toJSON ColumnBaseColumn'{..}
= object
(catMaybes
[("tableIndex" .=) <$> _cbcTableIndex,
("columnId" .=) <$> _cbcColumnId])
-- | Represents a response to a SQL statement.
--
-- /See:/ 'sQLresponse' smart constructor.
data SQLresponse =
SQLresponse'
{ _sqlKind :: !Text
, _sqlRows :: !(Maybe [[JSONValue]])
, _sqlColumns :: !(Maybe [Text])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SQLresponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sqlKind'
--
-- * 'sqlRows'
--
-- * 'sqlColumns'
sQLresponse
:: SQLresponse
sQLresponse =
SQLresponse'
{ _sqlKind = "fusiontables#sqlresponse"
, _sqlRows = Nothing
, _sqlColumns = Nothing
}
-- | The kind of item this is. For responses to SQL queries, this is always
-- fusiontables#sqlresponse.
sqlKind :: Lens' SQLresponse Text
sqlKind = lens _sqlKind (\ s a -> s{_sqlKind = a})
-- | The rows in the table. For each cell we print out whatever cell value
-- (e.g., numeric, string) exists. Thus it is important that each cell
-- contains only one value.
sqlRows :: Lens' SQLresponse [[JSONValue]]
sqlRows
= lens _sqlRows (\ s a -> s{_sqlRows = a}) . _Default
. _Coerce
-- | Columns in the table.
sqlColumns :: Lens' SQLresponse [Text]
sqlColumns
= lens _sqlColumns (\ s a -> s{_sqlColumns = a}) .
_Default
. _Coerce
instance FromJSON SQLresponse where
parseJSON
= withObject "SQLresponse"
(\ o ->
SQLresponse' <$>
(o .:? "kind" .!= "fusiontables#sqlresponse") <*>
(o .:? "rows" .!= mempty)
<*> (o .:? "columns" .!= mempty))
instance ToJSON SQLresponse where
toJSON SQLresponse'{..}
= object
(catMaybes
[Just ("kind" .= _sqlKind), ("rows" .=) <$> _sqlRows,
("columns" .=) <$> _sqlColumns])
--
-- /See:/ 'styleFunctionGradientColorsItem' smart constructor.
data StyleFunctionGradientColorsItem =
StyleFunctionGradientColorsItem'
{ _sfgciColor :: !(Maybe Text)
, _sfgciOpacity :: !(Maybe (Textual Double))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleFunctionGradientColorsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sfgciColor'
--
-- * 'sfgciOpacity'
styleFunctionGradientColorsItem
:: StyleFunctionGradientColorsItem
styleFunctionGradientColorsItem =
StyleFunctionGradientColorsItem'
{_sfgciColor = Nothing, _sfgciOpacity = Nothing}
-- | Color in #RRGGBB format.
sfgciColor :: Lens' StyleFunctionGradientColorsItem (Maybe Text)
sfgciColor
= lens _sfgciColor (\ s a -> s{_sfgciColor = a})
-- | Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
sfgciOpacity :: Lens' StyleFunctionGradientColorsItem (Maybe Double)
sfgciOpacity
= lens _sfgciOpacity (\ s a -> s{_sfgciOpacity = a})
. mapping _Coerce
instance FromJSON StyleFunctionGradientColorsItem
where
parseJSON
= withObject "StyleFunctionGradientColorsItem"
(\ o ->
StyleFunctionGradientColorsItem' <$>
(o .:? "color") <*> (o .:? "opacity"))
instance ToJSON StyleFunctionGradientColorsItem where
toJSON StyleFunctionGradientColorsItem'{..}
= object
(catMaybes
[("color" .=) <$> _sfgciColor,
("opacity" .=) <$> _sfgciOpacity])
-- | Represents a list of styles for a given table.
--
-- /See:/ 'styleSettingList' smart constructor.
data StyleSettingList =
StyleSettingList'
{ _sslTotalItems :: !(Maybe (Textual Int32))
, _sslNextPageToken :: !(Maybe Text)
, _sslKind :: !Text
, _sslItems :: !(Maybe [StyleSetting])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleSettingList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sslTotalItems'
--
-- * 'sslNextPageToken'
--
-- * 'sslKind'
--
-- * 'sslItems'
styleSettingList
:: StyleSettingList
styleSettingList =
StyleSettingList'
{ _sslTotalItems = Nothing
, _sslNextPageToken = Nothing
, _sslKind = "fusiontables#styleSettingList"
, _sslItems = Nothing
}
-- | Total number of styles for the table.
sslTotalItems :: Lens' StyleSettingList (Maybe Int32)
sslTotalItems
= lens _sslTotalItems
(\ s a -> s{_sslTotalItems = a})
. mapping _Coerce
-- | Token used to access the next page of this result. No token is displayed
-- if there are no more styles left.
sslNextPageToken :: Lens' StyleSettingList (Maybe Text)
sslNextPageToken
= lens _sslNextPageToken
(\ s a -> s{_sslNextPageToken = a})
-- | The kind of item this is. For a style list, this is always
-- fusiontables#styleSettingList .
sslKind :: Lens' StyleSettingList Text
sslKind = lens _sslKind (\ s a -> s{_sslKind = a})
-- | All requested style settings.
sslItems :: Lens' StyleSettingList [StyleSetting]
sslItems
= lens _sslItems (\ s a -> s{_sslItems = a}) .
_Default
. _Coerce
instance FromJSON StyleSettingList where
parseJSON
= withObject "StyleSettingList"
(\ o ->
StyleSettingList' <$>
(o .:? "totalItems") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "fusiontables#styleSettingList")
<*> (o .:? "items" .!= mempty))
instance ToJSON StyleSettingList where
toJSON StyleSettingList'{..}
= object
(catMaybes
[("totalItems" .=) <$> _sslTotalItems,
("nextPageToken" .=) <$> _sslNextPageToken,
Just ("kind" .= _sslKind),
("items" .=) <$> _sslItems])
-- | Specifies the minimum and maximum values, the color, opacity, icon and
-- weight of a bucket within a StyleSetting.
--
-- /See:/ 'bucket' smart constructor.
data Bucket =
Bucket'
{ _bMax :: !(Maybe (Textual Double))
, _bColor :: !(Maybe Text)
, _bWeight :: !(Maybe (Textual Int32))
, _bIcon :: !(Maybe Text)
, _bOpacity :: !(Maybe (Textual Double))
, _bMin :: !(Maybe (Textual Double))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Bucket' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bMax'
--
-- * 'bColor'
--
-- * 'bWeight'
--
-- * 'bIcon'
--
-- * 'bOpacity'
--
-- * 'bMin'
bucket
:: Bucket
bucket =
Bucket'
{ _bMax = Nothing
, _bColor = Nothing
, _bWeight = Nothing
, _bIcon = Nothing
, _bOpacity = Nothing
, _bMin = Nothing
}
-- | Maximum value in the selected column for a row to be styled according to
-- the bucket color, opacity, icon, or weight.
bMax :: Lens' Bucket (Maybe Double)
bMax
= lens _bMax (\ s a -> s{_bMax = a}) .
mapping _Coerce
-- | Color of line or the interior of a polygon in #RRGGBB format.
bColor :: Lens' Bucket (Maybe Text)
bColor = lens _bColor (\ s a -> s{_bColor = a})
-- | Width of a line (in pixels).
bWeight :: Lens' Bucket (Maybe Int32)
bWeight
= lens _bWeight (\ s a -> s{_bWeight = a}) .
mapping _Coerce
-- | Icon name used for a point.
bIcon :: Lens' Bucket (Maybe Text)
bIcon = lens _bIcon (\ s a -> s{_bIcon = a})
-- | Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
bOpacity :: Lens' Bucket (Maybe Double)
bOpacity
= lens _bOpacity (\ s a -> s{_bOpacity = a}) .
mapping _Coerce
-- | Minimum value in the selected column for a row to be styled according to
-- the bucket color, opacity, icon, or weight.
bMin :: Lens' Bucket (Maybe Double)
bMin
= lens _bMin (\ s a -> s{_bMin = a}) .
mapping _Coerce
instance FromJSON Bucket where
parseJSON
= withObject "Bucket"
(\ o ->
Bucket' <$>
(o .:? "max") <*> (o .:? "color") <*>
(o .:? "weight")
<*> (o .:? "icon")
<*> (o .:? "opacity")
<*> (o .:? "min"))
instance ToJSON Bucket where
toJSON Bucket'{..}
= object
(catMaybes
[("max" .=) <$> _bMax, ("color" .=) <$> _bColor,
("weight" .=) <$> _bWeight, ("icon" .=) <$> _bIcon,
("opacity" .=) <$> _bOpacity, ("min" .=) <$> _bMin])
-- | Represents a line geometry.
--
-- /See:/ 'line' smart constructor.
data Line =
Line'
{ _lCoordinates :: !(Maybe [[Textual Double]])
, _lType :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Line' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lCoordinates'
--
-- * 'lType'
line
:: Line
line = Line' {_lCoordinates = Nothing, _lType = "LineString"}
-- | The coordinates that define the line.
lCoordinates :: Lens' Line [[Double]]
lCoordinates
= lens _lCoordinates (\ s a -> s{_lCoordinates = a})
. _Default
. _Coerce
-- | Type: A line geometry.
lType :: Lens' Line Text
lType = lens _lType (\ s a -> s{_lType = a})
instance FromJSON Line where
parseJSON
= withObject "Line"
(\ o ->
Line' <$>
(o .:? "coordinates" .!= mempty) <*>
(o .:? "type" .!= "LineString"))
instance ToJSON Line where
toJSON Line'{..}
= object
(catMaybes
[("coordinates" .=) <$> _lCoordinates,
Just ("type" .= _lType)])
-- | Represents a complete StyleSettings object. The primary key is a
-- combination of the tableId and a styleId.
--
-- /See:/ 'styleSetting' smart constructor.
data StyleSetting =
StyleSetting'
{ _ssPolylineOptions :: !(Maybe LineStyle)
, _ssPolygonOptions :: !(Maybe PolygonStyle)
, _ssMarkerOptions :: !(Maybe PointStyle)
, _ssKind :: !Text
, _ssName :: !(Maybe Text)
, _ssStyleId :: !(Maybe (Textual Int32))
, _ssTableId :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleSetting' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssPolylineOptions'
--
-- * 'ssPolygonOptions'
--
-- * 'ssMarkerOptions'
--
-- * 'ssKind'
--
-- * 'ssName'
--
-- * 'ssStyleId'
--
-- * 'ssTableId'
styleSetting
:: StyleSetting
styleSetting =
StyleSetting'
{ _ssPolylineOptions = Nothing
, _ssPolygonOptions = Nothing
, _ssMarkerOptions = Nothing
, _ssKind = "fusiontables#styleSetting"
, _ssName = Nothing
, _ssStyleId = Nothing
, _ssTableId = Nothing
}
-- | Style definition for lines in the table.
ssPolylineOptions :: Lens' StyleSetting (Maybe LineStyle)
ssPolylineOptions
= lens _ssPolylineOptions
(\ s a -> s{_ssPolylineOptions = a})
-- | Style definition for polygons in the table.
ssPolygonOptions :: Lens' StyleSetting (Maybe PolygonStyle)
ssPolygonOptions
= lens _ssPolygonOptions
(\ s a -> s{_ssPolygonOptions = a})
-- | Style definition for points in the table.
ssMarkerOptions :: Lens' StyleSetting (Maybe PointStyle)
ssMarkerOptions
= lens _ssMarkerOptions
(\ s a -> s{_ssMarkerOptions = a})
-- | The kind of item this is. A StyleSetting contains the style definitions
-- for points, lines, and polygons in a table. Since a table can have any
-- one or all of them, a style definition can have point, line and polygon
-- style definitions.
ssKind :: Lens' StyleSetting Text
ssKind = lens _ssKind (\ s a -> s{_ssKind = a})
-- | Optional name for the style setting.
ssName :: Lens' StyleSetting (Maybe Text)
ssName = lens _ssName (\ s a -> s{_ssName = a})
-- | Identifier for the style setting (unique only within tables).
ssStyleId :: Lens' StyleSetting (Maybe Int32)
ssStyleId
= lens _ssStyleId (\ s a -> s{_ssStyleId = a}) .
mapping _Coerce
-- | Identifier for the table.
ssTableId :: Lens' StyleSetting (Maybe Text)
ssTableId
= lens _ssTableId (\ s a -> s{_ssTableId = a})
instance FromJSON StyleSetting where
parseJSON
= withObject "StyleSetting"
(\ o ->
StyleSetting' <$>
(o .:? "polylineOptions") <*>
(o .:? "polygonOptions")
<*> (o .:? "markerOptions")
<*> (o .:? "kind" .!= "fusiontables#styleSetting")
<*> (o .:? "name")
<*> (o .:? "styleId")
<*> (o .:? "tableId"))
instance ToJSON StyleSetting where
toJSON StyleSetting'{..}
= object
(catMaybes
[("polylineOptions" .=) <$> _ssPolylineOptions,
("polygonOptions" .=) <$> _ssPolygonOptions,
("markerOptions" .=) <$> _ssMarkerOptions,
Just ("kind" .= _ssKind), ("name" .=) <$> _ssName,
("styleId" .=) <$> _ssStyleId,
("tableId" .=) <$> _ssTableId])
-- | Represents a point object.
--
-- /See:/ 'point' smart constructor.
data Point =
Point'
{ _pCoordinates :: !(Maybe [Textual Double])
, _pType :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Point' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pCoordinates'
--
-- * 'pType'
point
:: Point
point = Point' {_pCoordinates = Nothing, _pType = "Point"}
-- | The coordinates that define the point.
pCoordinates :: Lens' Point [Double]
pCoordinates
= lens _pCoordinates (\ s a -> s{_pCoordinates = a})
. _Default
. _Coerce
-- | Point: A point geometry.
pType :: Lens' Point Text
pType = lens _pType (\ s a -> s{_pType = a})
instance FromJSON Point where
parseJSON
= withObject "Point"
(\ o ->
Point' <$>
(o .:? "coordinates" .!= mempty) <*>
(o .:? "type" .!= "Point"))
instance ToJSON Point where
toJSON Point'{..}
= object
(catMaybes
[("coordinates" .=) <$> _pCoordinates,
Just ("type" .= _pType)])
-- | Represents a polygon object.
--
-- /See:/ 'polygon' smart constructor.
data Polygon =
Polygon'
{ _polCoordinates :: !(Maybe [[[Textual Double]]])
, _polType :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Polygon' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'polCoordinates'
--
-- * 'polType'
polygon
:: Polygon
polygon = Polygon' {_polCoordinates = Nothing, _polType = "Polygon"}
-- | The coordinates that define the polygon.
polCoordinates :: Lens' Polygon [[[Double]]]
polCoordinates
= lens _polCoordinates
(\ s a -> s{_polCoordinates = a})
. _Default
. _Coerce
-- | Type: A polygon geometry.
polType :: Lens' Polygon Text
polType = lens _polType (\ s a -> s{_polType = a})
instance FromJSON Polygon where
parseJSON
= withObject "Polygon"
(\ o ->
Polygon' <$>
(o .:? "coordinates" .!= mempty) <*>
(o .:? "type" .!= "Polygon"))
instance ToJSON Polygon where
toJSON Polygon'{..}
= object
(catMaybes
[("coordinates" .=) <$> _polCoordinates,
Just ("type" .= _polType)])
-- | Represents a list of tasks for a table.
--
-- /See:/ 'taskList' smart constructor.
data TaskList =
TaskList'
{ _tTotalItems :: !(Maybe (Textual Int32))
, _tNextPageToken :: !(Maybe Text)
, _tKind :: !Text
, _tItems :: !(Maybe [Task])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TaskList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tTotalItems'
--
-- * 'tNextPageToken'
--
-- * 'tKind'
--
-- * 'tItems'
taskList
:: TaskList
taskList =
TaskList'
{ _tTotalItems = Nothing
, _tNextPageToken = Nothing
, _tKind = "fusiontables#taskList"
, _tItems = Nothing
}
-- | Total number of tasks for the table.
tTotalItems :: Lens' TaskList (Maybe Int32)
tTotalItems
= lens _tTotalItems (\ s a -> s{_tTotalItems = a}) .
mapping _Coerce
-- | Token used to access the next page of this result. No token is displayed
-- if there are no more pages left.
tNextPageToken :: Lens' TaskList (Maybe Text)
tNextPageToken
= lens _tNextPageToken
(\ s a -> s{_tNextPageToken = a})
-- | Type of the resource. This is always \"fusiontables#taskList\".
tKind :: Lens' TaskList Text
tKind = lens _tKind (\ s a -> s{_tKind = a})
-- | List of all requested tasks.
tItems :: Lens' TaskList [Task]
tItems
= lens _tItems (\ s a -> s{_tItems = a}) . _Default .
_Coerce
instance FromJSON TaskList where
parseJSON
= withObject "TaskList"
(\ o ->
TaskList' <$>
(o .:? "totalItems") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "fusiontables#taskList")
<*> (o .:? "items" .!= mempty))
instance ToJSON TaskList where
toJSON TaskList'{..}
= object
(catMaybes
[("totalItems" .=) <$> _tTotalItems,
("nextPageToken" .=) <$> _tNextPageToken,
Just ("kind" .= _tKind), ("items" .=) <$> _tItems])
-- | Represents a Geometry object.
--
-- /See:/ 'geometry' smart constructor.
data Geometry =
Geometry'
{ _gGeometries :: !(Maybe [JSONValue])
, _gGeometry :: !(Maybe JSONValue)
, _gType :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Geometry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gGeometries'
--
-- * 'gGeometry'
--
-- * 'gType'
geometry
:: Geometry
geometry =
Geometry'
{ _gGeometries = Nothing
, _gGeometry = Nothing
, _gType = "GeometryCollection"
}
-- | The list of geometries in this geometry collection.
gGeometries :: Lens' Geometry [JSONValue]
gGeometries
= lens _gGeometries (\ s a -> s{_gGeometries = a}) .
_Default
. _Coerce
gGeometry :: Lens' Geometry (Maybe JSONValue)
gGeometry
= lens _gGeometry (\ s a -> s{_gGeometry = a})
-- | Type: A collection of geometries.
gType :: Lens' Geometry Text
gType = lens _gType (\ s a -> s{_gType = a})
instance FromJSON Geometry where
parseJSON
= withObject "Geometry"
(\ o ->
Geometry' <$>
(o .:? "geometries" .!= mempty) <*>
(o .:? "geometry")
<*> (o .:? "type" .!= "GeometryCollection"))
instance ToJSON Geometry where
toJSON Geometry'{..}
= object
(catMaybes
[("geometries" .=) <$> _gGeometries,
("geometry" .=) <$> _gGeometry,
Just ("type" .= _gType)])
-- | Represents a list of templates for a given table.
--
-- /See:/ 'templateList' smart constructor.
data TemplateList =
TemplateList'
{ _temTotalItems :: !(Maybe (Textual Int32))
, _temNextPageToken :: !(Maybe Text)
, _temKind :: !Text
, _temItems :: !(Maybe [Template])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TemplateList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'temTotalItems'
--
-- * 'temNextPageToken'
--
-- * 'temKind'
--
-- * 'temItems'
templateList
:: TemplateList
templateList =
TemplateList'
{ _temTotalItems = Nothing
, _temNextPageToken = Nothing
, _temKind = "fusiontables#templateList"
, _temItems = Nothing
}
-- | Total number of templates for the table.
temTotalItems :: Lens' TemplateList (Maybe Int32)
temTotalItems
= lens _temTotalItems
(\ s a -> s{_temTotalItems = a})
. mapping _Coerce
-- | Token used to access the next page of this result. No token is displayed
-- if there are no more pages left.
temNextPageToken :: Lens' TemplateList (Maybe Text)
temNextPageToken
= lens _temNextPageToken
(\ s a -> s{_temNextPageToken = a})
-- | The kind of item this is. For a template list, this is always
-- fusiontables#templateList.
temKind :: Lens' TemplateList Text
temKind = lens _temKind (\ s a -> s{_temKind = a})
-- | List of all requested templates.
temItems :: Lens' TemplateList [Template]
temItems
= lens _temItems (\ s a -> s{_temItems = a}) .
_Default
. _Coerce
instance FromJSON TemplateList where
parseJSON
= withObject "TemplateList"
(\ o ->
TemplateList' <$>
(o .:? "totalItems") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "fusiontables#templateList")
<*> (o .:? "items" .!= mempty))
instance ToJSON TemplateList where
toJSON TemplateList'{..}
= object
(catMaybes
[("totalItems" .=) <$> _temTotalItems,
("nextPageToken" .=) <$> _temNextPageToken,
Just ("kind" .= _temKind),
("items" .=) <$> _temItems])
-- | Represents an import request.
--
-- /See:/ 'import'' smart constructor.
data Import =
Import'
{ _iKind :: !Text
, _iNumRowsReceived :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Import' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iKind'
--
-- * 'iNumRowsReceived'
import'
:: Import
import' = Import' {_iKind = "fusiontables#import", _iNumRowsReceived = Nothing}
-- | The kind of item this is. For an import, this is always
-- fusiontables#import.
iKind :: Lens' Import Text
iKind = lens _iKind (\ s a -> s{_iKind = a})
-- | The number of rows received from the import request.
iNumRowsReceived :: Lens' Import (Maybe Int64)
iNumRowsReceived
= lens _iNumRowsReceived
(\ s a -> s{_iNumRowsReceived = a})
. mapping _Coerce
instance FromJSON Import where
parseJSON
= withObject "Import"
(\ o ->
Import' <$>
(o .:? "kind" .!= "fusiontables#import") <*>
(o .:? "numRowsReceived"))
instance ToJSON Import where
toJSON Import'{..}
= object
(catMaybes
[Just ("kind" .= _iKind),
("numRowsReceived" .=) <$> _iNumRowsReceived])
-- | A background task on a table, initiated for time- or resource-consuming
-- operations such as changing column types or deleting all rows.
--
-- /See:/ 'task' smart constructor.
data Task =
Task'
{ _tasProgress :: !(Maybe Text)
, _tasTaskId :: !(Maybe (Textual Int64))
, _tasKind :: !Text
, _tasType :: !(Maybe Text)
, _tasStarted :: !(Maybe Bool)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Task' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tasProgress'
--
-- * 'tasTaskId'
--
-- * 'tasKind'
--
-- * 'tasType'
--
-- * 'tasStarted'
task
:: Task
task =
Task'
{ _tasProgress = Nothing
, _tasTaskId = Nothing
, _tasKind = "fusiontables#task"
, _tasType = Nothing
, _tasStarted = Nothing
}
-- | Task percentage completion.
tasProgress :: Lens' Task (Maybe Text)
tasProgress
= lens _tasProgress (\ s a -> s{_tasProgress = a})
-- | Identifier for the task.
tasTaskId :: Lens' Task (Maybe Int64)
tasTaskId
= lens _tasTaskId (\ s a -> s{_tasTaskId = a}) .
mapping _Coerce
-- | Type of the resource. This is always \"fusiontables#task\".
tasKind :: Lens' Task Text
tasKind = lens _tasKind (\ s a -> s{_tasKind = a})
-- | Type of background task.
tasType :: Lens' Task (Maybe Text)
tasType = lens _tasType (\ s a -> s{_tasType = a})
-- | false while the table is busy with some other task. true if this
-- background task is currently running.
tasStarted :: Lens' Task (Maybe Bool)
tasStarted
= lens _tasStarted (\ s a -> s{_tasStarted = a})
instance FromJSON Task where
parseJSON
= withObject "Task"
(\ o ->
Task' <$>
(o .:? "progress") <*> (o .:? "taskId") <*>
(o .:? "kind" .!= "fusiontables#task")
<*> (o .:? "type")
<*> (o .:? "started"))
instance ToJSON Task where
toJSON Task'{..}
= object
(catMaybes
[("progress" .=) <$> _tasProgress,
("taskId" .=) <$> _tasTaskId,
Just ("kind" .= _tasKind), ("type" .=) <$> _tasType,
("started" .=) <$> _tasStarted])
-- | Represents the contents of InfoWindow templates.
--
-- /See:/ 'template' smart constructor.
data Template =
Template'
{ _ttAutomaticColumnNames :: !(Maybe [Text])
, _ttTemplateId :: !(Maybe (Textual Int32))
, _ttKind :: !Text
, _ttBody :: !(Maybe Text)
, _ttName :: !(Maybe Text)
, _ttTableId :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Template' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ttAutomaticColumnNames'
--
-- * 'ttTemplateId'
--
-- * 'ttKind'
--
-- * 'ttBody'
--
-- * 'ttName'
--
-- * 'ttTableId'
template
:: Template
template =
Template'
{ _ttAutomaticColumnNames = Nothing
, _ttTemplateId = Nothing
, _ttKind = "fusiontables#template"
, _ttBody = Nothing
, _ttName = Nothing
, _ttTableId = Nothing
}
-- | List of columns from which the template is to be automatically
-- constructed. Only one of body or automaticColumns can be specified.
ttAutomaticColumnNames :: Lens' Template [Text]
ttAutomaticColumnNames
= lens _ttAutomaticColumnNames
(\ s a -> s{_ttAutomaticColumnNames = a})
. _Default
. _Coerce
-- | Identifier for the template, unique within the context of a particular
-- table.
ttTemplateId :: Lens' Template (Maybe Int32)
ttTemplateId
= lens _ttTemplateId (\ s a -> s{_ttTemplateId = a})
. mapping _Coerce
-- | The kind of item this is. For a template, this is always
-- fusiontables#template.
ttKind :: Lens' Template Text
ttKind = lens _ttKind (\ s a -> s{_ttKind = a})
-- | Body of the template. It contains HTML with {column_name} to insert
-- values from a particular column. The body is sanitized to remove certain
-- tags, e.g., script. Only one of body or automaticColumns can be
-- specified.
ttBody :: Lens' Template (Maybe Text)
ttBody = lens _ttBody (\ s a -> s{_ttBody = a})
-- | Optional name assigned to a template.
ttName :: Lens' Template (Maybe Text)
ttName = lens _ttName (\ s a -> s{_ttName = a})
-- | Identifier for the table for which the template is defined.
ttTableId :: Lens' Template (Maybe Text)
ttTableId
= lens _ttTableId (\ s a -> s{_ttTableId = a})
instance FromJSON Template where
parseJSON
= withObject "Template"
(\ o ->
Template' <$>
(o .:? "automaticColumnNames" .!= mempty) <*>
(o .:? "templateId")
<*> (o .:? "kind" .!= "fusiontables#template")
<*> (o .:? "body")
<*> (o .:? "name")
<*> (o .:? "tableId"))
instance ToJSON Template where
toJSON Template'{..}
= object
(catMaybes
[("automaticColumnNames" .=) <$>
_ttAutomaticColumnNames,
("templateId" .=) <$> _ttTemplateId,
Just ("kind" .= _ttKind), ("body" .=) <$> _ttBody,
("name" .=) <$> _ttName,
("tableId" .=) <$> _ttTableId])
-- | Represents a PointStyle within a StyleSetting
--
-- /See:/ 'pointStyle' smart constructor.
data PointStyle =
PointStyle'
{ _psIconName :: !(Maybe Text)
, _psIconStyler :: !(Maybe StyleFunction)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PointStyle' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psIconName'
--
-- * 'psIconStyler'
pointStyle
:: PointStyle
pointStyle = PointStyle' {_psIconName = Nothing, _psIconStyler = Nothing}
-- | Name of the icon. Use values defined in
-- http:\/\/www.google.com\/fusiontables\/DataSource?dsrcid=308519
psIconName :: Lens' PointStyle (Maybe Text)
psIconName
= lens _psIconName (\ s a -> s{_psIconName = a})
-- | Column or a bucket value from which the icon name is to be determined.
psIconStyler :: Lens' PointStyle (Maybe StyleFunction)
psIconStyler
= lens _psIconStyler (\ s a -> s{_psIconStyler = a})
instance FromJSON PointStyle where
parseJSON
= withObject "PointStyle"
(\ o ->
PointStyle' <$>
(o .:? "iconName") <*> (o .:? "iconStyler"))
instance ToJSON PointStyle where
toJSON PointStyle'{..}
= object
(catMaybes
[("iconName" .=) <$> _psIconName,
("iconStyler" .=) <$> _psIconStyler])
-- | Represents a PolygonStyle within a StyleSetting
--
-- /See:/ 'polygonStyle' smart constructor.
data PolygonStyle =
PolygonStyle'
{ _psFillColorStyler :: !(Maybe StyleFunction)
, _psFillColor :: !(Maybe Text)
, _psStrokeColorStyler :: !(Maybe StyleFunction)
, _psStrokeWeight :: !(Maybe (Textual Int32))
, _psStrokeOpacity :: !(Maybe (Textual Double))
, _psFillOpacity :: !(Maybe (Textual Double))
, _psStrokeWeightStyler :: !(Maybe StyleFunction)
, _psStrokeColor :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PolygonStyle' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psFillColorStyler'
--
-- * 'psFillColor'
--
-- * 'psStrokeColorStyler'
--
-- * 'psStrokeWeight'
--
-- * 'psStrokeOpacity'
--
-- * 'psFillOpacity'
--
-- * 'psStrokeWeightStyler'
--
-- * 'psStrokeColor'
polygonStyle
:: PolygonStyle
polygonStyle =
PolygonStyle'
{ _psFillColorStyler = Nothing
, _psFillColor = Nothing
, _psStrokeColorStyler = Nothing
, _psStrokeWeight = Nothing
, _psStrokeOpacity = Nothing
, _psFillOpacity = Nothing
, _psStrokeWeightStyler = Nothing
, _psStrokeColor = Nothing
}
-- | Column-value, gradient, or bucket styler that is used to determine the
-- interior color and opacity of the polygon.
psFillColorStyler :: Lens' PolygonStyle (Maybe StyleFunction)
psFillColorStyler
= lens _psFillColorStyler
(\ s a -> s{_psFillColorStyler = a})
-- | Color of the interior of the polygon in #RRGGBB format.
psFillColor :: Lens' PolygonStyle (Maybe Text)
psFillColor
= lens _psFillColor (\ s a -> s{_psFillColor = a})
-- | Column-value, gradient or buckets styler that is used to determine the
-- border color and opacity.
psStrokeColorStyler :: Lens' PolygonStyle (Maybe StyleFunction)
psStrokeColorStyler
= lens _psStrokeColorStyler
(\ s a -> s{_psStrokeColorStyler = a})
-- | Width of the polygon border in pixels.
psStrokeWeight :: Lens' PolygonStyle (Maybe Int32)
psStrokeWeight
= lens _psStrokeWeight
(\ s a -> s{_psStrokeWeight = a})
. mapping _Coerce
-- | Opacity of the polygon border: 0.0 (transparent) to 1.0 (opaque).
psStrokeOpacity :: Lens' PolygonStyle (Maybe Double)
psStrokeOpacity
= lens _psStrokeOpacity
(\ s a -> s{_psStrokeOpacity = a})
. mapping _Coerce
-- | Opacity of the interior of the polygon: 0.0 (transparent) to 1.0
-- (opaque).
psFillOpacity :: Lens' PolygonStyle (Maybe Double)
psFillOpacity
= lens _psFillOpacity
(\ s a -> s{_psFillOpacity = a})
. mapping _Coerce
-- | Column-value or bucket styler that is used to determine the width of the
-- polygon border.
psStrokeWeightStyler :: Lens' PolygonStyle (Maybe StyleFunction)
psStrokeWeightStyler
= lens _psStrokeWeightStyler
(\ s a -> s{_psStrokeWeightStyler = a})
-- | Color of the polygon border in #RRGGBB format.
psStrokeColor :: Lens' PolygonStyle (Maybe Text)
psStrokeColor
= lens _psStrokeColor
(\ s a -> s{_psStrokeColor = a})
instance FromJSON PolygonStyle where
parseJSON
= withObject "PolygonStyle"
(\ o ->
PolygonStyle' <$>
(o .:? "fillColorStyler") <*> (o .:? "fillColor") <*>
(o .:? "strokeColorStyler")
<*> (o .:? "strokeWeight")
<*> (o .:? "strokeOpacity")
<*> (o .:? "fillOpacity")
<*> (o .:? "strokeWeightStyler")
<*> (o .:? "strokeColor"))
instance ToJSON PolygonStyle where
toJSON PolygonStyle'{..}
= object
(catMaybes
[("fillColorStyler" .=) <$> _psFillColorStyler,
("fillColor" .=) <$> _psFillColor,
("strokeColorStyler" .=) <$> _psStrokeColorStyler,
("strokeWeight" .=) <$> _psStrokeWeight,
("strokeOpacity" .=) <$> _psStrokeOpacity,
("fillOpacity" .=) <$> _psFillOpacity,
("strokeWeightStyler" .=) <$> _psStrokeWeightStyler,
("strokeColor" .=) <$> _psStrokeColor])
-- | Gradient function that interpolates a range of colors based on column
-- value.
--
-- /See:/ 'styleFunctionGradient' smart constructor.
data StyleFunctionGradient =
StyleFunctionGradient'
{ _sfgMax :: !(Maybe (Textual Double))
, _sfgMin :: !(Maybe (Textual Double))
, _sfgColors :: !(Maybe [StyleFunctionGradientColorsItem])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleFunctionGradient' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sfgMax'
--
-- * 'sfgMin'
--
-- * 'sfgColors'
styleFunctionGradient
:: StyleFunctionGradient
styleFunctionGradient =
StyleFunctionGradient'
{_sfgMax = Nothing, _sfgMin = Nothing, _sfgColors = Nothing}
-- | Higher-end of the interpolation range: rows with this value will be
-- assigned to colors[n-1].
sfgMax :: Lens' StyleFunctionGradient (Maybe Double)
sfgMax
= lens _sfgMax (\ s a -> s{_sfgMax = a}) .
mapping _Coerce
-- | Lower-end of the interpolation range: rows with this value will be
-- assigned to colors[0].
sfgMin :: Lens' StyleFunctionGradient (Maybe Double)
sfgMin
= lens _sfgMin (\ s a -> s{_sfgMin = a}) .
mapping _Coerce
-- | Array with two or more colors.
sfgColors :: Lens' StyleFunctionGradient [StyleFunctionGradientColorsItem]
sfgColors
= lens _sfgColors (\ s a -> s{_sfgColors = a}) .
_Default
. _Coerce
instance FromJSON StyleFunctionGradient where
parseJSON
= withObject "StyleFunctionGradient"
(\ o ->
StyleFunctionGradient' <$>
(o .:? "max") <*> (o .:? "min") <*>
(o .:? "colors" .!= mempty))
instance ToJSON StyleFunctionGradient where
toJSON StyleFunctionGradient'{..}
= object
(catMaybes
[("max" .=) <$> _sfgMax, ("min" .=) <$> _sfgMin,
("colors" .=) <$> _sfgColors])
-- | Specifies the details of a column in a table.
--
-- /See:/ 'column' smart constructor.
data Column =
Column'
{ _cColumnJSONSchema :: !(Maybe Text)
, _cGraphPredicate :: !(Maybe Text)
, _cKind :: !Text
, _cBaseColumn :: !(Maybe ColumnBaseColumn)
, _cColumnPropertiesJSON :: !(Maybe Text)
, _cName :: !(Maybe Text)
, _cType :: !(Maybe Text)
, _cFormatPattern :: !(Maybe Text)
, _cColumnId :: !(Maybe (Textual Int32))
, _cValidValues :: !(Maybe [Text])
, _cValidateData :: !(Maybe Bool)
, _cDescription :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Column' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cColumnJSONSchema'
--
-- * 'cGraphPredicate'
--
-- * 'cKind'
--
-- * 'cBaseColumn'
--
-- * 'cColumnPropertiesJSON'
--
-- * 'cName'
--
-- * 'cType'
--
-- * 'cFormatPattern'
--
-- * 'cColumnId'
--
-- * 'cValidValues'
--
-- * 'cValidateData'
--
-- * 'cDescription'
column
:: Column
column =
Column'
{ _cColumnJSONSchema = Nothing
, _cGraphPredicate = Nothing
, _cKind = "fusiontables#column"
, _cBaseColumn = Nothing
, _cColumnPropertiesJSON = Nothing
, _cName = Nothing
, _cType = Nothing
, _cFormatPattern = Nothing
, _cColumnId = Nothing
, _cValidValues = Nothing
, _cValidateData = Nothing
, _cDescription = Nothing
}
-- | JSON schema for interpreting JSON in this column.
cColumnJSONSchema :: Lens' Column (Maybe Text)
cColumnJSONSchema
= lens _cColumnJSONSchema
(\ s a -> s{_cColumnJSONSchema = a})
-- | Column graph predicate. Used to map table to graph data model
-- (subject,predicate,object) See W3C Graph-based Data Model.
cGraphPredicate :: Lens' Column (Maybe Text)
cGraphPredicate
= lens _cGraphPredicate
(\ s a -> s{_cGraphPredicate = a})
-- | The kind of item this is. For a column, this is always
-- fusiontables#column.
cKind :: Lens' Column Text
cKind = lens _cKind (\ s a -> s{_cKind = a})
-- | Identifier of the base column. If present, this column is derived from
-- the specified base column.
cBaseColumn :: Lens' Column (Maybe ColumnBaseColumn)
cBaseColumn
= lens _cBaseColumn (\ s a -> s{_cBaseColumn = a})
-- | JSON object containing custom column properties.
cColumnPropertiesJSON :: Lens' Column (Maybe Text)
cColumnPropertiesJSON
= lens _cColumnPropertiesJSON
(\ s a -> s{_cColumnPropertiesJSON = a})
-- | Name of the column.
cName :: Lens' Column (Maybe Text)
cName = lens _cName (\ s a -> s{_cName = a})
-- | Type of the column.
cType :: Lens' Column (Maybe Text)
cType = lens _cType (\ s a -> s{_cType = a})
-- | Format pattern. Acceptable values are:
-- DT_DATE_MEDIUM, e.g. Dec 24, 2008;
-- DT_DATE_SHORT, for example 12\/24\/08;
-- DT_DATE_TIME_MEDIUM, for example Dec 24, 2008 8:30:45 PM;
-- DT_DATE_TIME_SHORT, for example 12\/24\/08 8:30 PM;
-- DT_DAY_MONTH_2_DIGIT_YEAR, for example 24\/12\/08;
-- DT_DAY_MONTH_2_DIGIT_YEAR_TIME, for example 24\/12\/08 20:30;
-- DT_DAY_MONTH_2_DIGIT_YEAR_TIME_MERIDIAN, for example 24\/12\/08 8:30 PM;
-- DT_DAY_MONTH_4_DIGIT_YEAR, for example 24\/12\/2008;
-- DT_DAY_MONTH_4_DIGIT_YEAR_TIME, for example 24\/12\/2008 20:30;
-- DT_DAY_MONTH_4_DIGIT_YEAR_TIME_MERIDIAN, for example 24\/12\/2008 8:30 PM;
-- DT_ISO_YEAR_MONTH_DAY, for example 2008-12-24;
-- DT_ISO_YEAR_MONTH_DAY_TIME, for example 2008-12-24 20:30:45;
-- DT_MONTH_DAY_4_DIGIT_YEAR, for example 12\/24\/2008;
-- DT_TIME_LONG, for example 8:30:45 PM UTC-6;
-- DT_TIME_MEDIUM, for example 8:30:45 PM;
-- DT_TIME_SHORT, for example 8:30 PM;
-- DT_YEAR_ONLY, for example 2008;
-- HIGHLIGHT_UNTYPED_CELLS: highlight cell data that does not match the data type;
-- NONE: no formatting (default);
-- NUMBER_CURRENCY, for example $1234.56;
-- NUMBER_DEFAULT, for example 1,234.56;
-- NUMBER_INTEGER, for example 1235;
-- NUMBER_NO_SEPARATOR, for example 1234.56;
-- NUMBER_PERCENT, for example 123,456%;
-- NUMBER_SCIENTIFIC, for example 1E3;
-- STRING_EIGHT_LINE_IMAGE: displays thumbnail images as tall as eight lines of text;
-- STRING_FOUR_LINE_IMAGE: displays thumbnail images as tall as four lines of text;
-- STRING_JSON_TEXT: allows editing of text as JSON in the UI;
-- STRING_JSON_LIST: allows editing of text as a JSON list in the UI;
-- STRING_LINK: treats the cell as a link (must start with http:\/\/ or https:\/\/);
-- STRING_ONE_LINE_IMAGE: displays thumbnail images as tall as one line of text;
-- STRING_VIDEO_OR_MAP: displays a video or map thumbnail.
cFormatPattern :: Lens' Column (Maybe Text)
cFormatPattern
= lens _cFormatPattern
(\ s a -> s{_cFormatPattern = a})
-- | Identifier for the column.
cColumnId :: Lens' Column (Maybe Int32)
cColumnId
= lens _cColumnId (\ s a -> s{_cColumnId = a}) .
mapping _Coerce
-- | List of valid values used to validate data and supply a drop-down list
-- of values in the web application.
cValidValues :: Lens' Column [Text]
cValidValues
= lens _cValidValues (\ s a -> s{_cValidValues = a})
. _Default
. _Coerce
-- | If true, data entered via the web application is validated.
cValidateData :: Lens' Column (Maybe Bool)
cValidateData
= lens _cValidateData
(\ s a -> s{_cValidateData = a})
-- | Column description.
cDescription :: Lens' Column (Maybe Text)
cDescription
= lens _cDescription (\ s a -> s{_cDescription = a})
instance FromJSON Column where
parseJSON
= withObject "Column"
(\ o ->
Column' <$>
(o .:? "columnJsonSchema") <*>
(o .:? "graphPredicate")
<*> (o .:? "kind" .!= "fusiontables#column")
<*> (o .:? "baseColumn")
<*> (o .:? "columnPropertiesJson")
<*> (o .:? "name")
<*> (o .:? "type")
<*> (o .:? "formatPattern")
<*> (o .:? "columnId")
<*> (o .:? "validValues" .!= mempty)
<*> (o .:? "validateData")
<*> (o .:? "description"))
instance ToJSON Column where
toJSON Column'{..}
= object
(catMaybes
[("columnJsonSchema" .=) <$> _cColumnJSONSchema,
("graphPredicate" .=) <$> _cGraphPredicate,
Just ("kind" .= _cKind),
("baseColumn" .=) <$> _cBaseColumn,
("columnPropertiesJson" .=) <$>
_cColumnPropertiesJSON,
("name" .=) <$> _cName, ("type" .=) <$> _cType,
("formatPattern" .=) <$> _cFormatPattern,
("columnId" .=) <$> _cColumnId,
("validValues" .=) <$> _cValidValues,
("validateData" .=) <$> _cValidateData,
("description" .=) <$> _cDescription])
-- | Represents a table.
--
-- /See:/ 'table' smart constructor.
data Table =
Table'
{ _tabaIsExportable :: !(Maybe Bool)
, _tabaKind :: !Text
, _tabaColumnPropertiesJSONSchema :: !(Maybe Text)
, _tabaTablePropertiesJSONSchema :: !(Maybe Text)
, _tabaName :: !(Maybe Text)
, _tabaTablePropertiesJSON :: !(Maybe Text)
, _tabaColumns :: !(Maybe [Column])
, _tabaBaseTableIds :: !(Maybe [Text])
, _tabaTableId :: !(Maybe Text)
, _tabaSQL :: !(Maybe Text)
, _tabaDescription :: !(Maybe Text)
, _tabaAttribution :: !(Maybe Text)
, _tabaAttributionLink :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Table' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tabaIsExportable'
--
-- * 'tabaKind'
--
-- * 'tabaColumnPropertiesJSONSchema'
--
-- * 'tabaTablePropertiesJSONSchema'
--
-- * 'tabaName'
--
-- * 'tabaTablePropertiesJSON'
--
-- * 'tabaColumns'
--
-- * 'tabaBaseTableIds'
--
-- * 'tabaTableId'
--
-- * 'tabaSQL'
--
-- * 'tabaDescription'
--
-- * 'tabaAttribution'
--
-- * 'tabaAttributionLink'
table
:: Table
table =
Table'
{ _tabaIsExportable = Nothing
, _tabaKind = "fusiontables#table"
, _tabaColumnPropertiesJSONSchema = Nothing
, _tabaTablePropertiesJSONSchema = Nothing
, _tabaName = Nothing
, _tabaTablePropertiesJSON = Nothing
, _tabaColumns = Nothing
, _tabaBaseTableIds = Nothing
, _tabaTableId = Nothing
, _tabaSQL = Nothing
, _tabaDescription = Nothing
, _tabaAttribution = Nothing
, _tabaAttributionLink = Nothing
}
-- | Whether the table is exportable.
tabaIsExportable :: Lens' Table (Maybe Bool)
tabaIsExportable
= lens _tabaIsExportable
(\ s a -> s{_tabaIsExportable = a})
-- | The kind of item this is. For a table, this is always
-- fusiontables#table.
tabaKind :: Lens' Table Text
tabaKind = lens _tabaKind (\ s a -> s{_tabaKind = a})
-- | Default JSON schema for validating all JSON column properties.
tabaColumnPropertiesJSONSchema :: Lens' Table (Maybe Text)
tabaColumnPropertiesJSONSchema
= lens _tabaColumnPropertiesJSONSchema
(\ s a -> s{_tabaColumnPropertiesJSONSchema = a})
-- | JSON schema for validating the JSON table properties.
tabaTablePropertiesJSONSchema :: Lens' Table (Maybe Text)
tabaTablePropertiesJSONSchema
= lens _tabaTablePropertiesJSONSchema
(\ s a -> s{_tabaTablePropertiesJSONSchema = a})
-- | Name assigned to a table.
tabaName :: Lens' Table (Maybe Text)
tabaName = lens _tabaName (\ s a -> s{_tabaName = a})
-- | JSON object containing custom table properties.
tabaTablePropertiesJSON :: Lens' Table (Maybe Text)
tabaTablePropertiesJSON
= lens _tabaTablePropertiesJSON
(\ s a -> s{_tabaTablePropertiesJSON = a})
-- | Columns in the table.
tabaColumns :: Lens' Table [Column]
tabaColumns
= lens _tabaColumns (\ s a -> s{_tabaColumns = a}) .
_Default
. _Coerce
-- | Base table identifier if this table is a view or merged table.
tabaBaseTableIds :: Lens' Table [Text]
tabaBaseTableIds
= lens _tabaBaseTableIds
(\ s a -> s{_tabaBaseTableIds = a})
. _Default
. _Coerce
-- | Encrypted unique alphanumeric identifier for the table.
tabaTableId :: Lens' Table (Maybe Text)
tabaTableId
= lens _tabaTableId (\ s a -> s{_tabaTableId = a})
-- | SQL that encodes the table definition for derived tables.
tabaSQL :: Lens' Table (Maybe Text)
tabaSQL = lens _tabaSQL (\ s a -> s{_tabaSQL = a})
-- | Description assigned to the table.
tabaDescription :: Lens' Table (Maybe Text)
tabaDescription
= lens _tabaDescription
(\ s a -> s{_tabaDescription = a})
-- | Attribution assigned to the table.
tabaAttribution :: Lens' Table (Maybe Text)
tabaAttribution
= lens _tabaAttribution
(\ s a -> s{_tabaAttribution = a})
-- | Optional link for attribution.
tabaAttributionLink :: Lens' Table (Maybe Text)
tabaAttributionLink
= lens _tabaAttributionLink
(\ s a -> s{_tabaAttributionLink = a})
instance FromJSON Table where
parseJSON
= withObject "Table"
(\ o ->
Table' <$>
(o .:? "isExportable") <*>
(o .:? "kind" .!= "fusiontables#table")
<*> (o .:? "columnPropertiesJsonSchema")
<*> (o .:? "tablePropertiesJsonSchema")
<*> (o .:? "name")
<*> (o .:? "tablePropertiesJson")
<*> (o .:? "columns" .!= mempty)
<*> (o .:? "baseTableIds" .!= mempty)
<*> (o .:? "tableId")
<*> (o .:? "sql")
<*> (o .:? "description")
<*> (o .:? "attribution")
<*> (o .:? "attributionLink"))
instance ToJSON Table where
toJSON Table'{..}
= object
(catMaybes
[("isExportable" .=) <$> _tabaIsExportable,
Just ("kind" .= _tabaKind),
("columnPropertiesJsonSchema" .=) <$>
_tabaColumnPropertiesJSONSchema,
("tablePropertiesJsonSchema" .=) <$>
_tabaTablePropertiesJSONSchema,
("name" .=) <$> _tabaName,
("tablePropertiesJson" .=) <$>
_tabaTablePropertiesJSON,
("columns" .=) <$> _tabaColumns,
("baseTableIds" .=) <$> _tabaBaseTableIds,
("tableId" .=) <$> _tabaTableId,
("sql" .=) <$> _tabaSQL,
("description" .=) <$> _tabaDescription,
("attribution" .=) <$> _tabaAttribution,
("attributionLink" .=) <$> _tabaAttributionLink])
-- | Represents a LineStyle within a StyleSetting
--
-- /See:/ 'lineStyle' smart constructor.
data LineStyle =
LineStyle'
{ _lsStrokeColorStyler :: !(Maybe StyleFunction)
, _lsStrokeWeight :: !(Maybe (Textual Int32))
, _lsStrokeOpacity :: !(Maybe (Textual Double))
, _lsStrokeWeightStyler :: !(Maybe StyleFunction)
, _lsStrokeColor :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LineStyle' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsStrokeColorStyler'
--
-- * 'lsStrokeWeight'
--
-- * 'lsStrokeOpacity'
--
-- * 'lsStrokeWeightStyler'
--
-- * 'lsStrokeColor'
lineStyle
:: LineStyle
lineStyle =
LineStyle'
{ _lsStrokeColorStyler = Nothing
, _lsStrokeWeight = Nothing
, _lsStrokeOpacity = Nothing
, _lsStrokeWeightStyler = Nothing
, _lsStrokeColor = Nothing
}
-- | Column-value, gradient or buckets styler that is used to determine the
-- line color and opacity.
lsStrokeColorStyler :: Lens' LineStyle (Maybe StyleFunction)
lsStrokeColorStyler
= lens _lsStrokeColorStyler
(\ s a -> s{_lsStrokeColorStyler = a})
-- | Width of the line in pixels.
lsStrokeWeight :: Lens' LineStyle (Maybe Int32)
lsStrokeWeight
= lens _lsStrokeWeight
(\ s a -> s{_lsStrokeWeight = a})
. mapping _Coerce
-- | Opacity of the line : 0.0 (transparent) to 1.0 (opaque).
lsStrokeOpacity :: Lens' LineStyle (Maybe Double)
lsStrokeOpacity
= lens _lsStrokeOpacity
(\ s a -> s{_lsStrokeOpacity = a})
. mapping _Coerce
-- | Column-value or bucket styler that is used to determine the width of the
-- line.
lsStrokeWeightStyler :: Lens' LineStyle (Maybe StyleFunction)
lsStrokeWeightStyler
= lens _lsStrokeWeightStyler
(\ s a -> s{_lsStrokeWeightStyler = a})
-- | Color of the line in #RRGGBB format.
lsStrokeColor :: Lens' LineStyle (Maybe Text)
lsStrokeColor
= lens _lsStrokeColor
(\ s a -> s{_lsStrokeColor = a})
instance FromJSON LineStyle where
parseJSON
= withObject "LineStyle"
(\ o ->
LineStyle' <$>
(o .:? "strokeColorStyler") <*>
(o .:? "strokeWeight")
<*> (o .:? "strokeOpacity")
<*> (o .:? "strokeWeightStyler")
<*> (o .:? "strokeColor"))
instance ToJSON LineStyle where
toJSON LineStyle'{..}
= object
(catMaybes
[("strokeColorStyler" .=) <$> _lsStrokeColorStyler,
("strokeWeight" .=) <$> _lsStrokeWeight,
("strokeOpacity" .=) <$> _lsStrokeOpacity,
("strokeWeightStyler" .=) <$> _lsStrokeWeightStyler,
("strokeColor" .=) <$> _lsStrokeColor])
| brendanhay/gogol | gogol-fusiontables/gen/Network/Google/FusionTables/Types/Product.hs | mpl-2.0 | 60,269 | 0 | 23 | 15,925 | 12,626 | 7,244 | 5,382 | 1,392 | 1 |
--
-- Copyright 2017-2018 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE RecordWildCards #-}
module BoardGame.Server.Domain.Play (
Play(..)
, BasePlay(..)
, mkWordPlay
, mkSwapPlay
)
where
import qualified Data.ByteString.Lazy.Char8 as BC
import GHC.Generics (Generic)
import Data.Aeson (FromJSON, ToJSON)
import qualified Data.Aeson as Aeson
import Bolour.Plane.Domain.Point (Point)
import BoardGame.Common.Domain.Piece (Piece)
import BoardGame.Server.Domain.Player (PlayerType(..))
import BoardGame.Common.Domain.PlayPiece (PlayPiece)
data PlayType = WordPlayType | SwapPlayType
deriving (Eq, Show, Generic)
instance FromJSON PlayType
instance ToJSON PlayType
data BasePlay = BasePlay {
playType :: PlayType
, playNumber :: Int
, playerType :: PlayerType
, scores :: [Int]
}
deriving (Eq, Show, Generic)
instance FromJSON BasePlay
instance ToJSON BasePlay
-- | Representation of a single play.
data Play =
WordPlay {
basePlay :: BasePlay
, playPieces :: [PlayPiece]
, replacementPieces :: [Piece]
, deadPoints :: [Point]
}
| SwapPlay {
basePlay :: BasePlay
, swappedPiece :: Piece
, newPiece :: Piece
}
deriving (Eq, Show, Generic)
instance FromJSON Play
instance ToJSON Play
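-- | Serialize a play to its JSON string representation.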
encode :: Play -> String
encode play = BC.unpack $ Aeson.encode play
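-- | Parse a play back from its JSON string representation.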
decode :: String -> Maybe Play
decode encoded = Aeson.decode $ BC.pack encoded
mkWordPlay ::
Int
-> PlayerType
-> [Int]
-> [PlayPiece]
-> [Piece]
-> [Point]
-> Play
mkWordPlay playNumber playerType scores =
let basePlay = BasePlay WordPlayType playNumber playerType scores
in WordPlay basePlay
mkSwapPlay ::
Int
-> PlayerType
-> [Int]
-> Piece
-> Piece
-> Play
mkSwapPlay playNumber playerType scores =
let basePlay = BasePlay SwapPlayType playNumber playerType scores
in SwapPlay basePlay
| azadbolour/boardgame | haskell-server/src/BoardGame/Server/Domain/Play.hs | agpl-3.0 | 2,073 | 0 | 11 | 386 | 529 | 306 | 223 | 67 | 1 |
-- yammat - Yet Another MateMAT
-- Copyright (C) 2015 Amedeo Molnár
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published
-- by the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module Handler.Avatar where
import Import
import Handler.Common
import Data.Conduit.Binary
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Base64
import Data.Maybe (fromJust)
import qualified Crypto.Hash.SHA3 as SHA3
import Codec.Picture
import Codec.Picture.Metadata as PM hiding (delete)
import Codec.Picture.ScaleDCT
getAvatarR :: Handler Html
getAvatarR = do
avatars <- runDB $ selectList [] [Asc AvatarIdent]
defaultLayout $ do
setTitleI MsgAvatars
$(widgetFile "avatars")
getNewAvatarR :: Handler Html
getNewAvatarR = do
(newAvatarWidget, enctype) <- generateFormPost
$ renderBootstrap3 BootstrapBasicForm $ avatarNewForm
defaultLayout $ do
setTitleI MsgNewAvatar
$(widgetFile "newAvatar")
postNewAvatarR :: Handler Html
postNewAvatarR = do
((res, _), _) <- runFormPost
$ renderBootstrap3 BootstrapBasicForm avatarNewForm
case res of
FormSuccess na -> do
raw <- runResourceT $ fileSource (avatarNewFile na) $$ sinkLbs
tdata <- generateThumb $ B.concat $ L.toChunks raw
runDB $ insert_ $ Avatar
(avatarNewIdent na)
(thumbContent tdata)
(thumbHash tdata)
setMessageI MsgAvatarUploadSuccessfull
redirect HomeR
_ -> do
setMessageI MsgErrorOccured
redirect NewAvatarR
avatarNewForm :: AForm Handler AvatarNew
avatarNewForm = AvatarNew
<$> areq textField (bfs MsgAvatarIdent) Nothing
<*> areq fileField (bfs MsgAvatarFile) Nothing
<* bootstrapSubmit (msgToBSSubmit MsgSubmit)
data AvatarNew = AvatarNew
{ avatarNewIdent :: Text
, avatarNewFile :: FileInfo
}
getModifyAvatarR :: AvatarId -> Handler Html
getModifyAvatarR aId = do
ma <- runDB $ get aId
case ma of
Just avatar -> do
(avatarModifyWidget, enctype) <- generateFormPost
$ renderBootstrap3 BootstrapBasicForm
$ avatarModForm avatar
defaultLayout $ do
setTitleI MsgModifyAvatar
$(widgetFile "modifyAvatar")
Nothing -> do
setMessageI MsgAvatarUnknown
redirect AvatarR
postModifyAvatarR :: AvatarId -> Handler Html
postModifyAvatarR aId = do
ma <- runDB $ get aId
case ma of
Just avatar -> do
((res, _), _) <- runFormPost
$ renderBootstrap3 BootstrapBasicForm
$ avatarModForm avatar
case res of
FormSuccess md -> do
updateAvatar aId md
setMessageI MsgAvatarUpdateSuccessfull
redirect AvatarR
_ -> do
setMessageI MsgErrorOccured
redirect $ ModifyAvatarR aId
Nothing -> do
setMessageI MsgAvatarUnknown
redirect HomeR
avatarModForm :: Avatar -> AForm Handler AvatarMod
avatarModForm a = AvatarMod
<$> areq textField (bfs MsgAvatarIdent) (Just $ avatarIdent a)
<*> aopt fileField (bfs MsgAvatarFileChange) Nothing
<* bootstrapSubmit (msgToBSSubmit MsgSubmit)
data AvatarMod = AvatarMod
{ avatarModIdent :: Text
, avatarModFile :: Maybe FileInfo
}
updateAvatar :: AvatarId -> AvatarMod -> Handler ()
updateAvatar aId (AvatarMod ident Nothing) =
runDB $ update aId [AvatarIdent =. ident]
updateAvatar aId (AvatarMod ident (Just fi)) = do
raw <- runResourceT $ fileSource fi $$ sinkLbs
tdata <- generateThumb $ B.concat $ L.toChunks raw
runDB $ update aId
[ AvatarIdent =. ident
, AvatarData =. thumbContent tdata
, AvatarHash =. thumbHash tdata
]
data ThumbData = ThumbData
{ thumbContent :: ByteString
, thumbHash :: ByteString
}
generateThumb :: ByteString -> Handler ThumbData
generateThumb raw = do
let eimg = decodeImageWithMetadata raw
case eimg of
Left e -> error e
Right (img, meta) ->
return $ ThumbData
{ thumbContent = ava
, thumbHash = h
}
where
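-- Scale the decoded image to a thumbnail 140 pixels high, preserving its
-- aspect ratio, and keep a base64-encoded SHA3 hash of the PNG bytes
-- (served later as the avatar's ETag).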
w1 = fromIntegral $ fromJust $ PM.lookup Width meta :: Int
h1 = fromIntegral $ fromJust $ PM.lookup Height meta :: Int
h2 = 140 :: Int
w2 = floor ((fromIntegral w1 :: Double) / (fromIntegral h1 :: Double) * (fromIntegral h2 :: Double)) :: Int
scimg = scale (w2, h2) $ convertRGBA8 img
ava = (B.concat . L.toChunks) $ encodePng scimg
h = encode (SHA3.hash 32 ava)
getGetAvatarR :: AvatarId -> Handler TypedContent
getGetAvatarR aId = do
avatar <- runDB $ get404 aId
setEtag $ decodeUtf8 $ avatarHash avatar
return $ TypedContent typePng $ toContent $ avatarData avatar
getAvatarDeleteR :: AvatarId -> Handler Html
getAvatarDeleteR aId = do
ma <- runDB $ get aId
case ma of
Just _ -> do
c <- runDB $ selectList [UserAvatar ==. Just aId] []
d <- runDB $ selectList [BeverageAvatar ==. Just aId] []
e <- runDB $ selectList [SupplierAvatar ==. Just aId] []
if null c && null d && null e
then do
runDB $ delete aId
setMessageI MsgAvatarDeleted
redirect HomeR
else do
setMessageI MsgAvatarInUseError
redirect AvatarR
Nothing -> do
setMessageI MsgAvatarUnknown
redirect AvatarR
| nek0/yammat | Handler/Avatar.hs | agpl-3.0 | 5,785 | 0 | 18 | 1,380 | 1,564 | 769 | 795 | 144 | 3 |
{- x2
Map an expanded, transposed (horizontal, if you will) file like
PINA XXHHHHLLL
PINB 01101Z010
To binary bytes as given by the wavetable
TODO This is a bit slower than I'd like
-}
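{-
Worked sketch of the mapping, inferred from the code below (the state
alphabet and `pad` come from Lib, so the exact details are assumptions):
each pair of state characters indexes the 256-entry wavetable and becomes
one output byte, and each output line is "<pin> <byte count> <bytes>", e.g.

    PINB 01101Z010     -- 9 states, padded to an even count
    PINB 5 .....       -- 5 wavetable bytes
-}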
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Char8 as B
import qualified Data.HashMap.Strict as Map
import Data.ByteString.Char8 (ByteString)
import Data.Char (chr)
import Data.HashMap.Strict (HashMap)
-- Lib is assumed to provide pad, canonical, getInput, states, and the
-- option-parser types and combinators (OptionsParser, argument, info, ...)
-- used below.
import Lib
waveTable :: HashMap ByteString Char
waveTable
= Map.fromList $
zip
[B.pack [a, b] | a <- states, b <- states]
(map chr [0..255])
defaultWave :: Char
defaultWave = Map.lookupDefault (chr 255) "xx" waveTable
statesMap :: ByteString -> ByteString
statesMap = B.pack . go
where
go bs
| B.null bs = []
| otherwise =
let
(s, rest) = B.splitAt 2 bs
in
Map.lookupDefault defaultWave s waveTable : go rest
xForm :: ByteString -> ByteString
xForm inp =
let
[pin, statesIn] = B.words inp
statesOut = statesMap . pad 2 . canonical $ statesIn
in B.unwords
[ pin
, B.pack $ show ( B.length statesOut )
, statesOut
]
data Opts =
Opts
{ inputFile :: FilePath -- ^ input .hus file (empty: read stdin, per the help text)
}
parseOpts :: OptionsParser Opts
parseOpts = Opts
<$> argument str
( metavar "HUSFILE.hus"
<> value ""
<> help "A Horizontal-Uncompressed State file (or use stdin)")
opts :: ParserInfo Opts
opts = info (parseOpts <**> helper)
( fullDesc
<> progDesc "Map states in a Horizontal-Uncompressed State file to the standard X2 wavetable and output Horizontal-Uncompressed Binary"
<> header "hus-x2-hub - apply standard X2 wavetable")
main :: IO ()
main = do
Opts inpFile <- execParser opts
contents <- getInput inpFile
mapM_
(B.putStrLn
. xForm
)
(B.lines contents)
| gitfoxi/vcd-haskell | app/hus-x2-hub.hs | agpl-3.0 | 1,708 | 7 | 12 | 433 | 478 | 237 | 241 | 51 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import qualified Prelude as P
import MatrixMul3x3
import System.RedPitaya.Bus
import CLaSH.Prelude
import System.RedPitaya.Fpga
import System.RedPitaya.Tcp
import Control.Monad.IO.Class
import System.Environment
matA = (1 :> 2 :> 3 :> Nil) :>
(4 :> 5 :> 6 :> Nil) :>
(7 :> 8 :> 9 :> Nil) :> Nil :: M3x3
matB = (10 :> 11 :> 12 :> Nil) :>
(13 :> 14 :> 15 :> Nil) :>
(16 :> 17 :> 18 :> Nil) :> Nil :: M3x3
multiply :: (M3x3,M3x3) -> NetworkFpgaSetGet M3x3
multiply = callCore $(bTQ matrixMul3x3sig) 5 0 5 0
rpPort = 4242
rpIp = "10.42.0.11"
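-- Connect to the Red Pitaya at rpIp:rpPort, run the 3x3 matrix-multiply core
-- on (matA, matB), and print both operands and the product.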
main = runRemoteRp rpIp rpPort $ do
m <- multiply (matA,matB)
let p = liftIO . P.putStrLn . P.show
p matA
p matB
p m -- matA * matB
| ra1u/lambdaya-bus | examples/MatrixMultiply/Client.hs | lgpl-3.0 | 982 | 0 | 13 | 267 | 305 | 168 | 137 | 31 | 1 |
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ImpredicativeTypes #-}
import Control.Monad
import Control.Applicative
import Control.Exception
newtype DB c a = DB {
fromDB :: IO a
}
instance Functor (DB c) where
fmap = liftM
instance Applicative (DB c) where
pure = return
(<*>) = ap
instance Monad (DB c) where
return x = DB $ return x
m >>= f = DB $ do
x <- fromDB m
y <- fromDB $ f x
return y
data Connection = Connection
data Pool = Pool
newtype SafeConn c = Safe Connection
getConn :: Pool -> IO Connection
getConn = undefined
returnConn :: Pool -> Connection -> IO ()
returnConn = undefined
withConn :: Pool -> (Connection -> IO a) -> IO a
withConn pool act =
bracket (getConn pool) (returnConn pool) act
query :: Connection -> String -> IO [String]
query = undefined
-- When a connection is returned to the pool, we can't use it anymore.
-- However, withConn can be misused to expose a connection outside of it.
evil :: Pool -> IO Connection
evil pool = withConn pool return
-- This is evil.
-- However, withSafeConn can't expose a (usable) connection to the outside
--
safeQuery :: SafeConn c -> String -> DB c [String]
safeQuery (Safe conn) str = DB (query conn str)
-- The following will report the error `c would escape its scope`.
-- That's because the scope of c extends only to the rightmost parenthesis of
-- (forall c. SafeConn c -> DB c a). So after `return` is called, `c` no longer
-- exists in the typing environment. You can't match it against anything.
-- notEvil pool = withSafeConn pool return
withSafeConn :: Pool -> (forall c. SafeConn c -> DB c a) -> IO a
withSafeConn pool act =
withConn pool $ \conn ->
fromDB (act (Safe conn))
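-- A minimal sketch of the intended safe usage; `goodQuery` and the query text
-- are illustrative additions, not part of the original sample. The connection
-- is only ever used inside the callback, so it cannot escape its scope.
goodQuery :: Pool -> IO [String]
goodQuery pool = withSafeConn pool (\conn -> safeQuery conn "SELECT 1")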
term1 :: [forall a. a]
term1 = undefined
term2 :: [forall a. a]
term2 = []
term3 :: [forall a. a]
term3 = [undefined]
term4 :: [forall a. a]
term4 = [undefined, undefined]
term5 :: [forall a. (Enum a) => a]
term5 = undefined
term6 :: [forall a. (Enum a) => a]
term6 = []
term7 :: [forall a. (Enum a) => a]
term7 = [undefined]
| seckcoder/lang-learn | haskell/samples/src/RankN/DB.hs | unlicense | 2,020 | 0 | 11 | 426 | 673 | 348 | 325 | -1 | -1 |
module MyWords where
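-- | Split a string on every occurrence of the given separator character,
-- dropping the separators. For example:
--
-- > myWords ' ' "sheryl wants fun" == ["sheryl", "wants", "fun"]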
myWords :: Char -> String -> [String]
myWords c s = go [] s c
where go acc "" c = reverse acc
go acc (x:ss) c
| x == c = go acc ss c
| otherwise = go ((takeWhile (/= c) (x:ss)) : acc) (dropWhile (/= c) (x:ss)) c
firstSen = "Tyger Tyger, burning bright\n"
secondSen = "In the forests of the night\n"
thirdSen = "What immortal hand or eye\n"
fourthSen = "Could frame thy fearful symmetry?"
sentences = firstSen ++ secondSen
++ thirdSen ++ fourthSen
myLines :: String -> [String]
myLines = myWords '\n'
shouldEqual =
[ "Tyger Tyger, burning bright"
, "In the forests of the night"
, "What immortal hand or eye"
, "Could frame thy fearful symmetry?"
]
main :: IO ()
main =
print $ "Are they equal? "
++ show (myLines sentences == shouldEqual)
| aniketd/learn.haskell | haskellbook/myWords.hs | unlicense | 860 | 0 | 14 | 245 | 263 | 140 | 123 | 24 | 2 |
module HeronianTriangles.A305704Spec (main, spec) where
import Test.Hspec
import HeronianTriangles.A305704 (a305704)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A305704" $
it "correctly computes the first 20 elements" $
take 20 (map a305704 [1..]) `shouldBe` expectedValue where
expectedValue = [6, 8, 9, 12, 15, 16, 18, 20, 21, 22, 24, 25, 27, 28, 30, 32, 33, 34, 35, 36]
| peterokagey/haskellOEIS | test/HeronianTriangles/A305704Spec.hs | apache-2.0 | 405 | 0 | 10 | 78 | 160 | 95 | 65 | 10 | 1 |
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Thrift.Protocol.Binary
( module Thrift.Protocol
, BinaryProtocol(..)
) where
import Control.Exception ( throw )
import Control.Monad
import Data.Bits
import Data.ByteString.Builder
#if __GLASGOW_HASKELL__ < 710
import Data.Functor
#endif
import Data.Int
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
import Data.Word
import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 )
import Thrift.Protocol
import Thrift.Transport
import Thrift.Types
import qualified Data.Attoparsec.ByteString as P
import qualified Data.Attoparsec.ByteString.Lazy as LP
import qualified Data.Binary as Binary
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as Map
import qualified Data.Text.Lazy as LT
-- | The Binary Protocol uses the standard Thrift 'TBinaryProtocol'
data BinaryProtocol a = BinaryProtocol a
-- ^ Construct a 'BinaryProtocol' with a 'Transport'
versionMask :: Int32
versionMask = fromIntegral (0xffff0000 :: Word32)
version1 :: Int32
version1 = fromIntegral (0x80010000 :: Word32)
-- NOTE: Reading and Writing functions rely on Builders and Data.Binary to
-- encode and decode data. Data.Binary assumes that the binary values it is
-- encoding to and decoding from are in BIG ENDIAN format, and converts the
-- endianness as necessary to match the local machine.
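-- For example, a field value of @TI32 1@ always contributes the four value
-- bytes 0x00 0x00 0x00 0x01 (big endian), independent of the host byte order.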
instance Protocol BinaryProtocol where
mkProtocol = BinaryProtocol
getTransport (BinaryProtocol t) = t
writeMessage p (n, t, s) = (writeMessageBegin >>)
where
writeMessageBegin = tWrite (getTransport p) $ toLazyByteString $
buildBinaryValue (TI32 (version1 .|. fromIntegral (fromEnum t))) <>
buildBinaryValue (TString $ encodeUtf8 n) <>
buildBinaryValue (TI32 s)
readMessage p = (readMessageBegin >>=)
where
readMessageBegin = runParser p $ do
TI32 ver <- parseBinaryValue T_I32
if ver .&. versionMask /= version1
then throw $ ProtocolExn PE_BAD_VERSION "Missing version identifier"
else do
TString s <- parseBinaryValue T_STRING
TI32 sz <- parseBinaryValue T_I32
return (decodeUtf8 s, toEnum $ fromIntegral $ ver .&. 0xFF, sz)
serializeVal _ = toLazyByteString . buildBinaryValue
deserializeVal _ ty bs =
case LP.eitherResult $ LP.parse (parseBinaryValue ty) bs of
Left s -> error s
Right val -> val
readVal p = runParser p . parseBinaryValue
-- | Writing Functions
buildBinaryValue :: ThriftVal -> Builder
buildBinaryValue (TStruct fields) = buildBinaryStruct fields <> buildType T_STOP
buildBinaryValue (TMap ky vt entries) =
buildType ky <>
buildType vt <>
int32BE (fromIntegral (length entries)) <>
buildBinaryMap entries
buildBinaryValue (TList ty entries) =
buildType ty <>
int32BE (fromIntegral (length entries)) <>
buildBinaryList entries
buildBinaryValue (TSet ty entries) =
buildType ty <>
int32BE (fromIntegral (length entries)) <>
buildBinaryList entries
buildBinaryValue (TBool b) =
word8 $ toEnum $ if b then 1 else 0
buildBinaryValue (TByte b) = int8 b
buildBinaryValue (TI16 i) = int16BE i
buildBinaryValue (TI32 i) = int32BE i
buildBinaryValue (TI64 i) = int64BE i
buildBinaryValue (TFloat f) = floatBE f
buildBinaryValue (TDouble d) = doubleBE d
buildBinaryValue (TString s) = int32BE len <> lazyByteString s
where
len :: Int32 = fromIntegral (LBS.length s)
buildBinaryStruct :: Map.HashMap Int16 (LT.Text, ThriftVal) -> Builder
buildBinaryStruct = Map.foldrWithKey combine mempty
where
combine fid (_,val) s =
buildTypeOf val <> int16BE fid <> buildBinaryValue val <> s
buildBinaryMap :: [(ThriftVal, ThriftVal)] -> Builder
buildBinaryMap = foldl combine mempty
where
combine s (key, val) = s <> buildBinaryValue key <> buildBinaryValue val
buildBinaryList :: [ThriftVal] -> Builder
buildBinaryList = foldr (mappend . buildBinaryValue) mempty
-- | Reading Functions
parseBinaryValue :: ThriftType -> P.Parser ThriftVal
parseBinaryValue (T_STRUCT _) = TStruct <$> parseBinaryStruct
parseBinaryValue (T_MAP _ _) = do
kt <- parseType
vt <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TMap kt vt <$> parseBinaryMap kt vt n
parseBinaryValue (T_LIST _) = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TList t <$> parseBinaryList t n
parseBinaryValue (T_SET _) = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TSet t <$> parseBinaryList t n
parseBinaryValue T_BOOL = TBool . (/=0) <$> P.anyWord8
parseBinaryValue T_BYTE = TByte . Binary.decode . LBS.fromStrict <$> P.take 1
parseBinaryValue T_I16 = TI16 . Binary.decode . LBS.fromStrict <$> P.take 2
parseBinaryValue T_I32 = TI32 . Binary.decode . LBS.fromStrict <$> P.take 4
parseBinaryValue T_I64 = TI64 . Binary.decode . LBS.fromStrict <$> P.take 8
parseBinaryValue T_FLOAT = TFloat . bsToFloating byteSwap32 <$> P.take 4
parseBinaryValue T_DOUBLE = TDouble . bsToFloating byteSwap64 <$> P.take 8
parseBinaryValue T_STRING = do
i :: Int32 <- Binary.decode . LBS.fromStrict <$> P.take 4
TString . LBS.fromStrict <$> P.take (fromIntegral i)
parseBinaryValue ty = error $ "Cannot read value of type " ++ show ty
parseBinaryStruct :: P.Parser (Map.HashMap Int16 (LT.Text, ThriftVal))
parseBinaryStruct = Map.fromList <$> P.manyTill parseField (matchType T_STOP)
where
parseField = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 2
v <- parseBinaryValue t
return (n, ("", v))
parseBinaryMap :: ThriftType -> ThriftType -> Int32 -> P.Parser [(ThriftVal, ThriftVal)]
parseBinaryMap kt vt n | n <= 0 = return []
| otherwise = do
k <- parseBinaryValue kt
v <- parseBinaryValue vt
((k,v) :) <$> parseBinaryMap kt vt (n-1)
parseBinaryList :: ThriftType -> Int32 -> P.Parser [ThriftVal]
parseBinaryList ty n | n <= 0 = return []
| otherwise = liftM2 (:) (parseBinaryValue ty)
(parseBinaryList ty (n-1))
-- | Write a type as a byte
buildType :: ThriftType -> Builder
buildType t = word8 $ fromIntegral $ fromEnum t
-- | Write type of a ThriftVal as a byte
buildTypeOf :: ThriftVal -> Builder
buildTypeOf v = buildType $ case v of
TStruct{} -> T_STRUCT Map.empty
TMap{} -> T_MAP T_VOID T_VOID
TList{} -> T_LIST T_VOID
TSet{} -> T_SET T_VOID
TBool{} -> T_BOOL
TByte{} -> T_BYTE
TI16{} -> T_I16
TI32{} -> T_I32
TI64{} -> T_I64
TString{} -> T_STRING
TFloat{} -> T_FLOAT
TDouble{} -> T_DOUBLE
-- | Read a byte as though it were a ThriftType
parseType :: P.Parser ThriftType
parseType = toEnum . fromIntegral <$> P.anyWord8
matchType :: ThriftType -> P.Parser ThriftType
matchType t = t <$ P.word8 (fromIntegral $ fromEnum t)
| facebook/fbthrift | thrift/lib/hs/Thrift/Protocol/Binary.hs | apache-2.0 | 7,761 | 0 | 18 | 1,546 | 2,160 | 1,103 | 1,057 | 150 | 12 |
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Trustworthy #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Copyright : (C) 2013-2015, University of Twente
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <christiaan.baaij@gmail.com>
-}
module CLaSH.Signal
( -- * Implicitly clocked synchronous signal
Signal
-- * Basic circuit functions
, signal
, register
, regEn
, mux
-- * Boolean connectives
, (.&&.), (.||.), not1
-- * Product/Signal isomorphism
, Bundle
, Unbundled
, bundle
, unbundle
-- * Simulation functions (not synthesisable)
, simulate
, simulateB
-- * List \<-\> Signal conversion (not synthesisable)
, sample
, sampleN
, fromList
-- * QuickCheck combinators
, testFor
-- * Type classes
-- ** 'Eq'-like
, (.==.), (./=.)
-- ** 'Ord'-like
, compare1, (.<.), (.<=.), (.>=.), (.>.)
-- ** 'Enum'-like
, fromEnum1
-- ** 'Rational'-like
, toRational1
-- ** 'Integral'-like
, toInteger1
-- ** 'Bits'-like
, testBit1
, popCount1
, shift1
, rotate1
, setBit1
, clearBit1
, shiftL1
, unsafeShiftL1
, shiftR1
, unsafeShiftR1
, rotateL1
, rotateR1
)
where
import Data.Bits (Bits) -- Haddock only
import CLaSH.Signal.Internal (Signal', register#, regEn#, (.==.), (./=.),
compare1, (.<.), (.<=.), (.>=.), (.>.), fromEnum1,
toRational1, toInteger1, testBit1, popCount1,
shift1, rotate1, setBit1, clearBit1, shiftL1,
unsafeShiftL1, shiftR1, unsafeShiftR1, rotateL1,
rotateR1, (.||.), (.&&.), not1, mux, sample,
sampleN, fromList, simulate, signal, testFor)
import CLaSH.Signal.Explicit (SystemClock, systemClock, simulateB')
import CLaSH.Signal.Bundle (Bundle (..), Unbundled')
{- $setup
>>> let oscillate = register False (not1 oscillate)
>>> let count = regEn 0 oscillate (count + 1)
-}
-- * Implicitly clocked synchronous signal
-- | Signal synchronised to the \"system\" clock, which has a period of 1000.
type Signal a = Signal' SystemClock a
-- * Basic circuit functions
{-# INLINE register #-}
-- | 'register' @i s@ delays the values in 'Signal' @s@ for one cycle, and sets
-- the value at time 0 to @i@
--
-- >>> sampleN 3 (register 8 (fromList [1,2,3,4]))
-- [8,1,2]
register :: a -> Signal a -> Signal a
register = register# systemClock
{-# INLINE regEn #-}
-- | Version of 'register' that only updates its content when its second argument
-- is asserted. So given:
--
-- @
-- oscillate = 'register' False ('not1' oscillate)
-- count = 'regEn' 0 oscillate (count + 1)
-- @
--
-- We get:
--
-- >>> sampleN 8 oscillate
-- [False,True,False,True,False,True,False,True]
-- >>> sampleN 8 count
-- [0,0,1,1,2,2,3,3]
regEn :: a -> Signal Bool -> Signal a -> Signal a
regEn = regEn# systemClock
-- * Product/Signal isomorphism
-- | Isomorphism between a 'Signal' of a product type (e.g. a tuple) and a
-- product type of 'Signal's.
type Unbundled a = Unbundled' SystemClock a
{-# INLINE unbundle #-}
-- | Example:
--
-- @
-- __unbundle__ :: 'Signal' (a,b) -> ('Signal' a, 'Signal' b)
-- @
--
-- However:
--
-- @
-- __unbundle__ :: 'Signal' 'CLaSH.Sized.BitVector.Bit' -> 'Signal' 'CLaSH.Sized.BitVector.Bit'
-- @
unbundle :: Bundle a => Signal a -> Unbundled a
unbundle = unbundle' systemClock
{-# INLINE bundle #-}
-- | Example:
--
-- @
-- __bundle__ :: ('Signal' a, 'Signal' b) -> 'Signal' (a,b)
-- @
--
-- However:
--
-- @
-- __bundle__ :: 'Signal' 'CLaSH.Sized.BitVector.Bit' -> 'Signal' 'CLaSH.Sized.BitVector.Bit'
-- @
bundle :: Bundle a => Unbundled a -> Signal a
bundle = bundle' systemClock
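-- Illustrative sketch (not a checked doctest; the exact output relies on the
-- 'Bundle' instance for pairs zipping the two signals element-wise):
--
-- > sampleN 3 (bundle (fromList [1,2,3], fromList [10,20,30]))
-- > -- ~ [(1,10),(2,20),(3,30)]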
-- | Simulate a (@'Unbundled' a -> 'Unbundled' b@) function given a list of
-- samples of type @a@
--
-- >>> simulateB (unbundle . register (8,8) . bundle) [(1,1), (2,2), (3,3)] :: [(Int,Int)]
-- [(8,8),(1,1),(2,2),(3,3)...
--
-- __NB__: This function is not synthesisable
simulateB :: (Bundle a, Bundle b) => (Unbundled a -> Unbundled b) -> [a] -> [b]
simulateB = simulateB' systemClock systemClock
| Ericson2314/clash-prelude | src/CLaSH/Signal.hs | bsd-2-clause | 4,201 | 0 | 9 | 955 | 599 | 401 | 198 | 65 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Data.Ini
import Data.List (group, sort, sortOn)
import Data.Maybe (mapMaybe, listToMaybe)
import Data.Ord (Down(Down))
import Data.Text (Text, unpack)
import Data.Time.Calendar (Day)
import Text.XmlHtml (Document)
type File = (FilePath, Text)
data Blog = Blog
{ name :: Text
, url :: Text
, unpublished :: [Post]
, published :: [Post]
}
deriving Show
-- toBlog does three really important things:
--
-- 1. It ensures there are no slug collisions,
-- 2. It moves the "published" posts with a future publish date
-- into the "unpublished" list, and
-- 3. It sorts the list of published entries on datestamp (in
-- reverse chronological order!)
--
toBlog :: Ini -> Day -> [Post] -> [Post] -> Either [String] Blog
toBlog config today drafts published =
let
getDuplicates = mapMaybe (listToMaybe . drop 1) . group . sort
duplicates = getDuplicates (map slug (drafts ++ published))
unpublished = filter ((> today) . datestamp) published ++ drafts
actualPublished = sortOn (Down . datestamp) (filter ((<= today) . datestamp) published)
coerce = either (Left . (:[])) return
in do
blogName <- coerce (lookupValue "general" "title" config)
blogUrl <- coerce (lookupValue "general" "url" config)
if null duplicates then
Right (Blog blogName blogUrl unpublished actualPublished)
else
Left (map (("duplicate slug: " ++) . unpack . fromSlug) duplicates)
data Post = Post
{ title :: Text
, slug :: Slug Text
, datestamp :: Day
, content :: Document
, author :: () -- for a more civilized age
, tags :: [Slug Text]
}
deriving Show
-- Wrapper for slugs so we don't mix 'em up with regular text
data Slug a = Slug
{ fromSlug :: a
}
deriving (Show, Eq, Ord)
instance Monoid a => Monoid (Slug a) where
mempty = Slug mempty
mappend (Slug a) (Slug b) = Slug (mappend a b)
instance Functor Slug where
fmap f (Slug a) = Slug (f a)
| kqr/two-wrongs-st | src/Types.hs | bsd-2-clause | 2,105 | 0 | 16 | 552 | 619 | 346 | 273 | 46 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for @xm list --long@ parser -}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Hypervisor.Xen.XmParser
( testHypervisor_Xen_XmParser
) where
import Test.HUnit
import Test.QuickCheck as QuickCheck hiding (Result)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Control.Monad (liftM)
import qualified Data.Attoparsec.Text as A
import Data.Text (pack)
import Data.Char
import qualified Data.Map as Map
import Text.Printf
import Ganeti.Hypervisor.Xen.Types
import Ganeti.Hypervisor.Xen.XmParser
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Arbitraries
-- | Generator for 'LispConfig'.
--
-- A completely arbitrary configuration would contain too many lists
-- and its size would be too big to be actually parsable in reasonable
-- time. This generator builds a random Config that is still of a
-- reasonable size, and it also avoids generating strings that might
-- be interpreted as numbers.
genConfig :: Int -> Gen LispConfig
genConfig 0 =
-- only terminal values for size 0
frequency [ (5, liftM LCString (genName `suchThat` (not . canBeNumber)))
, (5, liftM LCDouble arbitrary)
]
genConfig n =
-- for size greater than 0, allow "some" lists
frequency [ (5, liftM LCString (resize n genName `suchThat`
(not . canBeNumber)))
, (5, liftM LCDouble arbitrary)
, (1, liftM LCList (choose (1, n) >>=
(\n' -> vectorOf n' (genConfig $ n `div` n'))))
]
-- | Arbitrary instance for 'LispConfig' using 'genConfig'.
instance Arbitrary LispConfig where
arbitrary = sized genConfig
-- | Determines conservatively whether a string could be a number.
canBeNumber :: String -> Bool
canBeNumber [] = False
canBeNumber [c] = canBeNumberChar c
canBeNumber (c:xs) = canBeNumberChar c && canBeNumber xs
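-- Illustrative examples (not checked doctests):
--
-- > canBeNumber "1e-5"   -- True  (every char is a digit or one of "eE-")
-- > canBeNumber "12.5"   -- False ('.' is not accepted)
-- > canBeNumber "lts"    -- False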
-- | Determines whether a char can be part of the string representation of a
-- number (even in scientific notation).
canBeNumberChar :: Char -> Bool
canBeNumberChar c = isDigit c || (c `elem` "eE-")
-- | Generates an arbitrary @xm uptime@ output line.
instance Arbitrary UptimeInfo where
arbitrary = do
name <- genFQDN
NonNegative idNum <- arbitrary :: Gen (NonNegative Int)
NonNegative days <- arbitrary :: Gen (NonNegative Int)
hours <- choose (0, 23) :: Gen Int
mins <- choose (0, 59) :: Gen Int
secs <- choose (0, 59) :: Gen Int
let uptime :: String
uptime =
if days /= 0
then printf "%d days, %d:%d:%d" days hours mins secs
else printf "%d:%d:%d" hours mins secs
return $ UptimeInfo name idNum uptime
-- * Helper functions for tests
-- | Function for testing whether a domain configuration is parsed correctly.
testDomain :: String -> Map.Map String Domain -> Assertion
testDomain fileName expectedContent = do
fileContent <- readTestData fileName
case A.parseOnly xmListParser $ pack fileContent of
Left msg -> assertFailure $ "Parsing failed: " ++ msg
Right obtained -> assertEqual fileName expectedContent obtained
-- | Function for testing whether a @xm uptime@ output (stored in a file)
-- is parsed correctly.
testUptimeInfo :: String -> Map.Map Int UptimeInfo -> Assertion
testUptimeInfo fileName expectedContent = do
fileContent <- readTestData fileName
case A.parseOnly xmUptimeParser $ pack fileContent of
Left msg -> assertFailure $ "Parsing failed: " ++ msg
Right obtained -> assertEqual fileName expectedContent obtained
-- | Determines whether two LispConfigs are equal, with the exception of Double
-- values, which just need to be \"almost equal\".
--
-- Meant mainly for testing purposes, given that Double values may be slightly
-- rounded during parsing.
isAlmostEqual :: LispConfig -> LispConfig -> Property
isAlmostEqual (LCList c1) (LCList c2) =
(length c1 ==? length c2) .&&.
conjoin (zipWith isAlmostEqual c1 c2)
isAlmostEqual (LCString s1) (LCString s2) = s1 ==? s2
isAlmostEqual (LCDouble d1) (LCDouble d2) = printTestCase msg $ rel <= 1e-12
where rel = relativeError d1 d2
msg = "Relative error " ++ show rel ++ " not smaller than 1e-12\n" ++
"expected: " ++ show d2 ++ "\n but got: " ++ show d1
isAlmostEqual a b =
failTest $ "Comparing different types: '" ++ show a ++ "' with '" ++
show b ++ "'"
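-- Illustrative sketch (hypothetical values; not a checked doctest):
--
-- > isAlmostEqual (LCDouble 1.0) (LCDouble (1.0 + 1.0e-15))   -- passes (relative error < 1e-12)
-- > isAlmostEqual (LCString "a") (LCDouble 1.0)               -- fails (different constructors)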
-- | Function to serialize LispConfigs in such a way that they can be rebuilt
-- again by the lispConfigParser.
serializeConf :: LispConfig -> String
serializeConf (LCList c) = "(" ++ unwords (map serializeConf c) ++ ")"
serializeConf (LCString s) = s
serializeConf (LCDouble d) = show d
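-- Illustrative example (not a checked doctest):
--
-- > serializeConf (LCList [LCString "domain", LCDouble 1.5])   -- "(domain 1.5)"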
-- | Function to serialize UptimeInfos in such a way that they can be rebuilt
-- again by the uptimeLineParser.
serializeUptime :: UptimeInfo -> String
serializeUptime (UptimeInfo name idNum uptime) =
printf "%s\t%d\t%s" name idNum uptime
-- | Test whether a randomly generated config can be parsed.
-- Implicitly, this also tests that the Show instance of Config is correct.
prop_config :: LispConfig -> Property
prop_config conf =
case A.parseOnly lispConfigParser . pack . serializeConf $ conf of
Left msg -> failTest $ "Parsing failed: " ++ msg
Right obtained -> printTestCase "Failing almost equal check" $
isAlmostEqual obtained conf
-- | Test whether a randomly generated UptimeInfo text line can be parsed.
prop_uptimeInfo :: UptimeInfo -> Property
prop_uptimeInfo uInfo =
case A.parseOnly uptimeLineParser . pack . serializeUptime $ uInfo of
Left msg -> failTest $ "Parsing failed: " ++ msg
Right obtained -> obtained ==? uInfo
-- | Test a Xen 4.0.1 @xm list --long@ output.
case_xen401list :: Assertion
case_xen401list = testDomain "xen-xm-list-long-4.0.1.txt" $
Map.fromList
[ ("Domain-0", Domain 0 "Domain-0" 184000.41332 ActualRunning Nothing)
, ("instance1.example.com", Domain 119 "instance1.example.com" 24.116146647
ActualBlocked Nothing)
]
-- | Test a Xen 4.0.1 @xm uptime@ output.
case_xen401uptime :: Assertion
case_xen401uptime = testUptimeInfo "xen-xm-uptime-4.0.1.txt" $
Map.fromList
[ (0, UptimeInfo "Domain-0" 0 "98 days, 2:27:44")
, (119, UptimeInfo "instance1.example.com" 119 "15 days, 20:57:07")
]
testSuite "Hypervisor/Xen/XmParser"
[ 'prop_config
, 'prop_uptimeInfo
, 'case_xen401list
, 'case_xen401uptime
]
| apyrgio/snf-ganeti | test/hs/Test/Ganeti/Hypervisor/Xen/XmParser.hs | bsd-2-clause | 7,773 | 0 | 16 | 1,575 | 1,410 | 744 | 666 | 107 | 2 |
module Data.Iteratee (
module Data.Iteratee.Base
, module Data.Iteratee.Exception
, module Data.Iteratee.IO
) where
------------------------------------------------------------------------
-- Imports
------------------------------------------------------------------------
import Data.Iteratee.Base
import Data.Iteratee.Exception
import Data.Iteratee.IO | tanimoto/iteratee | src/Data/Iteratee.hs | bsd-3-clause | 357 | 0 | 5 | 26 | 50 | 35 | 15 | 7 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.PT.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale PT Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (OrdinalData 1)
[ "primeira"
, "primeiros"
]
, examples (OrdinalData 2)
[ "Segundo"
, "segundas"
]
, examples (OrdinalData 7)
[ "setimo"
, "sétimo"
, "sétimas"
]
, examples (OrdinalData 10)
[ "decimos"
, "décimos"
, "decima"
, "décima"
, "decimas"
, "décimas"
]
, examples (OrdinalData 11)
[ "decimos primeiros"
, "décimos primeiros"
, "decimo primeiro"
, "décimo primeiro"
, "decimas primeiras"
, "décimas primeiras"
, "decima primeira"
, "décima primeira"
]
, examples (OrdinalData 12)
[ "decimos segundos"
, "décimos segundos"
, "decimo segundo"
, "décimo segundo"
, "decimas segundas"
, "décimas segundas"
, "decima segunda"
, "décima segunda"
]
, examples (OrdinalData 17)
[ "decimos setimos"
, "décimos setimos"
, "decimo setimo"
, "décimo sétimo"
, "decimas setimas"
, "décimas sétimas"
, "decima setima"
, "décima setima"
]
, examples (OrdinalData 58)
[ "quinquagésimas oitavas"
, "qüinquagesimo oitavo"
, "quinquagésimo oitavo"
]
]
| facebookincubator/duckling | Duckling/Ordinal/PT/Corpus.hs | bsd-3-clause | 2,204 | 0 | 9 | 877 | 329 | 198 | 131 | 61 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.Concurrent where
import qualified Test.Hspec as HS
import qualified Test.Hspec.QuickCheck as HS
import qualified Test.QuickCheck as Q
import qualified Control.Concurrent as CC
import qualified Control.Concurrent.Async as AS
import qualified Control.Concurrent.MVar as MV
import qualified Control.Exception as E
import Control.Monad ((>=>))
import qualified Control.Monad as M
import qualified GHC.Conc as CC
import qualified Data.Maybe as MB
import qualified Data.Traversable as TF
import Data.Typeable (Typeable)
-- | Spec property that runs an IO-enabled test multiple times
ioprop :: (HS.HasCallStack, Q.Testable prop) => String -> prop -> HS.Spec
ioprop desc prop = HS.prop desc $ \ () -> prop
class HasThread th where
threadId :: th -> IO CC.ThreadId
throwTo :: E.Exception e => th -> e -> IO ()
throwTo th e = threadId th >>= flip E.throwTo e
threadStatus :: HasThread th => th -> IO CC.ThreadStatus
threadStatus = threadId >=> CC.threadStatus
instance HasThread CC.ThreadId where
threadId = return
instance HasThread (AS.Async x) where
threadId = return . AS.asyncThreadId
isFinish :: CC.ThreadStatus -> Bool
isFinish CC.ThreadFinished = True
isFinish CC.ThreadDied = True
isFinish _ = False
isStop :: CC.ThreadStatus -> Bool
isStop CC.ThreadRunning = False
isStop _ = True
withWaitStart :: (IO () -> IO x) -> IO x
withWaitStart actf = do
mv <- MV.newEmptyMVar
mdelay <- Q.generate $ arbitraryDelay 20000
async <- AS.async . actf $ MV.readMVar mv
case mdelay of
Just delay -> CC.threadDelay delay
Nothing -> return ()
CC.putMVar mv ()
AS.wait async
concurrently :: IO a -> IO b -> IO (a, b)
concurrently act1 act2 = do
mdelay1 <- Q.generate $ arbitraryDelay 20000
mdelay2 <- Q.generate $ arbitraryDelay 20000
withWaitStart $ \ wait ->
wrap wait mdelay1 act1 `AS.concurrently` wrap wait mdelay2 act2
where
wrap :: IO () -> Maybe Int -> IO a -> IO a
wrap wait mdelay act = wait >> TF.for mdelay CC.threadDelay >> act
mapConcurrently :: [IO a] -> IO [a]
mapConcurrently acts = do
let len = length acts
mds <- Q.generate . Q.vectorOf len $ fmap (`mod` 20000) <$> Q.arbitrary
withWaitStart $ \ wait -> do
AS.mapConcurrently id $ wrap wait <$> zip mds acts
where
wrap :: IO () -> (Maybe Int, IO a) -> IO a
wrap wait (mdelay, act) = wait >> TF.for mdelay CC.threadDelay >> act
mapConcurrently_ :: [IO a] -> IO ()
mapConcurrently_ = M.void . mapConcurrently
waitStop :: HasThread th => th -> IO CC.ThreadStatus
waitStop th = snd . head <$> waitAny isStop [th]
waitFinish :: HasThread th => th -> IO CC.ThreadStatus
waitFinish th = snd . head <$> waitFinishAny [th]
waitFinishAny :: HasThread th => [th] -> IO [(Int, CC.ThreadStatus)]
waitFinishAny = waitAny isFinish
waitAny :: HasThread th =>
(CC.ThreadStatus -> Bool) -> [th] -> IO [(Int, CC.ThreadStatus)]
waitAny = waitAnyAtLeast 1
waitAnyAtLeast :: HasThread th =>
Int -> (CC.ThreadStatus -> Bool) -> [th] -> IO [(Int, CC.ThreadStatus)]
waitAnyAtLeast num f ths = go
where
go = do
statuses <- M.sequence $ threadStatus <$> ths
let satisfied = filter (f . snd) $ zip [0..] statuses
if length satisfied >= num
then return satisfied
else CC.threadDelay 1 >> go
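-- Illustrative usage (hypothetical thread handles): block until at least two
-- of the given threads have finished or died, e.g.
--
-- > _ <- waitAnyAtLeast 2 isFinish [thread1, thread2, thread3]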
data RandomException = RandomException Int String
deriving (Show, Typeable)
instance E.Exception RandomException
ignoreException :: IO a -> IO (Maybe a)
ignoreException act = (Just <$> act)
`E.catch` \ (_err :: RandomException) -> do
    -- E.uninterruptibleMask_ $ putStrLn $ "---- Exception thrown : " ++ show _err
return Nothing
ignoreException_ :: IO a -> IO ()
ignoreException_ = M.void . ignoreException
runningThreadId :: HasThread th => th -> IO (Maybe CC.ThreadId)
runningThreadId th = do
status <- threadStatus th
if isFinish status
then return Nothing
else Just <$> threadId th
throwExceptionRandomly :: HasThread th => [th] -> IO ()
throwExceptionRandomly ths = go (1 :: Int)
where
getAlives = fmap MB.catMaybes . M.sequence $ runningThreadId <$> ths
go !c = do
mdelay <- Q.generate $ arbitraryDelay $ 20000 * c
case mdelay of
Just delay -> CC.threadDelay delay
Nothing -> return ()
alives <- getAlives
if length alives == 0
then return ()
else do
alive <- Q.generate $ Q.elements alives
throwTo alive . RandomException c $ show mdelay ++ " : " ++ show (length alives)
go $ c+1
arbitraryDelay :: Int -> Q.Gen (Maybe Int)
arbitraryDelay limit = do
mbase <- Q.arbitrary
multi1 <- (+1) . abs <$> Q.arbitrary
multi2 <- (+1) . abs <$> Q.arbitrary
case mbase of
Just base -> return . Just . (`mod` limit) $ base * multi1 * multi2
Nothing -> return Nothing
| asakamirai/kazura-queue | test/Test/Concurrent.hs | bsd-3-clause | 5,114 | 0 | 16 | 1,331 | 1,785 | 906 | 879 | -1 | -1 |
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
module AtomTestCases where
import Text.XML.HXT.Core
import Data.Tree.NTree.TypeDefs(NTree)
import Web.HRSS.Data.Atom
import Test.Framework as TF (testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
xmlAtom0 :: [Char]
xmlAtom0 = "\
\<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
\<feed xmlns='http://www.w3.org/2005/Atom' xmlns:thr='http://purl.org/syndication/thread/1.0' xml:lang='en' xml:base='http://www.1point2vue.com/wp-atom.php'>\
\ <title type='text'>1point2vue</title>\
\</feed>"
xmlAtom1 :: [Char]
xmlAtom1 = "\
\<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
\<feed xmlns='http://www.w3.org/2005/Atom' xmlns:thr='http://purl.org/syndication/thread/1.0' xml:lang='en' xml:base='http://www.1point2vue.com/wp-atom.php'>\
\ <title type='text'>mon titre</title>\
\ <subtitle type='text'>Apprendre à faire des photos et à les retoucher</subtitle>\
\ <updated>2013-03-20T18:53:12Z</updated>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com'/>\
\ <id>http://www.1point2vue.com/feed/atom/</id>\
\ <link rel='self' type='application/atom+xml' href='http://www.1point2vue.com/feed/atom/'/>\
\ <generator uri='http://wordpress.org/' version='3.3.1'>WordPress</generator>\
\ <entry>\
\ <author>\
\ <name>Antoine</name>\
\ <uri>http://www.1point2vue.com</uri>\
\ </author>\
\ <title type='html'>Le projet photo: un outil pour façonner l’experience du photographe</title>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com/projet-photo-experience/'/>\
\ <id>http://www.1point2vue.com/?p=11026</id>\
\ <updated>2013-03-20T18:53:12Z</updated>\
\ <published>2013-03-20T18:51:02Z</published>\
\ <category scheme='http://www.1point2vue.com' term='projet photo'/>\
\ <category scheme='http://www.1point2vue.com' term='wordpress'/>\
\ <summary type='html'>S'imposer un projet photo est une façon de consolider son experience de la photo. Découvrez par quels moyens vous pouvez devenir un meilleur photographe simplement en ajoutant quelques contraintes à votre pratique de la photo.<br/><br/>Lire l'article <a href='http://www.1point2vue.com/projet-photo-experience/'>Le projet photo: un outil pour façonner l’experience du photographe</a><br /><hr /><em>Le bon plan du moment: <a href='http://ad.zanox.com/ppc/?17906432C82208704&zpar9=168E7CE40089AE6CAF3B'>80 photos offertes pour les nouveaux inscrit sur MyPix.com</a></em><hr /></summary>\
\ <link rel='replies' type='text/html' href='http://www.1point2vue.com/projet-photo-experience/#comments' thr:count='9'/>\
\ <link rel='replies' type='application/atom+xml' href='http://www.1point2vue.com/projet-photo-experience/feed/atom/' thr:count='9'/>\
\ <thr:total>9</thr:total>\
\ </entry>\
\ <entry>\
\ <author>\
\ <name>Antoine</name>\
\ <uri>http://www.1point2vue.com</uri>\
\ </author>\
\ <title type='html'>Réaliser un panographe avec Gimp</title>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com/panographe-avec-gimp/'/>\
\ <id>http://www.1point2vue.com/?p=10953</id>\
\ <updated>2013-01-26T18:01:13Z</updated>\
\ <published>2013-01-26T18:01:13Z</published>\
\ <category scheme='http://www.1point2vue.com' term='Graphisme'/>\
\ <category scheme='http://www.1point2vue.com' term='assemblage'/>\
\ <category scheme='http://www.1point2vue.com' term='deplacement'/>\
\ <category scheme='http://www.1point2vue.com' term='la boite à photo'/>\
\ <category scheme='http://www.1point2vue.com' term='Panographe'/>\
\ <category scheme='http://www.1point2vue.com' term='rotation'/>\
\ <summary type='html'>Le panographe est une autre façon de faire de la photo panoramique. Bien plus simple du point de vue de la prise de vue, il permet d'obtenir des effets vraiment originaux.<br/><br/>Lire l'article <a href='http://www.1point2vue.com/panographe-avec-gimp/'>Réaliser un panographe avec Gimp</a><br /><hr /><em>Le bon plan du moment: <a href='http://ad.zanox.com/ppc/?17906432C82208704&zpar9=168E7CE40089AE6CAF3B'>80 photos offertes pour les nouveaux inscrit sur MyPix.com</a></em><hr /></summary>\
\ <link rel='replies' type='text/html' href='http://www.1point2vue.com/panographe-avec-gimp/#comments' thr:count='7'/>\
\ <link rel='replies' type='application/atom+xml' href='http://www.1point2vue.com/panographe-avec-gimp/feed/atom/' thr:count='7'/>\
\ <thr:total>7</thr:total>\
\ </entry>\
\</feed>"
parse :: IOSLA (XIOState ()) (NTree XNode) a -> String -> IO [a]
parse get xml = runX ( parseXML xml >>> get )
where
parseXML :: String -> IOStateArrow s b XmlTree
parseXML doc = readString
[ withValidate no
, withTrace 0
, withRemoveWS yes
] doc
prop_getAtom :: [Atom] -> Bool -> String -> Int -> Int -> Bool
prop_getAtom [] True _ _ _ = True
prop_getAtom [(Atom t es ls)] False tt tel tll = and [ tt == t, tel == length es, tll == length ls]
prop_getAtom _ _ _ _ _ = False
tests :: IO (TF.Test)
tests = do
xml0 <- parse getAtom xmlAtom0
xml1 <- parse getAtom xmlAtom1
return $ testGroup "AtomTestCases"
[ testProperty "Error: xmlAtom0" $ prop_getAtom xml0 False "1point2vue" 0 0
, testProperty "NoError: xmlAtom1" $ prop_getAtom xml1 False "mon titre" 2 2
]
| kdridi/hrss | src/test/AtomTestCases.hs | bsd-3-clause | 5,484 | 0 | 11 | 782 | 413 | 218 | 195 | 30 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
-- | The general Stack configuration that starts everything off. This should
-- be smart enough to fall back if there is no stack.yaml, instead relying on
-- whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly with spitting out
-- a warning that "you should run `stk init` to make things better".
module Stack.Config
(MiniConfig
,loadConfig
,loadMiniConfig
,packagesParser
,resolvePackageEntry
,getImplicitGlobalProjectDir
,getIsGMP4
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Arrow ((***))
import Control.Exception (IOException)
import Control.Monad
import Control.Monad.Catch (Handler(..), MonadCatch, MonadThrow, catches, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger hiding (Loc)
import Control.Monad.Reader (MonadReader, ask, runReaderT)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Crypto.Hash.SHA256 as SHA256
import Data.Aeson.Extended
import qualified Data.ByteString as S
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8, decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.Yaml as Yaml
import Distribution.System (OS (..), Platform (..), buildPlatform)
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange)
import GHC.Conc (getNumProcessors)
import Network.HTTP.Client.Conduit (HasHttpManager, getHttpManager, Manager, parseUrl)
import Network.HTTP.Download (download)
import Options.Applicative (Parser, strOption, long, help)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Safe (headMay)
import Stack.BuildPlan
import Stack.Constants
import Stack.Config.Docker
import qualified Stack.Image as Image
import Stack.Init
import Stack.PackageIndex
import Stack.Types
import Stack.Types.Internal
import System.Directory (getAppUserDataDirectory, createDirectoryIfMissing, canonicalizePath)
import System.Environment
import System.IO
import System.Process.Read
-- | Get the latest snapshot resolver available.
getLatestResolver
:: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m)
=> m Resolver
getLatestResolver = do
snapshots <- getSnapshots
let mlts = do
(x,y) <- listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots)))
return (LTS x y)
snap = fromMaybe (Nightly (snapshotsNightly snapshots)) mlts
return (ResolverSnapshot snap)
-- Interprets ConfigMonoid options.
configFromConfigMonoid
:: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env)
=> Path Abs Dir -- ^ stack root, e.g. ~/.stack
-> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml
-> Maybe Project
-> ConfigMonoid
-> m Config
configFromConfigMonoid configStackRoot configUserConfigPath mproject configMonoid@ConfigMonoid{..} = do
let configConnectionCount = fromMaybe 8 configMonoidConnectionCount
configHideTHLoading = fromMaybe True configMonoidHideTHLoading
configLatestSnapshotUrl = fromMaybe
"https://s3.amazonaws.com/haddock.stackage.org/snapshots.json"
configMonoidLatestSnapshotUrl
configPackageIndices = fromMaybe
[PackageIndex
{ indexName = IndexName "Hackage"
, indexLocation = ILGitHttp
"https://github.com/commercialhaskell/all-cabal-hashes.git"
"https://s3.amazonaws.com/hackage.fpcomplete.com/00-index.tar.gz"
, indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/"
, indexGpgVerify = False
, indexRequireHashes = False
}]
configMonoidPackageIndices
configGHCVariant0 = configMonoidGHCVariant
configSystemGHC = fromMaybe (isNothing configGHCVariant0) configMonoidSystemGHC
configInstallGHC = fromMaybe False configMonoidInstallGHC
configSkipGHCCheck = fromMaybe False configMonoidSkipGHCCheck
configSkipMsys = fromMaybe False configMonoidSkipMsys
configExtraIncludeDirs = configMonoidExtraIncludeDirs
configExtraLibDirs = configMonoidExtraLibDirs
-- Only place in the codebase where platform is hard-coded. In theory
-- in the future, allow it to be configured.
(Platform defArch defOS) = buildPlatform
arch = fromMaybe defArch
$ configMonoidArch >>= Distribution.Text.simpleParse
os = fromMaybe defOS
$ configMonoidOS >>= Distribution.Text.simpleParse
configPlatform = Platform arch os
configRequireStackVersion = simplifyVersionRange configMonoidRequireStackVersion
configConfigMonoid = configMonoid
configImage = Image.imgOptsFromMonoid configMonoidImageOpts
configCompilerCheck = fromMaybe MatchMinor configMonoidCompilerCheck
configDocker <- dockerOptsFromMonoid mproject configStackRoot configMonoidDockerOpts
rawEnv <- liftIO getEnvironment
origEnv <- mkEnvOverride configPlatform
$ augmentPathMap (map toFilePath configMonoidExtraPath)
$ Map.fromList
$ map (T.pack *** T.pack) rawEnv
let configEnvOverride _ = return origEnv
platformOnlyDir <- runReaderT platformOnlyRelDir configPlatform
configLocalProgramsBase <-
case configPlatform of
Platform _ Windows -> do
progsDir <- getWindowsProgsDir configStackRoot origEnv
return $ progsDir </> $(mkRelDir stackProgName)
_ ->
return $
configStackRoot </> $(mkRelDir "programs")
let configLocalPrograms = configLocalProgramsBase </> platformOnlyDir
configLocalBin <-
case configMonoidLocalBinPath of
Nothing -> do
localDir <- liftIO (getAppUserDataDirectory "local") >>= parseAbsDir
return $ localDir </> $(mkRelDir "bin")
Just userPath ->
liftIO (canonicalizePath userPath >>= parseAbsDir)
`catches`
[Handler (\(_ :: IOException) -> throwM $ NoSuchDirectory userPath)
,Handler (\(_ :: PathParseException) -> throwM $ NoSuchDirectory userPath)
]
configJobs <-
case configMonoidJobs of
Nothing -> liftIO getNumProcessors
Just i -> return i
let configConcurrentTests = fromMaybe True configMonoidConcurrentTests
let configTemplateParams = configMonoidTemplateParameters
configScmInit = configMonoidScmInit
configGhcOptions = configMonoidGhcOptions
configSetupInfoLocations = configMonoidSetupInfoLocations
configPvpBounds = fromMaybe PvpBoundsNone configMonoidPvpBounds
configModifyCodePage = fromMaybe True configMonoidModifyCodePage
configExplicitSetupDeps = configMonoidExplicitSetupDeps
configRebuildGhcOptions = fromMaybe False configMonoidRebuildGhcOptions
configApplyGhcOptions = fromMaybe AGOLocals configMonoidApplyGhcOptions
configAllowNewer = fromMaybe False configMonoidAllowNewer
return Config {..}
-- | Get the default 'GHCVariant'. On older Linux systems with libgmp4, returns 'GHCGMP4'.
getDefaultGHCVariant
:: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
=> EnvOverride -> Platform -> m GHCVariant
getDefaultGHCVariant menv (Platform _ Linux) = do
isGMP4 <- getIsGMP4 menv
return (if isGMP4 then GHCGMP4 else GHCStandard)
getDefaultGHCVariant _ _ = return GHCStandard
-- Determine whether 'stack' is linked with libgmp4 (libgmp.so.3)
getIsGMP4
:: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
=> EnvOverride -> m Bool
getIsGMP4 menv = do
executablePath <- liftIO getExecutablePath
elddOut <- tryProcessStdout Nothing menv "ldd" [executablePath]
return $
case elddOut of
Left _ -> False
Right lddOut -> hasLineWithFirstWord "libgmp.so.3" lddOut
where
hasLineWithFirstWord w =
elem (Just w) .
map (headMay . T.words) . T.lines . decodeUtf8With lenientDecode
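-- Illustrative note (the ldd line below is a made-up example, not captured
-- output): @hasLineWithFirstWord "libgmp.so.3"@ matches lines whose first
-- word is the library name, e.g.
--
-- > libgmp.so.3 => /usr/lib/libgmp.so.3 (0x00007f...)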
-- | Get the directory on Windows where we should install extra programs. For
-- more information, see discussion at:
-- https://github.com/fpco/minghc/issues/43#issuecomment-99737383
getWindowsProgsDir :: MonadThrow m
=> Path Abs Dir
-> EnvOverride
-> m (Path Abs Dir)
getWindowsProgsDir stackRoot m =
case Map.lookup "LOCALAPPDATA" $ unEnvOverride m of
Just t -> do
lad <- parseAbsDir $ T.unpack t
return $ lad </> $(mkRelDir "Programs")
Nothing -> return $ stackRoot </> $(mkRelDir "Programs")
-- | An environment with a subset of BuildConfig used for setup.
data MiniConfig = MiniConfig Manager GHCVariant Config
instance HasConfig MiniConfig where
getConfig (MiniConfig _ _ c) = c
instance HasStackRoot MiniConfig
instance HasHttpManager MiniConfig where
getHttpManager (MiniConfig man _ _) = man
instance HasPlatform MiniConfig
instance HasGHCVariant MiniConfig where
getGHCVariant (MiniConfig _ v _) = v
-- | Load the 'MiniConfig'.
loadMiniConfig
:: (MonadIO m, HasHttpManager a, MonadReader a m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
=> Config -> m MiniConfig
loadMiniConfig config = do
menv <- liftIO $ configEnvOverride config minimalEnvSettings
manager <- getHttpManager <$> ask
ghcVariant <-
case configGHCVariant0 config of
Just ghcVariant -> return ghcVariant
Nothing -> getDefaultGHCVariant menv (configPlatform config)
return (MiniConfig manager ghcVariant config)
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary.
loadConfig :: (MonadLogger m,MonadIO m,MonadCatch m,MonadThrow m,MonadBaseControl IO m,MonadReader env m,HasHttpManager env,HasTerminal env)
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (LoadConfig m)
loadConfig configArgs mstackYaml = do
stackRoot <- determineStackRoot
userConfigPath <- getDefaultUserConfigPath stackRoot
extraConfigs0 <- getExtraConfigs userConfigPath >>= mapM loadYaml
let extraConfigs =
            -- a docker section in a non-project config file should never cause docker to
            -- default to enabled, so make it look like those files didn't exist
map (\c -> c {configMonoidDockerOpts =
(configMonoidDockerOpts c) {dockerMonoidDefaultEnable = False}})
extraConfigs0
mproject <- loadProjectConfig mstackYaml
config <- configFromConfigMonoid stackRoot userConfigPath (fmap (\(proj, _, _) -> proj) mproject) $ mconcat $
case mproject of
Nothing -> configArgs : extraConfigs
Just (_, _, projectConfig) -> configArgs : projectConfig : extraConfigs
unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
(throwM (BadStackVersionException (configRequireStackVersion config)))
return LoadConfig
{ lcConfig = config
, lcLoadBuildConfig = loadBuildConfig mproject config
, lcProjectRoot = fmap (\(_, fp, _) -> parent fp) mproject
}
-- | Load the build configuration, adding build-specific values to the config loaded by @loadConfig@.
loadBuildConfig :: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env, MonadBaseControl IO m, HasTerminal env)
=> Maybe (Project, Path Abs File, ConfigMonoid)
-> Config
-> Maybe AbstractResolver -- override resolver
-> Maybe CompilerVersion -- override compiler
-> m BuildConfig
loadBuildConfig mproject config mresolver mcompiler = do
env <- ask
miniConfig <- loadMiniConfig config
(project', stackYamlFP) <- case mproject of
Just (project, fp, _) -> return (project, fp)
Nothing -> do
$logInfo "Run from outside a project, using implicit global project config"
destDir <- getImplicitGlobalProjectDir config
let dest :: Path Abs File
dest = destDir </> stackDotYaml
dest' :: FilePath
dest' = toFilePath dest
createTree destDir
exists <- fileExists dest
if exists
then do
ProjectAndConfigMonoid project _ <- loadYaml dest
when (getTerminal env) $
case mresolver of
Nothing ->
$logInfo ("Using resolver: " <> resolverName (projectResolver project) <>
" from implicit global project's config file: " <> T.pack dest')
Just aresolver -> do
let name =
case aresolver of
ARResolver resolver -> resolverName resolver
ARLatestNightly -> "nightly"
ARLatestLTS -> "lts"
ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
ARGlobal -> "global"
$logInfo ("Using resolver: " <> name <>
" specified on command line")
return (project, dest)
else do
r <- runReaderT getLatestResolver miniConfig
$logInfo ("Using latest snapshot resolver: " <> resolverName r)
$logInfo ("Writing implicit global project config file to: " <> T.pack dest')
$logInfo "Note: You can change the snapshot via the resolver field there."
let p = Project
{ projectPackages = mempty
, projectExtraDeps = mempty
, projectFlags = mempty
, projectResolver = r
, projectCompiler = Nothing
, projectExtraPackageDBs = []
}
liftIO $ do
S.writeFile dest' $ S.concat
[ "# This is the implicit global project's config file, which is only used when\n"
, "# 'stack' is run outside of a real project. Settings here do _not_ act as\n"
, "# defaults for all projects. To change stack's default settings, edit\n"
, "# '", encodeUtf8 (T.pack $ toFilePath $ configUserConfigPath config), "' instead.\n"
, "#\n"
, "# For more information about stack's configuration, see\n"
, "# https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n"
, "#\n"
, Yaml.encode p]
S.writeFile (toFilePath $ parent dest </> $(mkRelFile "README.txt")) $ S.concat
[ "This is the implicit global project, which is used only when 'stack' is run\n"
, "outside of a real project.\n" ]
return (p, dest)
resolver <-
case mresolver of
Nothing -> return $ projectResolver project'
Just aresolver ->
runReaderT (makeConcreteResolver aresolver) miniConfig
let project = project'
{ projectResolver = resolver
, projectCompiler = mcompiler <|> projectCompiler project'
}
wantedCompiler <-
case projectCompiler project of
Just wantedCompiler -> return wantedCompiler
Nothing -> case projectResolver project of
ResolverSnapshot snapName -> do
mbp <- runReaderT (loadMiniBuildPlan snapName) miniConfig
return $ mbpCompilerVersion mbp
ResolverCustom _name url -> do
mbp <- runReaderT (parseCustomMiniBuildPlan stackYamlFP url) miniConfig
return $ mbpCompilerVersion mbp
ResolverCompiler wantedCompiler -> return wantedCompiler
extraPackageDBs <- mapM parseRelAsAbsDir (projectExtraPackageDBs project)
packageCaches <- runReaderT (getMinimalEnvOverride >>= getPackageCaches) miniConfig
return BuildConfig
{ bcConfig = config
, bcResolver = projectResolver project
, bcWantedCompiler = wantedCompiler
, bcPackageEntries = projectPackages project
, bcExtraDeps = projectExtraDeps project
, bcExtraPackageDBs = extraPackageDBs
, bcStackYaml = stackYamlFP
, bcFlags = projectFlags project
, bcImplicitGlobal = isNothing mproject
, bcGHCVariant = getGHCVariant miniConfig
, bcPackageCaches = packageCaches
}
-- | Resolve a PackageEntry into a list of paths, downloading and cloning as
-- necessary.
resolvePackageEntry
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageEntry
-> m [(Path Abs Dir, Bool)]
resolvePackageEntry menv projRoot pe = do
entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
paths <-
case peSubdirs pe of
[] -> return [entryRoot]
subs -> mapM (resolveDir entryRoot) subs
case peValidWanted pe of
Nothing -> return ()
Just _ -> $logWarn "Warning: you are using the deprecated valid-wanted field. You should instead use extra-dep. See: https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md#packages"
return $ map (, not $ peExtraDep pe) paths
-- | Resolve a PackageLocation into a path, downloading and cloning as
-- necessary.
resolvePackageLocation
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageLocation
-> m (Path Abs Dir)
resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
resolvePackageLocation _ projRoot (PLHttpTarball url) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 url
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
fileRel <- parseRelFile $ name ++ ".tar.gz"
dirRel <- parseRelDir name
dirRelTmp <- parseRelDir $ name ++ ".tmp"
let file = root </> fileRel
dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
req <- parseUrl $ T.unpack url
_ <- download req file
removeTreeIfExists dirTmp
liftIO $ withBinaryFile (toFilePath file) ReadMode $ \h -> do
lbs <- L.hGetContents h
let entries = Tar.read $ GZip.decompress lbs
Tar.unpack (toFilePath dirTmp) entries
renameDir dirTmp dir
x <- listDirectory dir
case x of
([dir'], []) -> return dir'
(dirs, files) -> do
removeFileIfExists file
removeTreeIfExists dir
throwM $ UnexpectedTarballContents dirs files
resolvePackageLocation menv projRoot (PLGit url commit) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 $ T.unwords [url, commit]
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
dirRel <- parseRelDir $ name ++ ".git"
dirRelTmp <- parseRelDir $ name ++ ".git.tmp"
let dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
removeTreeIfExists dirTmp
createTree (parent dirTmp)
readInNull (parent dirTmp) "git" menv
[ "clone"
, T.unpack url
, toFilePath dirTmp
]
Nothing
readInNull dirTmp "git" menv
[ "reset"
, "--hard"
, T.unpack commit
]
Nothing
renameDir dirTmp dir
return dir
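-- Illustrative note (hypothetical entry): a 'PLFilePath' location simply
-- resolves against the project root,
--
-- > resolvePackageLocation menv projRoot (PLFilePath "vendor/foo")
--
-- while tarball and git locations are downloaded once and cached under the
-- work directory's "downloaded" subdirectory, as shown above.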
-- | Get the stack root, e.g. ~/.stack
determineStackRoot :: (MonadIO m, MonadThrow m) => m (Path Abs Dir)
determineStackRoot = do
env <- liftIO getEnvironment
case lookup stackRootEnvVar env of
Nothing -> do
x <- liftIO $ getAppUserDataDirectory stackProgName
parseAbsDir x
Just x -> do
y <- liftIO $ do
createDirectoryIfMissing True x
canonicalizePath x
parseAbsDir y
-- | Determine the extra config file locations which exist.
--
-- Returns most local first
getExtraConfigs :: (MonadIO m, MonadLogger m)
=> Path Abs File -- ^ use config path
-> m [Path Abs File]
getExtraConfigs userConfigPath = do
defaultStackGlobalConfigPath <- getDefaultGlobalConfigPath
liftIO $ do
env <- getEnvironment
mstackConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_CONFIG" env
mstackGlobalConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_GLOBAL_CONFIG" env
filterM fileExists
$ fromMaybe userConfigPath mstackConfig
: maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfigPath)
-- | Load and parse YAML from the given file.
loadYaml :: (FromJSON (a, [JSONWarning]), MonadIO m, MonadLogger m) => Path Abs File -> m a
loadYaml path = do
(result,warnings) <-
liftIO $
Yaml.decodeFileEither (toFilePath path) >>=
either (throwM . ParseConfigFileException path) return
logJSONWarnings (toFilePath path) warnings
return result
-- | Get the location of the project config file, if it exists.
getProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Path Abs File))
getProjectConfig (Just stackYaml) = return $ Just stackYaml
getProjectConfig Nothing = do
env <- liftIO getEnvironment
case lookup "STACK_YAML" env of
Just fp -> do
$logInfo "Getting project config file from STACK_YAML environment"
liftM Just $ case parseAbsFile fp of
Left _ -> do
currDir <- getWorkingDir
resolveFile currDir fp
Right path -> return path
Nothing -> do
currDir <- getWorkingDir
search currDir
where
search dir = do
let fp = dir </> stackDotYaml
fp' = toFilePath fp
$logDebug $ "Checking for project config at: " <> T.pack fp'
exists <- fileExists fp
if exists
then return $ Just fp
else do
let dir' = parent dir
if dir == dir'
-- fully traversed, give up
then return Nothing
else search dir'
-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory.
loadProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
mfp <- getProjectConfig mstackYaml
case mfp of
Just fp -> do
currDir <- getWorkingDir
$logDebug $ "Loading project config file " <>
T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
load fp
Nothing -> do
$logDebug $ "No project config file found, using defaults."
return Nothing
where
load fp = do
ProjectAndConfigMonoid project config <- loadYaml fp
return $ Just (project, fp, config)
-- | Get the location of the default stack configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultGlobalConfigPath
:: (MonadIO m, MonadLogger m)
=> m (Maybe (Path Abs File))
getDefaultGlobalConfigPath =
case (defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated) of
(Just new,Just old) ->
liftM (Just . fst ) $
tryDeprecatedPath
(Just "non-project global configuration file")
fileExists
new
old
(Just new,Nothing) -> return (Just new)
_ -> return Nothing
-- | Get the location of the default user configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultUserConfigPath
:: (MonadIO m, MonadLogger m)
=> Path Abs Dir -> m (Path Abs File)
getDefaultUserConfigPath stackRoot = do
(path, exists) <- tryDeprecatedPath
(Just "non-project configuration file")
fileExists
(defaultUserConfigPath stackRoot)
(defaultUserConfigPathDeprecated stackRoot)
unless exists $ do
createTree (parent path)
liftIO $ S.writeFile (toFilePath path) $ S.concat
[ "# This file contains default non-project-specific settings for 'stack', used\n"
, "# in all projects. For more information about stack's configuration, see\n"
, "# https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n"
, "#\n"
, Yaml.encode (mempty :: Object) ]
return path
packagesParser :: Parser [String]
packagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
| vigoo/stack | src/Stack/Config.hs | bsd-3-clause | 27,563 | 0 | 30 | 8,183 | 5,733 | 2,882 | 2,851 | 521 | 12 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
module Diagrams.TwoD.Path.Metafont.Types where
import Control.Lens hiding (( # ))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup
#endif
import Diagrams.Direction
import Diagrams.TwoD.Types
-- | A @PathJoin@ specifies the directions at both ends of a segment,
-- and a join which describes the control points explicitly or implicitly.
data PathJoin d j = PJ { _d1 :: d, _j :: j, _d2 :: d }
deriving (Functor, Show)
makeLenses ''PathJoin
-- | A direction can be specified at any point of a path. A /curl/
-- should only be specified at the endpoints. The endpoints default
-- to curl 1 if not set.
data PathDir n
= PathDirCurl n
| PathDirDir (Dir n)
deriving Show
-- | A predicate to determine the constructor used.
isCurl :: PathDir n -> Bool
isCurl (PathDirDir _) = False
isCurl (PathDirCurl _) = True
type Curl n = n
type Dir n = Direction V2 n
type BasicJoin n = Either (TensionJoin n) (ControlJoin n)
-- | Higher /Tension/ brings the path closer to a straight line
-- between segments. Equivalently, it brings the control points
-- closer to the endpoints. @TensionAmt@ introduces a fixed tension.
-- @TensionAtLeast@ introduces a tension which will be increased if by
-- so doing, an inflection point can be eliminated.
data Tension n
= TensionAmt n
| TensionAtLeast n
deriving Show
getTension :: Tension n -> n
getTension (TensionAmt t) = t
getTension (TensionAtLeast t) = t
-- | Two tensions and two directions completely determine the control
-- points of a segment.
data TensionJoin n = TJ { _t1 :: Tension n, _t2 :: Tension n }
deriving Show
-- | The two intermediate control points of a segment, specified directly.
data ControlJoin n = CJ { _c1 :: P2 n, _c2 :: P2 n}
deriving Show
makeLenses ''TensionJoin
makeLenses ''ControlJoin
data P
data J
-- | @MFPathData@ is the type manipulated by the metafont combinators.
data MFPathData a n where
MFPathCycle:: MFPathData P n
MFPathEnd :: P2 n -> MFPathData P n
MFPathPt :: P2 n -> MFPathData J n -> MFPathData P n
MFPathJoin :: PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n)) -> MFPathData P n -> MFPathData J n
-- | @MetafontSegment@ is used internally in solving the metafont
-- equations. It represents a segment with two known endpoints, and a
-- /join/, which may be specified in various ways.
data MetafontSegment d j n = MFS { _x1 :: P2 n, _pj :: PathJoin d j, _x2 :: P2 n }
deriving (Functor, Show)
-- | @MFPath@ is the type used internally in solving the metafont
-- equations. The direction and join types are progressively refined
-- until all control points are known. The @loop@ flag affects both
-- the equations to be solved and the type of 'Trail' in the result.
-- If constructing an @MFPath@ in new code, the responsibility rests
-- on the user to ensure that successive @MetafontSegment@s share an
-- endpoint. If this is not true, the result is undefined.
data MFPath d j n = MFP { _loop :: Bool, _segs :: [MetafontSegment d j n] }
deriving Show
-- | MFP is a type synonym to clarify signatures in Metafont.Internal.
-- Note that the type permits segments which are \"overspecified\",
-- having one or both directions specified, and also a 'ControlJoin'.
-- In this case, "Metafont.Internal" ignores the directions.
type MFP n = MFPath (Maybe (PathDir n)) (BasicJoin n) n
-- | MFS is a type synonym to clarify signatures in "Metafont.Internal".
type MFS n = MetafontSegment (Maybe (PathDir n)) (BasicJoin n) n
makeLenses ''MetafontSegment
makeLenses ''MFPath
instance Monoid (PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n))) where
-- | The default join, with no directions specified, and both tensions 1.
mempty = PJ Nothing Nothing Nothing
l `mappend` r = PJ (c (l^.d1) (r^.d1)) (c (l^.j) (r^.j)) (c (l^.d2) (r^.d2))
where
c a b = case b of
Nothing -> a
Just _ -> b
instance Semigroup (PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n))) where
(<>) = mappend
| diagrams/diagrams-contrib | src/Diagrams/TwoD/Path/Metafont/Types.hs | bsd-3-clause | 4,322 | 0 | 11 | 938 | 880 | 496 | 384 | -1 | -1 |
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE TupleSections #-}
-- | Common helpers for git vogue plugins
module Git.Vogue.PluginCommon
(
-- * Output
outputGood,
outputUnfortunate,
outputBad,
lineWrap,
-- * FilePath handling
hsProjects,
forProjects,
-- * Command line parsing
getPluginCommand,
pureSubCommand,
PluginCommand(..),
-- * Utility
forWithKey_,
forWithKey,
) where
import Control.Applicative
import Control.Monad.IO.Class
import Data.Char
import Data.Functor
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid
import Data.Ord
import Options.Applicative
import System.Directory
import System.FilePath
-- | The check went or is going well, this should make the developer happy
outputGood :: MonadIO m => String -> m ()
outputGood = outputWithIcon " \x1b[32m[+]\x1b[0m "
-- | A non-fatal warning of some sort. The developer should be able to ignore
-- this.
outputUnfortunate :: MonadIO m => String -> m ()
outputUnfortunate = outputWithIcon " \x1b[33m[*]\x1b[0m "
-- | If any of these appear, you should probably be exploding and the developer
-- will be sad.
outputBad :: MonadIO m => String -> m ()
outputBad = outputWithIcon " \x1b[31m[-]\x1b[0m "
outputWithIcon :: MonadIO m => String -> String -> m ()
outputWithIcon icon = liftIO . putStrLn . (icon <>) . prependWS
-- | Prepend some whitespace to every line but the first so that subsequent
-- lines line up below a [+] or [-].
prependWS :: String -> String
prependWS "" = ""
prependWS input =
let (x:xs) = lines input
in intercalate "\n" $ x : fmap (" " <>) xs
-- | Convenience for line wrapping long lines.
lineWrap :: Int -> String -> String
lineWrap line_len =
intercalate "\n" . fmap (intercalate "\n" . unfoldr f) . lines
where
f [] = Nothing
f xs = Just . fmap lstrip $ splitAt line_len xs
lstrip = dropWhile isSpace
-- | Helper for traversing a Map with keys
forWithKey_ :: Applicative f => Map k v -> (k -> v -> f ()) -> f ()
forWithKey_ m a = void $ M.traverseWithKey a m
forWithKey :: Applicative f => Map k v -> (k -> v -> f a) -> f (Map k a)
forWithKey = flip M.traverseWithKey
-- | Find .cabal files in hsFiles and arrange children underneath these
-- "headings".
hsProjects
:: [FilePath] -- ^ Files to be checked
-> [FilePath] -- ^ All files
-> Map FilePath [FilePath]
hsProjects check_fs all_fs =
-- We want to stick the subset of files to be checked under the same
-- project headings as we would if we were checking all files. So we mush
-- them together.
--
-- Discard the remainder, the user probably doesn't know what to do with
-- it.
let (complete_proj_map, _) = findProjects (isSuffixOf ".cabal") all_fs
-- Now do the awesome quadratic thing and traverse lists.
proj_map = fmap (filter (`elem` check_fs)) complete_proj_map
-- And finally strip the prefixes of the dirs, so that this looks a bit
-- like a one level trie.
bug = error "BUG: hsProjects: A key was not a prefix of its elements"
in M.mapWithKey (\k -> fmap (fromMaybe bug . stripPrefix k)) proj_map
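-- An illustrative sketch (hypothetical paths; not a checked example): with
-- files to check @["foo/src/A.hs"]@ and all files
-- @["foo/foo.cabal", "foo/src/A.hs", "README.md"]@, the result groups the
-- checked file under the project that owns @foo.cabal@, roughly
--
-- > fromList [("foo/", ["src/A.hs"])]
--
-- with @README.md@ left out as part of the unaccounted-for remainder.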
-- | For each of the given projects, perform the supplied action on its
-- relative file paths, with the current directory set to the project directory.
--
-- This will also take care of printing out a "Checking project in: " message.
forProjects
:: (MonadIO m, Applicative m)
=> Map FilePath [FilePath]
-> ([FilePath] -> m a)
-> m (Map FilePath a)
forProjects projs f = do
cwd <- liftIO $ getCurrentDirectory >>= canonicalizePath
forWithKey projs $ \dir fs -> do
let pdir = "." </> dir
liftIO $ do
putStrLn $ "Checking project in: " <> pdir
setCurrentDirectory pdir
x <- f fs
liftIO $ setCurrentDirectory cwd
return x
-- | Given a predicate to identify a file as being in the "root" of a
-- directory and a bunch of FilePaths, figure out which file paths belong under
-- these roots and "compartmentalize" them. Any remaining, unaccounted-for
-- files are returned as the second element.
--
-- This is useful for finding files belonging to distinct projects within a
-- repository.
findProjects
:: (FilePath -> Bool)
-> [FilePath]
-> (Map FilePath [FilePath], [FilePath])
findProjects p xs =
-- We start out by putting all of the files in a nested list, splitting
-- up the path.
let all_paths = fmap (splitPath . ('/':)) xs
-- Now we find all of the project roots. Again tacking on the root so
-- that init is safe and everything lines up.
roots = sortBy (comparing length) . fmap (init . splitPath . ('/':)) $
filter p xs
-- Now iterate over the project roots, taking the bits of the whole
-- list as we go.
f current_root (result, remaining) =
let included = isPrefixOf current_root
to_take = filter included remaining
to_leave = filter (not . included) remaining
in ( M.insert (joinPath $ tail current_root) to_take result
, to_leave)
(projects, remainder) = foldr f (mempty, all_paths) roots
-- Now put the broken up paths back together and take the roots off.
in ((fmap . fmap) (joinPath . tail) projects
, fmap (joinPath . tail) remainder)
-- | Parser for plugin arguments
pluginCommandParser :: Parser PluginCommand
pluginCommandParser = subparser
( pureSubCommand "name" CmdName "Get name of plugin"
<> fpCommand "check" CmdCheck "Check for problems"
<> fpCommand "fix" CmdFix "Try to fix problems"
)
-- Helper for plugin commands that take [FilePath]s
fpCommand
:: String
-> ([FilePath] -> [FilePath] -> a)
-> String
-> Mod CommandFields a
fpCommand name ctor desc = command name (info parser (progDesc desc))
where
parser = ctor <$> argument (lines <$> str) (metavar "CHECKABLE_FILES")
<*> argument (lines <$> str) (metavar "ALL_FILES")
-- | Sub-command helper
pureSubCommand :: String -> a -> String -> Mod CommandFields a
pureSubCommand name ctor desc = command name (info (pure ctor) (progDesc desc))
-- | Get the plugin command requested given a header and a description
getPluginCommand :: String -> String -> IO PluginCommand
getPluginCommand hdr desc = execParser parser
where
parser = info (helper <*> pluginCommandParser)
( fullDesc
<> progDesc desc
<> header hdr)
-- | Arguments to the plugin
data PluginCommand
-- | Check the project for problems.
= CmdCheck [FilePath] [FilePath]
-- | Fix problems in the project.
| CmdFix [FilePath] [FilePath]
-- | Report details.
| CmdName
| olorin/git-vogue | lib/Git/Vogue/PluginCommon.hs | bsd-3-clause | 7,195 | 0 | 15 | 1,825 | 1,492 | 798 | 694 | 116 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Servant.Servant.Db
( PackageDB(..)
, AddPackageIfMissing
, GetPackage
) where
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Data.Acid
import Data.SafeCopy
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import qualified Data.Map.Strict as Map
import Servant.Servant.Types
newtype PackageDB = PackageDB { unPkgDB :: Map.Map String Package }
deriving (Typeable, Generic)
deriveSafeCopy 0 'base ''PackageDB
addPackageIfMissing :: Package -> Update PackageDB ()
addPackageIfMissing pkg = modify go
where
pickOld _ old = old
go (PackageDB db) = PackageDB
$ Map.insertWith pickOld (packageName pkg) pkg db
getPackage :: String -> Query PackageDB (Maybe Package)
getPackage pkgName = ask >>= return . Map.lookup pkgName . unPkgDB
makeAcidic ''PackageDB ['addPackageIfMissing, 'getPackage]
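-- A usage sketch, not part of the original module: open the local acid-state
-- store, insert a package unless it already exists, read it back and close
-- the store. The 'Package' value is supplied by the caller, since its
-- constructor lives in Servant.Servant.Types.
demoRoundTrip :: Package -> IO (Maybe Package)
demoRoundTrip pkg = do
  acid <- openLocalState (PackageDB Map.empty)
  update acid (AddPackageIfMissing pkg)
  found <- query acid (GetPackage (packageName pkg))
  closeAcidState acid
  return found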
| jkarni/servant-servant | src/Servant/Servant/Db.hs | bsd-3-clause | 1,081 | 0 | 10 | 178 | 268 | 152 | 116 | 29 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
-- | Github API: http://developer.github.com/v3/oauth/
module Main where
import Data.Aeson.TH (defaultOptions, deriveJSON)
import qualified Data.ByteString.Char8 as BS
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.HTTP.Conduit
import URI.ByteString.QQ
import URI.ByteString
import Network.OAuth.OAuth2
import Keys
data SiteInfo = SiteInfo { items :: [SiteItem]
, has_more :: Bool
, quota_max :: Integer
, quota_remaining :: Integer
} deriving (Show, Eq)
data SiteItem = SiteItem { new_active_users :: Integer
, total_users :: Integer
, badges_per_minute :: Double
, total_badges :: Integer
, total_votes :: Integer
, total_comments :: Integer
, answers_per_minute :: Double
, questions_per_minute :: Double
, total_answers :: Integer
, total_accepted :: Integer
, total_unanswered :: Integer
, total_questions :: Integer
, api_revision :: Text
} deriving (Show, Eq)
$(deriveJSON defaultOptions ''SiteInfo)
$(deriveJSON defaultOptions ''SiteItem)
main :: IO ()
main = do
BS.putStrLn $ serializeURIRef' $ authorizationUrl stackexchangeKey
putStrLn "visit the url and paste code here: "
code <- fmap (ExchangeToken . T.pack) getLine
mgr <- newManager tlsManagerSettings
token <- fetchAccessToken mgr stackexchangeKey code
print token
case token of
Right at -> siteInfo mgr (accessToken at) >>= print
Left _ -> putStrLn "no access token found yet"
-- | Test API: info
siteInfo :: Manager -> AccessToken -> IO (OAuth2Result SiteInfo)
siteInfo mgr token = authGetJSON mgr token [uri|https://api.stackexchange.com/2.2/info?site=stackoverflow|]
sToBS :: String -> BS.ByteString
sToBS = T.encodeUtf8 . T.pack
| reactormonk/hoauth2 | example/StackExchange/test.hs | bsd-3-clause | 2,512 | 0 | 13 | 955 | 457 | 259 | 198 | 51 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- | SendGrid Client
module Email.SendGrid.Client where
import Data.Bifunctor (bimap)
import Data.ByteString.Lazy.Builder
import qualified Data.ByteString as B
-- import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Text.Encoding (encodeUtf8)
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.HTTP.Types.Status as NHTS
import Email.SendGrid.Types
type Reply = Network.HTTP.Client.Response BL8.ByteString
newtype SendGridUser = SendGridUser B.ByteString deriving (Eq, Show)
newtype SendGridKey = SendGridKey B.ByteString deriving (Eq, Show)
data SendGridCredentials = SendGridCredentials {
apiUser :: SendGridUser
, apiKey :: SendGridKey
} deriving (Eq, Show)
data SendGridError =
SendGridUnknownError B.ByteString
| SendGridWrongCredentials
deriving (Eq, Show)
data SendGridResponseStatus =
SendGridSuccess
| SendGridFailed
deriving (Eq, Show)
data SendGridResponse = SendGridResponse {
sgMessage :: SendGridResponseStatus
, sgErrors :: [SendGridError]
} deriving (Eq, Show)
sendGridMailSendEndpoint :: String
sendGridMailSendEndpoint = "https://api.sendgrid.com/api/mail.send.json"
exampleEmail :: [(B.ByteString, B.ByteString)]
exampleEmail = [ ("to[]", "callen.23dc@gmail.com")
, ("toname[]", "Chris The Coolest")
, ("to[]", "cma@bitemyapp.com")
, ("toname[]", "Chris The Cooler")
, ("subject", "herro, test email")
, ("text", "SendGrid test email yo :)")
, ("from", "cma@bitemyapp.com")
]
serialiseCredentials :: SendGridCredentials -> [(B.ByteString, B.ByteString)]
serialiseCredentials (SendGridCredentials (SendGridUser user) (SendGridKey key))
= [ ("api_user", user)
, ("api_key", key) ]
serialiseEmailAddress :: EmailAddress -> B.ByteString
serialiseEmailAddress (EmailAddress e) = encodeUtf8 e
serialiseRecipientName :: RecipientName -> B.ByteString
serialiseRecipientName (RecipientName r) = encodeUtf8 r
-- fmap (bimap serialiseEmailAddress serialiseRecipientName)
exampleRecipients :: Recipients
exampleRecipients = Recipients
[(EmailAddress "callen@woot.com"
, RecipientName "Chris Allen")]
tuplesToList :: ((a, b), (a, b)) -> [(a, b)]
tuplesToList ((a, b), (c, d)) = [(a, b), (c, d)]
serialiseEmailName :: B.ByteString
-> B.ByteString
-> [(EmailAddress, RecipientName)]
-> [(B.ByteString, B.ByteString)]
serialiseEmailName e n pairs =
pairs >>= (tuplesToList . toTuples)
where toTuples = bimap sEmail sRName
sEmail = (e,) . serialiseEmailAddress
sRName = (n,) . serialiseRecipientName
serialiseRecipients :: Recipients -> [(B.ByteString, B.ByteString)]
serialiseRecipients (Recipients addies) =
serialiseEmailName "to[]" "toname[]" addies
serialiseCc :: CarbonCopies -> [(B.ByteString, B.ByteString)]
serialiseCc (CarbonCopies ccs) =
serialiseEmailName "cc" "ccname" ccs
serialiseBcc :: BlindCarbonCopies -> [(B.ByteString, B.ByteString)]
serialiseBcc (BlindCarbonCopies bccs) =
serialiseEmailName "bcc" "bccname" bccs
serialiseFrom :: FromAddress -> [(B.ByteString, B.ByteString)]
serialiseFrom (FromAddress emailAddy) =
[("from", serialiseEmailAddress emailAddy)]
serialiseSenderName :: SenderName -> [(B.ByteString, B.ByteString)]
serialiseSenderName (SenderName sender) =
[("fromname", encodeUtf8 sender)]
serialiseEmailBody :: EmailBody -> [(B.ByteString, B.ByteString)]
serialiseEmailBody = undefined
serialiseEmailSubject :: EmailSubject -> [(B.ByteString, B.ByteString)]
serialiseEmailSubject = undefined
serialiseEmail :: Email -> [(B.ByteString, B.ByteString)]
-- Sketch (the original left this undefined): concatenate the field
-- serialisers; note the body and subject serialisers above are still stubs.
serialiseEmail (Email recipients cc bcc fromAddress
                senderName emailBody subject)
  = serialiseRecipients recipients ++ serialiseCc cc ++ serialiseBcc bcc
  ++ serialiseFrom fromAddress ++ serialiseSenderName senderName
  ++ serialiseEmailBody emailBody ++ serialiseEmailSubject subject
type SendGridEndpoint = String
sendEmail' :: SendGridEndpoint -> SendGridCredentials -> Email -> IO Reply
sendEmail' url creds email = do
initReq <- parseUrl url
let preBody = initReq { method = NHTM.methodPost
, checkStatus = \_ _ _ -> Nothing }
serialisedBody = (serialiseCredentials creds) ++ (serialiseEmail email)
withBody = urlEncodedBody serialisedBody preBody
withManager tlsManagerSettings $ httpLbs withBody
sendEmail :: SendGridCredentials -> Email -> IO SendGridResponse
sendEmail creds email = do
reply <- sendEmail' sendGridMailSendEndpoint creds email
return $ SendGridResponse SendGridSuccess []
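-- A possible refinement, sketched here rather than wired in: 'sendEmail'
-- above ignores the HTTP reply, so this helper shows one way to map the
-- status code of a 'Reply' onto a 'SendGridResponse'. The concrete status
-- codes are assumptions, not taken from SendGrid documentation.
replyToResponse :: Reply -> SendGridResponse
replyToResponse reply =
  case NHTS.statusCode (responseStatus reply) of
    code
      | code >= 200 && code < 300 -> SendGridResponse SendGridSuccess []
      | code == 401 || code == 403 ->
          SendGridResponse SendGridFailed [SendGridWrongCredentials]
      | otherwise ->
          SendGridResponse SendGridFailed
            [SendGridUnknownError (BL8.toStrict (responseBody reply))]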
| bitemyapp/sendgrid-haskell | src/Email/SendGrid/Client.hs | bsd-3-clause | 4,729 | 0 | 12 | 904 | 1,206 | 694 | 512 | 101 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_EXT_shader_atomic_float - device extension
--
-- == VK_EXT_shader_atomic_float
--
-- [__Name String__]
-- @VK_EXT_shader_atomic_float@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 261
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Contact__]
--
-- - Vikram Kushwaha
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_shader_atomic_float] @vkushwaha-nv%0A<<Here describe the issue or question you have about the VK_EXT_shader_atomic_float extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2020-07-15
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/EXT/SPV_EXT_shader_atomic_float_add.html SPV_EXT_shader_atomic_float_add>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/ext/GLSL_EXT_shader_atomic_float.txt GL_EXT_shader_atomic_float>
--
-- [__Contributors__]
--
-- - Vikram Kushwaha, NVIDIA
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension allows a shader to contain floating-point atomic
-- operations on buffer, workgroup, and image memory. It also advertises
-- the SPIR-V @AtomicFloat32AddEXT@ and @AtomicFloat64AddEXT@ capabilities
-- that allows atomic addition on floating-points numbers. The supported
-- operations include @OpAtomicFAddEXT@, @OpAtomicExchange@, @OpAtomicLoad@
-- and @OpAtomicStore@.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceShaderAtomicFloatFeaturesEXT'
--
-- == New Enum Constants
--
-- - 'EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME'
--
-- - 'EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT'
--
-- == New SPIR-V Capabilities
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-AtomicFloat32AddEXT AtomicFloat32AddEXT>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-AtomicFloat64AddEXT AtomicFloat64AddEXT>
--
-- == Version History
--
-- - Revision 1, 2020-07-15 (Vikram Kushwaha)
--
-- - Internal revisions
--
-- == See Also
--
-- 'PhysicalDeviceShaderAtomicFloatFeaturesEXT'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_shader_atomic_float Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_EXT_shader_atomic_float ( PhysicalDeviceShaderAtomicFloatFeaturesEXT(..)
, EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION
, pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION
, EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME
, pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT))
-- | VkPhysicalDeviceShaderAtomicFloatFeaturesEXT - Structure describing
-- features supported by VK_EXT_shader_atomic_float
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceShaderAtomicFloatFeaturesEXT' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceShaderAtomicFloatFeaturesEXT' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_shader_atomic_float VK_EXT_shader_atomic_float>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceShaderAtomicFloatFeaturesEXT = PhysicalDeviceShaderAtomicFloatFeaturesEXT
{ -- | #features-shaderBufferFloat32Atomics# @shaderBufferFloat32Atomics@
-- indicates whether shaders /can/ perform 32-bit floating-point load,
-- store and exchange atomic operations on storage buffers.
shaderBufferFloat32Atomics :: Bool
, -- | #features-shaderBufferFloat32AtomicAdd# @shaderBufferFloat32AtomicAdd@
-- indicates whether shaders /can/ perform 32-bit floating-point add atomic
-- operations on storage buffers.
shaderBufferFloat32AtomicAdd :: Bool
, -- | #features-shaderBufferFloat64Atomics# @shaderBufferFloat64Atomics@
-- indicates whether shaders /can/ perform 64-bit floating-point load,
-- store and exchange atomic operations on storage buffers.
shaderBufferFloat64Atomics :: Bool
, -- | #features-shaderBufferFloat64AtomicAdd# @shaderBufferFloat64AtomicAdd@
-- indicates whether shaders /can/ perform 64-bit floating-point add atomic
-- operations on storage buffers.
shaderBufferFloat64AtomicAdd :: Bool
, -- | #features-shaderSharedFloat32Atomics# @shaderSharedFloat32Atomics@
-- indicates whether shaders /can/ perform 32-bit floating-point load,
-- store and exchange atomic operations on shared memory.
shaderSharedFloat32Atomics :: Bool
, -- | #features-shaderSharedFloat32AtomicAdd# @shaderSharedFloat32AtomicAdd@
-- indicates whether shaders /can/ perform 32-bit floating-point add atomic
-- operations on shared memory.
shaderSharedFloat32AtomicAdd :: Bool
, -- | #features-shaderSharedFloat64Atomics# @shaderSharedFloat64Atomics@
-- indicates whether shaders /can/ perform 64-bit floating-point load,
-- store and exchange atomic operations on shared memory.
shaderSharedFloat64Atomics :: Bool
, -- | #features-shaderSharedFloat64AtomicAdd# @shaderSharedFloat64AtomicAdd@
-- indicates whether shaders /can/ perform 64-bit floating-point add atomic
-- operations on shared memory.
shaderSharedFloat64AtomicAdd :: Bool
, -- | #features-shaderImageFloat32Atomics# @shaderImageFloat32Atomics@
-- indicates whether shaders /can/ perform 32-bit floating-point load,
-- store and exchange atomic image operations.
shaderImageFloat32Atomics :: Bool
, -- | #features-shaderImageFloat32AtomicAdd# @shaderImageFloat32AtomicAdd@
-- indicates whether shaders /can/ perform 32-bit floating-point add atomic
-- image operations.
shaderImageFloat32AtomicAdd :: Bool
, -- | #features-sparseImageFloat32Atomics# @sparseImageFloat32Atomics@
-- indicates whether 32-bit floating-point load, store and exchange atomic
-- operations /can/ be used on sparse images.
sparseImageFloat32Atomics :: Bool
, -- | #features-sparseImageFloat32AtomicAdd# @sparseImageFloat32AtomicAdd@
-- indicates whether 32-bit floating-point add atomic operations /can/ be
-- used on sparse images.
sparseImageFloat32AtomicAdd :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderAtomicFloatFeaturesEXT)
#endif
deriving instance Show PhysicalDeviceShaderAtomicFloatFeaturesEXT
instance ToCStruct PhysicalDeviceShaderAtomicFloatFeaturesEXT where
withCStruct x f = allocaBytes 64 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceShaderAtomicFloatFeaturesEXT{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat32Atomics))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat32AtomicAdd))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat64Atomics))
poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat64AtomicAdd))
poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat32Atomics))
poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat32AtomicAdd))
poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat64Atomics))
poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat64AtomicAdd))
poke ((p `plusPtr` 48 :: Ptr Bool32)) (boolToBool32 (shaderImageFloat32Atomics))
poke ((p `plusPtr` 52 :: Ptr Bool32)) (boolToBool32 (shaderImageFloat32AtomicAdd))
poke ((p `plusPtr` 56 :: Ptr Bool32)) (boolToBool32 (sparseImageFloat32Atomics))
poke ((p `plusPtr` 60 :: Ptr Bool32)) (boolToBool32 (sparseImageFloat32AtomicAdd))
f
cStructSize = 64
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 48 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 52 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 56 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 60 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceShaderAtomicFloatFeaturesEXT where
peekCStruct p = do
shaderBufferFloat32Atomics <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
shaderBufferFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
shaderBufferFloat64Atomics <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
shaderBufferFloat64AtomicAdd <- peek @Bool32 ((p `plusPtr` 28 :: Ptr Bool32))
shaderSharedFloat32Atomics <- peek @Bool32 ((p `plusPtr` 32 :: Ptr Bool32))
shaderSharedFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 36 :: Ptr Bool32))
shaderSharedFloat64Atomics <- peek @Bool32 ((p `plusPtr` 40 :: Ptr Bool32))
shaderSharedFloat64AtomicAdd <- peek @Bool32 ((p `plusPtr` 44 :: Ptr Bool32))
shaderImageFloat32Atomics <- peek @Bool32 ((p `plusPtr` 48 :: Ptr Bool32))
shaderImageFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 52 :: Ptr Bool32))
sparseImageFloat32Atomics <- peek @Bool32 ((p `plusPtr` 56 :: Ptr Bool32))
sparseImageFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 60 :: Ptr Bool32))
pure $ PhysicalDeviceShaderAtomicFloatFeaturesEXT
(bool32ToBool shaderBufferFloat32Atomics) (bool32ToBool shaderBufferFloat32AtomicAdd) (bool32ToBool shaderBufferFloat64Atomics) (bool32ToBool shaderBufferFloat64AtomicAdd) (bool32ToBool shaderSharedFloat32Atomics) (bool32ToBool shaderSharedFloat32AtomicAdd) (bool32ToBool shaderSharedFloat64Atomics) (bool32ToBool shaderSharedFloat64AtomicAdd) (bool32ToBool shaderImageFloat32Atomics) (bool32ToBool shaderImageFloat32AtomicAdd) (bool32ToBool sparseImageFloat32Atomics) (bool32ToBool sparseImageFloat32AtomicAdd)
instance Storable PhysicalDeviceShaderAtomicFloatFeaturesEXT where
sizeOf ~_ = 64
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceShaderAtomicFloatFeaturesEXT where
zero = PhysicalDeviceShaderAtomicFloatFeaturesEXT
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
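-- An illustrative value, not part of the generated binding: request only the
-- 32-bit storage-buffer atomics, leaving every other feature disabled. In an
-- application this record would typically be passed on the @pNext@ chain of
-- 'Vulkan.Core10.Device.DeviceCreateInfo', as described above.
exampleShaderAtomicFloatFeatures :: PhysicalDeviceShaderAtomicFloatFeaturesEXT
exampleShaderAtomicFloatFeatures = zero
  { shaderBufferFloat32Atomics   = True
  , shaderBufferFloat32AtomicAdd = True
  }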
type EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION"
pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION = 1
type EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME = "VK_EXT_shader_atomic_float"
-- No documentation found for TopLevel "VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME"
pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME = "VK_EXT_shader_atomic_float"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_EXT_shader_atomic_float.hs | bsd-3-clause | 14,106 | 0 | 14 | 2,267 | 2,299 | 1,341 | 958 | -1 | -1 |
module Main ( main ) where
import Debug.Trace
import Control.Exception ( evaluate )
main :: IO ()
main = do
putStrLn "The test is successful if the word 'Evaluated' appears only once below:"
evaluate $ let x = trace "Evaluated" (1 + 1) in x + (trace "Evaluated" (1 + 1)) + x
return () | thoughtpolice/cse-ghc-plugin | tests/Traced.hs | bsd-3-clause | 298 | 0 | 15 | 68 | 106 | 54 | 52 | 8 | 1 |
--------------------------------------------------------------------
-- |
-- Module : Text.TDoc.QQ
-- Copyright : (c) Nicolas Pouillard 2009-2011
-- License : BSD3
--
-- Maintainer : Nicolas Pouillard <nicolas.pouillard@gmail.com>
--
--------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell, FlexibleContexts #-}
module Text.TDoc.QQ (
-- * frquotes support
frQQ, frTop, frAntiq) where
import qualified Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Text.TDoc (spanDoc, Star, Span, SpanTag(..), ToChildren(..), ChildOf(..))
import Data.Char (isSpace)
import Data.Monoid
frTop :: SpanTag t => Star t Span
frTop = spanDoc
frAntiq :: ToChildren a t father => a -> [ChildOf t father]
frAntiq = toChildren
expandingQQExpr :: String -> TH.ExpQ
expandingQQExpr = chunk . stripIndents
where
chunk x | null x = TH.varE 'mempty
| otherwise = TH.varE 'toChildren `TH.appE` TH.stringE x
stripIndents :: String -> String
stripIndents = go
where go (x:xs) | isSpace x = ' ' : go (dropWhile isSpace xs)
| otherwise = x:go xs
go "" = ""
quasiQuoter :: String -> QuasiQuoter
quasiQuoter qqName =
QuasiQuoter (err "expressions") (err "patterns")
-- if GHC7
(err "types") (err "declarations")
-- endif
where err kind _ = error $ qqName ++ ": not available in " ++ kind
frQQ :: QuasiQuoter
frQQ = (quasiQuoter "Text.TDoc.QQ.frQQ"){quoteExp = expandingQQExpr }
| np/tdoc | Text/TDoc/QQ.hs | bsd-3-clause | 1,507 | 0 | 11 | 303 | 404 | 224 | 180 | 28 | 2 |
-- | Extra functions to help DFM deal with operators tree.
module Youtan.Regex.OperatorsExtra where
import Control.Monad.State
import Youtan.Regex.Operators ( Counter(..), Operator(..), OperatorID, initID, nextFreeID )
-- | Replaces counters in a tree with 'KleeneStar' keeping the input language.
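-- For example, @a+@ ('OneOrMore') is rewritten to @a(a*)@, and @a?@
-- ('ZeroOrOne') to a choice between @a@ and the empty operator.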
simplifyCounter :: Operator -> Operator
simplifyCounter ( Disjunction i oper1 oper2 )
= Disjunction i ( simplifyCounter oper1 ) ( simplifyCounter oper2 )
simplifyCounter ( Concatenation i oper1 oper2 )
= Concatenation i ( simplifyCounter oper1 ) ( simplifyCounter oper2 )
simplifyCounter ( Counts i c oper ) = case c of
KleeneStar -> Counts i c o
OneOrMore -> Concatenation initID o ( Counts i KleeneStar o )
ZeroOrOne -> Disjunction i o ( Empty initID )
where
o = simplifyCounter oper
simplifyCounter ( Group oper ) = Group ( simplifyCounter oper )
simplifyCounter oper = oper
-- | Assigns id to each and every single node (except for 'Group') of a tree.
assignIDs :: Operator -> State OperatorID Operator
assignIDs o = case o of
Empty _ -> Empty <$> nextID
Literal _ c -> Literal <$> nextID <*> return c
Concatenation _ oper1 oper2 ->
Concatenation <$> nextID <*> assignIDs oper1 <*> assignIDs oper2
Disjunction _ oper1 oper2 ->
Disjunction <$> nextID <*> assignIDs oper1 <*> assignIDs oper2
Counts _ c oper ->
Counts <$> nextID <*> return c <*> assignIDs oper
CharClass _ c -> CharClass <$> nextID <*> return c
Group oper -> Group <$> assignIDs oper
where
nextID :: State OperatorID OperatorID
nextID = modify nextFreeID >> get
-- TODO: Replace me with data fields.
-- | Returns id of operator.
operID :: Operator -> OperatorID
operID ( Empty i ) = i
operID ( Literal i _ ) = i
operID ( Disjunction i _ _ ) = i
operID ( Concatenation i _ _ ) = i
operID ( Counts i _ _ ) = i
operID ( CharClass i _ ) = i
operID ( Group oper ) = operID oper
| triplepointfive/Youtan | src/Youtan/Regex/OperatorsExtra.hs | bsd-3-clause | 1,903 | 0 | 10 | 391 | 592 | 296 | 296 | 37 | 7 |
import qualified TUDMensa as T
main :: IO ()
main = T.tudMensa T.defaultOpts
| dschoepe/tud-mensa | Main.hs | bsd-3-clause | 64 | 1 | 6 | 10 | 22 | 11 | 11 | 2 | 1 |
{- Data/Singletons/Util.hs
(c) Richard Eisenberg 2013
eir@cis.upenn.edu
This file contains helper functions internal to the singletons package.
Users of the package should not need to consult this file.
-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RankNTypes,
TemplateHaskell, GeneralizedNewtypeDeriving,
MultiParamTypeClasses, StandaloneDeriving,
UndecidableInstances, MagicHash, UnboxedTuples,
LambdaCase, NoMonomorphismRestriction #-}
module Data.Singletons.Util where
import Prelude hiding ( exp, foldl, concat, mapM, any, pred )
import Language.Haskell.TH.Syntax hiding ( lift )
import Language.Haskell.TH.Desugar
import Data.Char
import Control.Monad hiding ( mapM )
import Control.Monad.Writer hiding ( mapM )
import Control.Monad.Reader hiding ( mapM )
import qualified Data.Map as Map
import Data.List.NonEmpty (NonEmpty)
import Data.Map ( Map )
import Data.Foldable
import Data.Traversable
import Data.Generics
import Control.Monad.Fail ( MonadFail )
-- The list of types that singletons processes by default
basicTypes :: [Name]
basicTypes = [ ''Maybe
, ''[]
, ''Either
, ''NonEmpty
] ++ boundedBasicTypes
boundedBasicTypes :: [Name]
boundedBasicTypes =
[ ''(,)
, ''(,,)
, ''(,,,)
, ''(,,,,)
, ''(,,,,,)
, ''(,,,,,,)
] ++ enumBasicTypes
enumBasicTypes :: [Name]
enumBasicTypes = [ ''Bool, ''Ordering, ''() ]
-- like reportWarning, but generalized to any Quasi
qReportWarning :: Quasi q => String -> q ()
qReportWarning = qReport False
-- like reportError, but generalized to any Quasi
qReportError :: Quasi q => String -> q ()
qReportError = qReport True
-- | Generate a new Unique
qNewUnique :: DsMonad q => q Int
qNewUnique = do
Name _ flav <- qNewName "x"
case flav of
NameU n -> return n
_ -> error "Internal error: `qNewName` didn't return a NameU"
checkForRep :: Quasi q => [Name] -> q ()
checkForRep names =
when (any ((== "Rep") . nameBase) names)
(fail $ "A data type named <<Rep>> is a special case.\n" ++
"Promoting it will not work as expected.\n" ++
"Please choose another name for your data type.")
checkForRepInDecls :: Quasi q => [DDec] -> q ()
checkForRepInDecls decls =
checkForRep (allNamesIn decls)
tysOfConFields :: DConFields -> [DType]
tysOfConFields (DNormalC stys) = map snd stys
tysOfConFields (DRecC vstys) = map (\(_,_,ty) -> ty) vstys
-- extract the name and number of arguments to a constructor
extractNameArgs :: DCon -> (Name, Int)
extractNameArgs = liftSnd length . extractNameTypes
-- extract the name and types of constructor arguments
extractNameTypes :: DCon -> (Name, [DType])
extractNameTypes (DCon _ _ n fields _) = (n, tysOfConFields fields)
extractName :: DCon -> Name
extractName (DCon _ _ n _ _) = n
-- is an identifier uppercase?
isUpcase :: Name -> Bool
isUpcase n = let first = head (nameBase n) in isUpper first || first == ':'
-- make an identifier uppercase
upcase :: Name -> Name
upcase = mkName . toUpcaseStr noPrefix
-- make an identifier uppercase and return it as a String
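-- (with empty prefixes, e.g. "foo" becomes "Foo" and an operator "+" becomes ":+")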
toUpcaseStr :: (String, String) -- (alpha, symb) prefixes to prepend
-> Name -> String
toUpcaseStr (alpha, symb) n
| isHsLetter first
= upcase_alpha
| otherwise
= upcase_symb
where
str = nameBase n
first = head str
upcase_alpha = alpha ++ (toUpper first) : tail str
upcase_symb
| first == ':'
|| first == '$' -- special case to avoid name clashes. See #29
= symb ++ str
| otherwise
= symb ++ ':' : str
noPrefix :: (String, String)
noPrefix = ("", "")
-- make an identifier lowercase
locase :: Name -> Name
locase n =
let str = nameBase n
first = head str in
if isHsLetter first
then mkName ((toLower first) : tail str)
else mkName (tail str) -- remove the ":"
-- put an uppercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixUCName :: String -> String -> Name -> Name
prefixUCName pre tyPre n = case (nameBase n) of
(':' : rest) -> mkName (tyPre ++ rest)
alpha -> mkName (pre ++ alpha)
-- put a lowercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixLCName :: String -> String -> Name -> Name
prefixLCName pre tyPre n =
let str = nameBase n
first = head str in
if isHsLetter first
then mkName (pre ++ str)
else mkName (tyPre ++ str)
suffixName :: String -> String -> Name -> Name
suffixName ident symb n =
let str = nameBase n
first = head str in
if isHsLetter first
then mkName (str ++ ident)
else mkName (str ++ symb)
-- convert a number into both alphanumeric and symbolic forms
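-- (e.g. uniquePrefixes "Let" ":<<<" 12 yields ("Let12", ":<<<#$"))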
uniquePrefixes :: String -- alphanumeric prefix
-> String -- symbolic prefix
-> Int
-> (String, String) -- (alphanum, symbolic)
uniquePrefixes alpha symb n = (alpha ++ n_str, symb ++ convert n_str)
where
n_str = show n
convert [] = []
convert (d : ds) =
let d' = case d of
'0' -> '!'
'1' -> '#'
'2' -> '$'
'3' -> '%'
'4' -> '&'
'5' -> '*'
'6' -> '+'
'7' -> '.'
'8' -> '/'
'9' -> '>'
_ -> error "non-digit in show #"
in d' : convert ds
-- extract the kind from a TyVarBndr
extractTvbKind :: DTyVarBndr -> Maybe DKind
extractTvbKind (DPlainTV _) = Nothing
extractTvbKind (DKindedTV _ k) = Just k
-- extract the name from a TyVarBndr.
extractTvbName :: DTyVarBndr -> Name
extractTvbName (DPlainTV n) = n
extractTvbName (DKindedTV n _) = n
tvbToType :: DTyVarBndr -> DType
tvbToType = DVarT . extractTvbName
inferMaybeKindTV :: Name -> Maybe DKind -> DTyVarBndr
inferMaybeKindTV n Nothing = DPlainTV n
inferMaybeKindTV n (Just k) = DKindedTV n k
resultSigToMaybeKind :: DFamilyResultSig -> Maybe DKind
resultSigToMaybeKind DNoSig = Nothing
resultSigToMaybeKind (DKindSig k) = Just k
resultSigToMaybeKind (DTyVarSig (DPlainTV _)) = Nothing
resultSigToMaybeKind (DTyVarSig (DKindedTV _ k)) = Just k
-- Get argument types from an arrow type. Removing ForallT is an
-- important preprocessing step required by promoteType.
unravel :: DType -> ([DTyVarBndr], [DPred], [DType], DType)
unravel (DForallT tvbs cxt ty) =
let (tvbs', cxt', tys, res) = unravel ty in
(tvbs ++ tvbs', cxt ++ cxt', tys, res)
unravel (DAppT (DAppT DArrowT t1) t2) =
let (tvbs, cxt, tys, res) = unravel t2 in
(tvbs, cxt, t1 : tys, res)
unravel t = ([], [], [], t)
-- Reconstruct arrow kind from the list of kinds
ravel :: [DType] -> DType -> DType
ravel [] res = res
ravel (h:t) res = DAppT (DAppT DArrowT h) (ravel t res)
-- count the number of arguments in a type
countArgs :: DType -> Int
countArgs ty = length args
where (_, _, args, _) = unravel ty
-- changes all TyVars not to be NameU's. Workaround for GHC#11812
noExactTyVars :: Data a => a -> a
noExactTyVars = everywhere go
where
go :: Data a => a -> a
go = mkT fix_tvb `extT` fix_ty `extT` fix_inj_ann
no_exact_name :: Name -> Name
no_exact_name (Name (OccName occ) (NameU unique)) = mkName (occ ++ show unique)
no_exact_name n = n
fix_tvb (DPlainTV n) = DPlainTV (no_exact_name n)
fix_tvb (DKindedTV n k) = DKindedTV (no_exact_name n) k
fix_ty (DVarT n) = DVarT (no_exact_name n)
fix_ty ty = ty
fix_inj_ann (InjectivityAnn lhs rhs)
= InjectivityAnn (no_exact_name lhs) (map no_exact_name rhs)
substKind :: Map Name DKind -> DKind -> DKind
substKind = substType
substType :: Map Name DType -> DType -> DType
substType subst ty | Map.null subst = ty
substType subst (DForallT tvbs cxt inner_ty)
= DForallT tvbs' cxt' inner_ty'
where
(subst', tvbs') = mapAccumL subst_tvb subst tvbs
cxt' = map (substPred subst') cxt
inner_ty' = substType subst' inner_ty
subst_tvb s tvb@(DPlainTV n) = (Map.delete n s, tvb)
subst_tvb s (DKindedTV n k) = (Map.delete n s, DKindedTV n (substKind s k))
substType subst (DAppT ty1 ty2) = substType subst ty1 `DAppT` substType subst ty2
substType subst (DSigT ty ki) = substType subst ty `DSigT` substType subst ki
substType subst (DVarT n) =
case Map.lookup n subst of
Just ki -> ki
Nothing -> DVarT n
substType _ ty@(DConT {}) = ty
substType _ ty@(DArrowT) = ty
substType _ ty@(DLitT {}) = ty
substType _ ty@DWildCardT = ty
substType _ ty@DStarT = ty
substPred :: Map Name DType -> DPred -> DPred
substPred subst pred | Map.null subst = pred
substPred subst (DAppPr pred ty) =
DAppPr (substPred subst pred) (substType subst ty)
substPred subst (DSigPr pred ki) = DSigPr (substPred subst pred) ki
substPred _ pred@(DVarPr {}) = pred
substPred _ pred@(DConPr {}) = pred
substPred _ pred@DWildCardPr = pred
substKindInPred :: Map Name DKind -> DPred -> DPred
substKindInPred subst pred | Map.null subst = pred
substKindInPred subst (DAppPr pred ty) =
DAppPr (substKindInPred subst pred) (substType subst ty)
substKindInPred subst (DSigPr pred ki) = DSigPr (substKindInPred subst pred)
(substKind subst ki)
substKindInPred _ pred@(DVarPr {}) = pred
substKindInPred _ pred@(DConPr {}) = pred
substKindInPred _ pred@DWildCardPr = pred
substKindInTvb :: Map Name DKind -> DTyVarBndr -> DTyVarBndr
substKindInTvb _ tvb@(DPlainTV _) = tvb
substKindInTvb subst (DKindedTV n ki) = DKindedTV n (substKind subst ki)
addStar :: DKind -> DKind
addStar t = DAppT (DAppT DArrowT t) DStarT
addStar_maybe :: Maybe DKind -> Maybe DKind
addStar_maybe = fmap addStar
-- apply a type to a list of types
foldType :: DType -> [DType] -> DType
foldType = foldl DAppT
-- apply an expression to a list of expressions
foldExp :: DExp -> [DExp] -> DExp
foldExp = foldl DAppE
-- is a function type?
isFunTy :: DType -> Bool
isFunTy (DAppT (DAppT DArrowT _) _) = True
isFunTy (DForallT _ _ _) = True
isFunTy _ = False
-- choose the first non-empty list
orIfEmpty :: [a] -> [a] -> [a]
orIfEmpty [] x = x
orIfEmpty x _ = x
emptyMatches :: [DMatch]
emptyMatches = [DMatch DWildPa (DAppE (DVarE 'error) (DLitE (StringL errStr)))]
where errStr = "Empty case reached -- this should be impossible"
-- build a pattern match over several expressions, each with only one pattern
multiCase :: [DExp] -> [DPat] -> DExp -> DExp
multiCase [] [] body = body
multiCase scruts pats body =
DCaseE (mkTupleDExp scruts) [DMatch (mkTupleDPat pats) body]
-- Make a desugar function into a TH function.
wrapDesugar :: (Desugar th ds, DsMonad q) => (th -> ds -> q ds) -> th -> q th
wrapDesugar f th = do
ds <- desugar th
fmap sweeten $ f th ds
-- a monad transformer for writing a monoid alongside returning a Q
newtype QWithAux m q a = QWA { runQWA :: WriterT m q a }
deriving ( Functor, Applicative, Monad, MonadTrans
, MonadWriter m, MonadReader r
, MonadFail )
-- make a Quasi instance for easy lifting
instance (Quasi q, Monoid m) => Quasi (QWithAux m q) where
qNewName = lift `comp1` qNewName
qReport = lift `comp2` qReport
qLookupName = lift `comp2` qLookupName
qReify = lift `comp1` qReify
qReifyInstances = lift `comp2` qReifyInstances
qLocation = lift qLocation
qRunIO = lift `comp1` qRunIO
qAddDependentFile = lift `comp1` qAddDependentFile
qReifyRoles = lift `comp1` qReifyRoles
qReifyAnnotations = lift `comp1` qReifyAnnotations
qReifyModule = lift `comp1` qReifyModule
qAddTopDecls = lift `comp1` qAddTopDecls
qAddModFinalizer = lift `comp1` qAddModFinalizer
qGetQ = lift qGetQ
qPutQ = lift `comp1` qPutQ
qReifyFixity = lift `comp1` qReifyFixity
qReifyConStrictness = lift `comp1` qReifyConStrictness
qIsExtEnabled = lift `comp1` qIsExtEnabled
qExtsEnabled = lift qExtsEnabled
qRecover exp handler = do
(result, aux) <- lift $ qRecover (evalForPair exp) (evalForPair handler)
tell aux
return result
instance (DsMonad q, Monoid m) => DsMonad (QWithAux m q) where
localDeclarations = lift localDeclarations
-- helper functions for composition
comp1 :: (b -> c) -> (a -> b) -> a -> c
comp1 = (.)
comp2 :: (c -> d) -> (a -> b -> c) -> a -> b -> d
comp2 f g a b = f (g a b)
-- run a computation with an auxiliary monoid, discarding the monoid result
evalWithoutAux :: Quasi q => QWithAux m q a -> q a
evalWithoutAux = liftM fst . runWriterT . runQWA
-- run a computation with an auxiliary monoid, returning only the monoid result
evalForAux :: Quasi q => QWithAux m q a -> q m
evalForAux = execWriterT . runQWA
-- run a computation with an auxiliary monoid, return both the result
-- of the computation and the monoid result
evalForPair :: QWithAux m q a -> q (a, m)
evalForPair = runWriterT . runQWA
-- in a computation with an auxiliary map, add a binding to the map
addBinding :: (Quasi q, Ord k) => k -> v -> QWithAux (Map.Map k v) q ()
addBinding k v = tell (Map.singleton k v)
-- in a computation with an auxiliary list, add an element to the list
addElement :: Quasi q => elt -> QWithAux [elt] q ()
addElement elt = tell [elt]
-- lift concatMap into a monad
-- could this be more efficient?
concatMapM :: (Monad monad, Monoid monoid, Traversable t)
=> (a -> monad monoid) -> t a -> monad monoid
concatMapM fn list = do
bss <- mapM fn list
return $ fold bss
-- make a one-element list
listify :: a -> [a]
listify = (:[])
fstOf3 :: (a,b,c) -> a
fstOf3 (a,_,_) = a
liftFst :: (a -> b) -> (a, c) -> (b, c)
liftFst f (a, c) = (f a, c)
liftSnd :: (a -> b) -> (c, a) -> (c, b)
liftSnd f (c, a) = (c, f a)
snocView :: [a] -> ([a], a)
snocView [] = error "snocView nil"
snocView [x] = ([], x)
snocView (x : xs) = liftFst (x:) (snocView xs)
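-- partition a list according to an Either-returning function, e.g.
-- partitionWith (\n -> if even n then Left n else Right n) [1..5]
-- yields ([2,4], [1,3,5])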
partitionWith :: (a -> Either b c) -> [a] -> ([b], [c])
partitionWith f = go [] []
where go bs cs [] = (reverse bs, reverse cs)
go bs cs (a:as) =
case f a of
Left b -> go (b:bs) cs as
Right c -> go bs (c:cs) as
partitionWithM :: Monad m => (a -> m (Either b c)) -> [a] -> m ([b], [c])
partitionWithM f = go [] []
where go bs cs [] = return (reverse bs, reverse cs)
go bs cs (a:as) = do
fa <- f a
case fa of
Left b -> go (b:bs) cs as
Right c -> go bs (c:cs) as
partitionLetDecs :: [DDec] -> ([DLetDec], [DDec])
partitionLetDecs = partitionWith (\case DLetDec ld -> Left ld
dec -> Right dec)
mapAndUnzip3M :: Monad m => (a -> m (b,c,d)) -> [a] -> m ([b],[c],[d])
mapAndUnzip3M _ [] = return ([],[],[])
mapAndUnzip3M f (x:xs) = do
(r1, r2, r3) <- f x
(rs1, rs2, rs3) <- mapAndUnzip3M f xs
return (r1:rs1, r2:rs2, r3:rs3)
-- is it a letter or underscore?
isHsLetter :: Char -> Bool
isHsLetter c = isLetter c || c == '_'
| int-index/singletons | src/Data/Singletons/Util.hs | bsd-3-clause | 15,210 | 0 | 14 | 3,843 | 5,122 | 2,715 | 2,407 | 331 | 12 |
module Emit where
import Syntax
import Codegen
import LLVM.Module
import LLVM.AST as AST
import LLVM.Context
import Control.Monad.Except
runEmit :: ExceptT String IO String -> IO String
runEmit e = do
result <- runExceptT e
case result of
Right code -> return code
    Left err -> putStrLn err >> return ""
emitInContext :: AST.Module -> Context -> IO String
emitInContext fileModule ctx =
runEmit $ withModuleFromAST ctx fileModule moduleLLVMAssembly
makeModule :: FilePath -> AST.Module
makeModule filepath =
defaultModule {
moduleName = filepath,
moduleSourceFileName = filepath
}
emit :: FilePath -> Program -> IO String
emit filepath ast = do
let fileModule = makeModule filepath
let finalModule =
runLLVM fileModule . mapM codegenTop . programClasses $ ast
withContext $ emitInContext finalModule
| poiuj/pfcc | src/Emit.hs | bsd-3-clause | 846 | 0 | 13 | 162 | 255 | 127 | 128 | 27 | 2 |
module ParserUtil ( runGet
, runGetMaybe
, parseManyLazily
, parseManyLazyByteStringLazily
, parseKeepRaw
, match
, match_
, eof
) where
import qualified Control.Monad as M
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString as BS
import qualified Data.Serialize.Get as Get
import Control.Applicative ((<$>), (*>), (<*>), pure)
runGet :: Get.Get a -> BS.ByteString -> a
runGet p s = either error id $ Get.runGet p s
{-# INLINE runGet #-}
runGetMaybe :: Get.Get a -> BS.ByteString -> Maybe a
runGetMaybe p s = either (const Nothing) Just $ Get.runGet p s
{-# INLINE runGetMaybe #-}
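-- | Repeatedly run the parser over the input, lazily producing results until
-- the parser fails or the input is exhausted.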
parseManyLazily :: Get.Get a -> BS.ByteString -> [a]
parseManyLazily p s | BS.null s = []
| otherwise = case Get.runGetState p s 0 of
Right (r, rest) -> r : parseManyLazily p rest
Left _ -> []
{-# INLINE parseManyLazily #-}
parseManyLazyByteStringLazily :: Get.Get a -> BSL.ByteString -> [a]
parseManyLazyByteStringLazily p = concatMap (parseManyLazily p) . BSL.toChunks
match :: Eq a => Get.Get a -> a -> Get.Get a
match p test = do
result <- p
if result == test
then return result
else fail ""
{-# INLINE match #-}
match_ :: Eq a => Get.Get a -> a -> Get.Get ()
match_ p test = match p test *> pure ()
{-# INLINE match_ #-}
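-- | Run a parser and also return the exact bytes it consumed, measured by
-- comparing how much input remains before and after the parse.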
parseKeepRaw :: Get.Get a -> Get.Get (BS.ByteString, a)
parseKeepRaw g = do
(len, r) <- Get.lookAhead $ do
(res,after) <- Get.lookAhead $ (,) <$> g <*> Get.remaining
total <- Get.remaining
return (total-after, res)
bs <- Get.getBytes len
return (bs, r)
{-# INLINE parseKeepRaw #-}
eof :: Get.Get ()
eof = do
empty <- Get.isEmpty
M.unless empty $ fail "expected eof"
{-# INLINE eof #-}
| benma/blockchain-parser-hs | src/ParserUtil.hs | bsd-3-clause | 1,869 | 0 | 15 | 527 | 646 | 336 | 310 | 51 | 2 |
{-|
This is a module for cross-platform file handling on Unix\/Mac\/Windows.
The standard modules "System.Directory" and "System.FilePath" have the
following shortcomings:
* getModificationTime exists in "System.Directory", but getAccessTime,
getChangeTime, and getCreationTime do not exist.
* getModificationTime returns the obsolete type 'ClockTime'. It should
return the modern type 'UTCTime', I believe.
* Some file functions are missing. A function to report the link count,
for instance.
* The path separator is not unified. Even though Windows accepts \'\/\' as a
file separator, getCurrentDirectory in "System.Directory" returns \'\\\'
as the file separator. So, we need to specify a regular expression like
this: \"[\/\\\\]foo[\/\\\\]bar[\/\\\\]baz\".
* getHomeDirectory returns @HOMEDRIVE@\/@HOMEPATH@ instead of the @HOME@
environment variable on Windows.
This module aims to resolve these problems and provides:
* 'getModificationTime', 'getAccessTime', 'getChangeTime', and
'getCreationTime'. They return 'UTCTime'.
* 'isSymlink', 'getLinkCount', and 'hasSubDirectories'.
* \'\/\' as the single 'pathSeparator'. For instance,
'getCurrentDirectory' returns a path whose separator is \'\/\'
even on Windows.
* 'getHomeDirectory2' which refers the @HOME@ environment variable.
* Necessary functions in "System.Directory" and "System.FilePath".
-}
module System.EasyFile (
-- * Actions on directories
createDirectory
, createDirectoryIfMissing
, removeDirectory
, removeDirectoryRecursive
, renameDirectory
, getDirectoryContents
, getCurrentDirectory
, setCurrentDirectory
-- * Pre-defined directories
, getHomeDirectory
, getHomeDirectory2 -- missing
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
-- * Actions on files
, removeFile
, renameFile
, copyFile
, canonicalizePath
-- , makeRelativeToCurrentDirectory -- xxx
-- , findExecutable -- xxx
-- * Existence tests
, doesFileExist
, doesDirectoryExist
-- * Permissions
, Permissions(..)
, getPermissions
, setPermissions
, copyPermissions
-- * Timestamps
, getCreationTime
, getChangeTime
, getModificationTime
, getAccessTime
-- * Size
, getFileSize
-- * File\/directory information
, isSymlink
, getLinkCount
, hasSubDirectories
, module System.EasyFile.FilePath
) where
----------------------------------------------------------------
import System.EasyFile.Directory
import System.EasyFile.FilePath
import System.EasyFile.Missing
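-- For example, the 'getModificationTime' re-exported here returns a 'UTCTime':
--
-- > do t <- getModificationTime "somefile.txt"
-- >    print t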
| kazu-yamamoto/easy-file | System/EasyFile.hs | bsd-3-clause | 2,530 | 0 | 5 | 408 | 143 | 100 | 43 | 36 | 0 |
module Jerimum.Storage.PostgreSQL.Setup
( Context(..)
, setup
, destroy
, findSegment
, openSegment
, closeSegment
, addSchemas
) where
import Control.Monad
import qualified Data.Map as M
import Data.Monoid
import qualified Data.Text as T
import Data.UUID.Types
import qualified Database.PostgreSQL.Simple as PQ
import Database.PostgreSQL.Simple.Types (Identifier (..))
import qualified Jerimum.Storage.PostgreSQL.Schemas.EventType as EventType
import Jerimum.Storage.PostgreSQL.SqlMonad
data Context = Context
{ segmentId :: UUID
, databaseName :: T.Text
, schemasTable :: Identifier
, eventsTable :: Identifier
, knownSchemas :: M.Map EventType.Version UUID
} deriving (Show)
addSchemas :: Context -> M.Map EventType.Version UUID -> Context
addSchemas ctx newSchemas =
ctx {knownSchemas = M.union (knownSchemas ctx) newSchemas}
createSegmentTable :: PQ.Query
createSegmentTable =
"CREATE TABLE IF NOT EXISTS segments" <> " (" <>
" segment_id uuid not null primary key" <>
" , dbname text not null" <>
" , lsn_lower pg_lsn" <>
" , lsn_upper pg_lsn" <>
" , time_lower bigint" <>
" , time_upper bigint" <>
" , is_open boolean not null" <>
" CHECK ((lsn_upper - lsn_lower) < 1073741824)" <>
" , EXCLUDE (is_open with =) WHERE (is_open)" <>
" );"
createSchemasMasterTable :: PQ.Query
createSchemasMasterTable =
"CREATE TABLE IF NOT EXISTS schemas" <> " (" <>
" segment_id uuid not null" <>
" , schema_id uuid not null" <>
" , schema_version bytea not null" <>
" , schema_type smallint" <>
" , table_schema text" <>
" , table_name text" <>
" , table_cols text[]" <>
" , table_types bytea" <>
" , message_prefix text" <>
" , message_transactional boolean" <>
" , updated_at timestamptz" <>
" );"
createEventsMasterTable :: PQ.Query
createEventsMasterTable =
"CREATE TABLE IF NOT EXISTS events" <> "(" <> " segment_id uuid not null" <>
", lsn pg_lsn not null" <>
", xno integer not null" <>
", len integer not null" <>
", timestamp bigint not null" <>
", schema_ids uuid[]" <>
", bin_events bytea" <>
", updated_at timestamptz" <>
");"
setup :: SqlMonad ()
setup =
performSQL $ \conn -> do
_ <- PQ.execute_ conn createSegmentTable
_ <- PQ.execute_ conn createSchemasMasterTable
void $ PQ.execute_ conn createEventsMasterTable
destroy :: SqlMonad ()
destroy =
performSQL $ \conn -> do
_ <- PQ.execute_ conn "DROP TABLE IF EXISTS events CASCADE"
_ <- PQ.execute_ conn "DROP TABLE IF EXISTS schemas CASCADE"
void $ PQ.execute_ conn "DROP TABLE IF EXISTS segments CASCADE"
closeSegment :: UUID -> SqlMonad ()
closeSegment uuid =
let query =
"UPDATE segments" <> " SET is_open = false" <> " WHERE segment_id = ?"
in performSQL $ \conn -> void $ PQ.execute conn query [uuid]
findSegment :: T.Text -> SqlMonad (Maybe Context)
findSegment dbname =
let query =
"SELECT segment_id FROM segments" <> " WHERE is_open AND dbname = ?"
params = [dbname]
in do results <- performSQL $ \conn -> PQ.query conn query params
case results of
[PQ.Only (Just segmentId)] ->
pure (Just $ makeContext dbname segmentId)
_ -> pure Nothing
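-- Derives the per-segment context: the table identifiers embed the segment id
-- with the dashes stripped, so a segment id of
-- 123e4567-e89b-12d3-a456-426614174000 yields the tables
-- schemas_123e4567e89b12d3a456426614174000 and
-- events_123e4567e89b12d3a456426614174000.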
makeContext :: T.Text -> UUID -> Context
makeContext dbname uuid =
let suffix = T.replace "-" "" (toText uuid)
in Context
uuid
dbname
(Identifier $ "schemas_" <> suffix)
(Identifier $ "events_" <> suffix)
M.empty
openSegment :: T.Text -> UUID -> SqlMonad Context
openSegment dbname uuid = do
performSQL $ \conn ->
PQ.withTransaction conn $ do
insertSegment conn
createSchemasTable conn
createEventsTable conn
pure context
where
context = makeContext dbname uuid
insertSegment conn =
let query =
"INSERT INTO segments " <> " ( segment_id, dbname, is_open )" <>
" VALUES (?, ?, true)"
in void $ PQ.execute conn query (uuid, dbname)
createSchemasTable conn =
let query =
"CREATE TABLE ?" <> " ( CHECK (segment_id = ?)" <>
" , PRIMARY KEY (schema_id)" <>
" , UNIQUE (schema_version)" <>
" , FOREIGN KEY (segment_id) REFERENCES segments (segment_id)" <>
" )" <>
" INHERITS (schemas)"
in void $ PQ.execute conn query (schemasTable context, uuid)
createEventsTable conn =
let query =
"CREATE TABLE ?" <> " ( CHECK (segment_id = ?)" <>
" , PRIMARY KEY (lsn, xno)" <>
" , FOREIGN KEY (segment_id) REFERENCES segments (segment_id)" <>
" )" <>
" INHERITS (events)"
in void $ PQ.execute conn query (eventsTable context, uuid)
| dgvncsz0f/nws | src/Jerimum/Storage/PostgreSQL/Setup.hs | bsd-3-clause | 5,022 | 0 | 17 | 1,472 | 1,060 | 551 | 509 | 133 | 2 |
-- Can get Pythagorean triplets via 2ab, a^2-b^2,a^2+b^2 trick
-- Brute-force some values of a and b
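-- (valid since (a^2-b^2)^2 + (2*a*b)^2 == (a^2+b^2)^2 for all a > b > 0)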
main :: IO ()
main = print answer
where possibleTrips :: [(Int,Int)]
possibleTrips = [(a,b) | a <- [1..500], b <- [1..500]]
passedTrips = filter (\(a,b) -> a>b && (2*a*b)+(a*a-b*b)+(a*a+b*b) == 1000) possibleTrips
x = fst $ head passedTrips
y = snd $ head passedTrips
answer = (2*x*y)*(x*x-y*y)*(x*x+y*y)
| akerber47/haskalah | test/files/euler/9.hs | bsd-3-clause | 462 | 6 | 18 | 121 | 249 | 131 | 118 | 8 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
module Language.Fixpoint.Solver.Eliminate
(eliminateAll) where
import Language.Fixpoint.Types
import Language.Fixpoint.Types.Names (existSymbol)
import Language.Fixpoint.Types.Visitor (kvars)
import Language.Fixpoint.Solver.Deps (depNonCuts, deps)
import Language.Fixpoint.Misc (fst3)
import Language.Fixpoint.Solver.Solution (Solution, mkJVar)
import qualified Data.HashMap.Strict as M
import Data.List (foldl')
import Control.Arrow (first, second)
import Control.DeepSeq (($!!))
--------------------------------------------------------------
eliminateAll :: SInfo a -> (Solution, SInfo a)
eliminateAll !fi = {-# SCC "eliminateAll" #-} foldl' eliminate (M.empty, fi) nonCuts
where
nonCuts = depNonCuts $ deps fi
--------------------------------------------------------------
eliminate :: (Solution, SInfo a) -> KVar -> (Solution, SInfo a)
eliminate (!s, !fi) k = (M.insert k (mkJVar orPred) s, fi { cm = remainingCs , ws = M.delete k $ ws fi })
where
relevantCs = M.filter ( elem k . kvars . crhs) (cm fi)
remainingCs = M.filter (notElem k . kvars . crhs) (cm fi)
kvWfC = ws fi M.! k
be = bs fi
kDom = domain be kvWfC
orPred = {-# SCC "orPred" #-} POr $!! extractPred kDom be <$> M.elems relevantCs
extractPred :: [Symbol] -> BindEnv -> SimpC a -> Pred
extractPred kDom be sc = renameQuantified (subcId sc) kSol
where
env = clhs be sc
binds = second sr_sort <$> env
nonFuncBinds = filter (nonFunction be . fst) binds
lhsPreds = bindPred <$> env
suPreds = substPreds kDom $ crhs sc
kSol = PExist nonFuncBinds $ PAnd (lhsPreds ++ suPreds)
-- x:{v:int|v=10} -> (x=10)
bindPred :: (Symbol, SortedReft) -> Pred
bindPred (sym, sr) = subst1 (reftPred rft) sub
where
rft = sr_reft sr
sub = (reftBind rft, eVar sym)
-- k0[v:=e1][x:=e2] -> [v = e1, x = e2]
substPreds :: [Symbol] -> Pred -> [Pred]
substPreds dom (PKVar _ (Su subs)) = [PAtom Eq (eVar sym) e | (sym, e) <- M.toList subs , sym `elem` dom]
nonFunction :: BindEnv -> Symbol -> Bool
nonFunction be sym = sym `notElem` funcs
where
funcs = [sym | (_, sym, sr) <- bindEnvToList be, isFunctionSortedReft sr]
domain :: BindEnv -> WfC a -> [Symbol]
domain be wfc = (fst3 $ wrft wfc) : map fst (envCs be $ wenv wfc)
renameQuantified :: Integer -> Pred -> Pred
renameQuantified i (PExist bs p) = PExist bs' p'
where
su = substFromQBinds i bs
bs' = (first $ subst su) <$> bs
p' = subst su p
substFromQBinds :: Integer -> [(Symbol, Sort)] -> Subst
substFromQBinds i bs = Su $ M.fromList [(s, EVar $ existSymbol s i) | s <- fst <$> bs]
| gridaphobe/liquid-fixpoint | src/Language/Fixpoint/Solver/Eliminate.hs | bsd-3-clause | 2,798 | 0 | 11 | 659 | 978 | 528 | 450 | 51 | 1 |
-- | @TemplateHaskell@ utilities for generating lens fields.
module Extended.Lens.TH
( fieldsVerboseLensRules
) where
import Universum
import Data.Char (toUpper)
import Data.List (stripPrefix)
import Language.Haskell.TH.Syntax (Name, mkName, nameBase)
import Lens.Micro.Platform (DefName (MethodName), LensRules,
camelCaseFields, lensField, makeLensesWith)
-- | A field namer for 'fieldsVerboseLensRules'.
verboseFieldsNamer :: Name -> [Name] -> Name -> [DefName]
verboseFieldsNamer _ _ fieldName = maybeToList $ do
fieldUnprefixed@(x:xs) <- stripPrefix "_" (nameBase fieldName)
let className = "HasPoly" ++ toUpper x : xs
let methodName = fieldUnprefixed
pure (MethodName (mkName className) (mkName methodName))
-- | Custom rules for generating lenses. This is similar to
-- @makeFields@, but the generated type classes have names like @HasPolyFoo@
-- instead of @HasFoo@, so they are supposed to be used by introducing new
-- constraint aliases. See 'Importify.Environment' for details.
fieldsVerboseLensRules :: LensRules
fieldsVerboseLensRules = camelCaseFields & lensField .~ verboseFieldsNamer
| serokell/importify | src/Extended/Lens/TH.hs | mit | 1,262 | 0 | 13 | 307 | 226 | 128 | 98 | 16 | 1 |
-- | Types describing runtime errors related to DB.
module Pos.DB.Error
( DBError (..)
) where
import Formatting (bprint, int, stext, (%))
import qualified Formatting.Buildable
import Universum
data DBError =
-- | Structure of DB is malformed (e. g. data is inconsistent,
-- something is missing, etc.)
DBMalformed !Text
| DBUnexpectedVersionTag !Word8 !Word8 -- ^ The first field is the expected version
-- tag. The second is the one received.
deriving (Show)
instance Exception DBError
-- TODO Make it cardanoException
instance Buildable DBError where
build (DBMalformed msg) = bprint ("malformed DB ("%stext%")") msg
build (DBUnexpectedVersionTag w1 w2) =
bprint ("unexpected version tag (Expected version tag: "%int%". Got: "%int%")")
w1
w2
| input-output-hk/pos-haskell-prototype | db/src/Pos/DB/Error.hs | mit | 899 | 0 | 11 | 264 | 163 | 92 | 71 | 22 | 0 |
module NFA.Nerode.Congruent.Quiz where
import qualified NFA.Nerode.Congruent.Config as C
import qualified NFA.Nerode.Congruent.Instance as I
import qualified NFA.Roll
import NFA.Property
import NFA.Infinite
import qualified Convert.Language
import qualified Convert.Input
import NFA.Nerode.Congruent.Check
import Inter.Quiz
import Inter.Types
import Autolib.Set
import Autolib.NFA
import Autolib.Util.Zufall
instance NFAC c Int => Generator
Nerode_Congruent ( C.Config c ) ( I.Instance c ) where
generator p conf key = do
a <- NFA.Roll.roll
[ Alphabet $ C.alphabet conf
, Max_Size $ C.nondet_automaton_size conf
]
let d = Autolib.NFA.minimize0 a
              -- there is at least one such state, because the automaton is complete
i = pre_infinite_states d
q <- eins $ setToList i
let ws = some_shortest $ d { finals = mkSet [ q ] }
        -- there is at least one such word, because the language is infinite
w <- eins ws
return $ I.Instance
{ I.language = Convert.Language.Language
{ Convert.Language.implementation =
Convert.Input.NFA a
, Convert.Language.description = Nothing
}
, I.goal = w
, I.wanted = C.wanted conf
, I.minimal_length = 2 * length w
}
instance Project
Nerode_Congruent ( I.Instance c ) ( I.Instance c ) where
project p x = x
make :: Make
make = quiz Nerode_Congruent C.example
| florianpilz/autotool | src/NFA/Nerode/Congruent/Quiz.hs | gpl-2.0 | 1,552 | 0 | 15 | 487 | 390 | 216 | 174 | -1 | -1 |
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-# LANGUAGE TemplateHaskell, QuasiQuotes, LambdaCase #-}
-- Useful TemplateHaskell utilities.
module Util.TH (
-- * Various derivators.
stdDerive
, stdDerives
, deriveLens
, recordIx
, recordIxs
) where
import Data.List (uncons)
import Control.Monad (zipWithM)
import Control.Lens (makeLenses, makePrisms)
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Util.Monad (traverseS)
-- | Standalone derive, given:
-- + constraint constructor :: (* -> Constraint) -> * -> Constraint,
-- + class to derive,
-- + type to derive for.
stdDerive :: Name -> Name -> Name -> DecQ
stdDerive constr clazz typ = do
p <- varT <$> newName "p"
let tconstr = pure $ ConT constr
let tclazz = pure $ ConT clazz
let ttyp = pure $ ConT typ
standaloneDerivD (cxt [[t|$tconstr $tclazz $p|]])
[t| $tclazz ($ttyp $p) |]
-- | Standalone derive:
-- + given constraint constr,
-- + given set of classes,
-- + given the types,
-- all the classes for all the types.
stdDerives :: Name -> [Name] -> [Name] -> DecsQ
stdDerives constr clazzes types =
sequence $ stdDerive <$> [constr] <*> clazzes <*> types
-- | deriveLens: derive lens & prisms for a "set" of types.
deriveLens :: Traversable t => t Name -> DecsQ
deriveLens = fmap concat . mapM (\n -> (++) <$> makeLenses n <*> makePrisms n)
-- | See 'recordIx'.
recordIxs :: Traversable t => t Name -> DecsQ
recordIxs = traverseS recordIx
-- | Derive the relative field indices, starting from 0,
-- for the fields of every record constructor in the given
-- newtype or data type.
recordIx :: Name -> DecsQ
recordIx typeN = do
var <- reify typeN
dec <- case var of
TyConI dec -> pure dec
_ -> fail "recordIx: Expected type constructor name"
cons <- case dec of
DataD _ _ _ _ cons _ -> pure cons
NewtypeD _ _ _ _ con _ -> pure [con]
_ -> fail "recordIx: Expected data or newtype type-constructor"
flip traverseS cons $ \case
RecC _ vbt -> zipWithM recCtorIx [0..] $ ixName <$> vbt
    _          -> pure []
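-- A usage sketch for 'recordIx' (added for illustration; the record type and
-- the generated bindings are hypothetical, and the splice must live in a
-- module that imports this one, because of the Template Haskell stage
-- restriction):
--
-- > data Person = Person { _name :: String, _age :: Int }
-- > $(recordIx ''Person)
--
-- is expected to generate
--
-- > nameIx = 0
-- > ageIx = 1
--
-- since 'ixName' strips a leading underscore and appends "Ix".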
recCtorIx :: Integer -> Name -> DecQ
recCtorIx ix fname = pure $
ValD (VarP fname) (NormalB (LitE (IntegerL ix))) []
ixName :: VarBangType -> Name
ixName (n, _, _) = mkName . (++ "Ix") . removeUS . nameBase $ n
removeUS :: String -> String
removeUS = \case
[] -> []
('_':t) -> t
xs -> xs | Centril/DATX02-17-26 | libsrc/Util/TH.hs | gpl-2.0 | 3,154 | 0 | 14 | 683 | 681 | 359 | 322 | 51 | 5 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
module Rehi.Utils.ArgList where
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (split)
import Data.Monoid (Monoid)
import GHC.Exts(IsString, fromString, IsList, Item, fromList, toList)
newtype ArgList = ArgList { getArgList :: [ByteString] } deriving (Show,Monoid)
instance IsString ArgList where
fromString = ArgList . split ' ' . fromString
instance IsList ArgList where
type Item ArgList = ByteString
fromList = ArgList
toList = getArgList
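-- Usage sketch (added for illustration): with OverloadedStrings enabled in
-- the importing module,
--
-- > getArgList ("git rebase --onto" :: ArgList)
--
-- evaluates to ["git","rebase","--onto"], and the IsList instance (with
-- OverloadedLists) lets a literal list of ByteStrings stand in for an
-- ArgList directly.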
| max630/git-rehi | app/Rehi/Utils/ArgList.hs | gpl-2.0 | 548 | 0 | 8 | 79 | 143 | 86 | 57 | 14 | 0 |
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-# LANGUAGE LambdaCase #-}
-- | Normalizers for transforming floats in to doubles.
module Norm.FloatToDouble ( normFloatToDoubleVars, normFloatToDoubleRet ) where
import Norm.NormCS
-- TODO: allocate stages properly; the value below is chosen arbitrarily for now.
stage :: Int
stage = 15
-- | Rule to transform float vars into double vars
-- > float x; => double x;
normFloatToDoubleVars :: NormCUR
normFloatToDoubleVars = makeRule' "unsafe.coerce_float_to_double.vardecl"
[stage] execFloatToDoubleVars
-- | Rule to transform methods with float return type into double return type.
-- > public static float x(){} => public static double x(){}
normFloatToDoubleRet :: NormCUR
normFloatToDoubleRet = makeRule' "unsafe.coerce_float_to_double.method"
[stage + 1] execFloatToDoubleRet
-- | Transforms float variable inits, decls and references in to doubles
execFloatToDoubleVars :: NormCUA
execFloatToDoubleVars = normEvery $ \case
VMType t (PrimT FloatT) -> change $ VMType t (PrimT DoubleT)
x -> unique x
-- | Transforms float method return types in to double return types
execFloatToDoubleRet :: NormCUA
execFloatToDoubleRet = normEvery $ \case
MethodDecl (Just (PrimT FloatT)) n p b ->
change $ MethodDecl (Just (PrimT DoubleT)) n p b
x -> unique x
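-- Added note (illustrative): both rules rewrite declared types only. A
-- declaration such as
--
-- > float x = 0.5f;
--
-- becomes @double x = 0.5f;@ (still valid Java, as float widens to double);
-- the float literal in the initializer is not matched by either rule.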
| DATX02-17-26/DATX02-17-26 | libsrc/Norm/FloatToDouble.hs | gpl-2.0 | 2,197 | 0 | 14 | 479 | 217 | 118 | 99 | 20 | 2 |
module GameLogic.Action.ModifyPlayer where
import Control.Lens
import GameLogic.Data.Facade
decreaseGamePlayerFree :: Int -> Int -> MaybeGameState ()
decreaseGamePlayerFree playerInd cost = do
curFree <- use $ playerOfGame playerInd . free
when (cost > curFree) $ fail "Not enough free"
playerOfGame playerInd . free -= cost
helpPlayer :: Int -> GameState ()
helpPlayer playerInd = fromMaybeState $ decreaseGamePlayerFree playerInd (-10)
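-- Added note (illustrative): 'helpPlayer' reuses 'decreaseGamePlayerFree'
-- with a cost of -10, so the @cost > curFree@ guard cannot fire for a
-- non-negative balance and the player simply gains 10 free points.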
| EPashkin/gamenumber-freegame | src_gl/GameLogic/Action/ModifyPlayer.hs | gpl-3.0 | 454 | 0 | 10 | 73 | 134 | 67 | 67 | -1 | -1 |
{-# LANGUAGE TemplateHaskellQuotes #-}
-- Trac #2632
module MkData where
import Language.Haskell.TH
op :: Num v => v -> v -> v
op a b = a + b
decl1 = [d| func = 0 `op` 3 |]
decl2 = [d| op x y = x
func = 0 `op` 3 |]
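-- Added note (illustrative): when spliced, 'decl1' uses the top-level 'op',
-- so its @func@ is 3, whereas in 'decl2' the quoted @op x y = x@ shadows the
-- top-level definition and the quoted @func@ evaluates to 0.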
| mpickering/ghc-exactprint | tests/examples/ghc8/T2632.hs | bsd-3-clause | 233 | 0 | 7 | 70 | 64 | 40 | 24 | -1 | -1 |
-- | Operations on the 'Area' type that involve random numbers.
module Game.LambdaHack.Server.DungeonGen.AreaRnd
( -- * Picking points inside areas
xyInArea, mkRoom, mkVoidRoom
-- * Choosing connections
, connectGrid, randomConnection
-- * Plotting corridors
, Corridor, connectPlaces
) where
import Control.Exception.Assert.Sugar
import Data.Maybe
import qualified Data.Set as S
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.Random
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Server.DungeonGen.Area
-- Picking random points inside areas
-- | Pick a random point within an area.
xyInArea :: Area -> Rnd Point
xyInArea area = do
let (x0, y0, x1, y1) = fromArea area
rx <- randomR (x0, x1)
ry <- randomR (y0, y1)
return $! Point rx ry
-- | Create a random room according to given parameters.
mkRoom :: (X, Y) -- ^ minimum size
-> (X, Y) -- ^ maximum size
-> Area -- ^ the containing area, not the room itself
-> Rnd Area
mkRoom (xm, ym) (xM, yM) area = do
let (x0, y0, x1, y1) = fromArea area
let !_A = assert (xm <= x1 - x0 + 1 && ym <= y1 - y0 + 1) ()
let aW = (xm, ym, min xM (x1 - x0 + 1), min yM (y1 - y0 + 1))
areaW = fromMaybe (assert `failure` aW) $ toArea aW
Point xW yW <- xyInArea areaW -- roll size
let a1 = (x0, y0, max x0 (x1 - xW + 1), max y0 (y1 - yW + 1))
area1 = fromMaybe (assert `failure` a1) $ toArea a1
Point rx1 ry1 <- xyInArea area1 -- roll top-left corner
let a3 = (rx1, ry1, rx1 + xW - 1, ry1 + yW - 1)
area3 = fromMaybe (assert `failure` a3) $ toArea a3
return $! area3
-- | Create a void room, i.e., a single point area within the designated area.
mkVoidRoom :: Area -> Rnd Area
mkVoidRoom area = do
-- Pass corridors closer to the middle of the grid area, if possible.
let core = fromMaybe area $ shrink area
pxy <- xyInArea core
return $! trivialArea pxy
-- Choosing connections between areas in a grid
-- | Pick a subset of connections between adjacent areas within a grid until
-- there is only one connected component in the graph of all areas.
connectGrid :: (X, Y) -> Rnd [(Point, Point)]
connectGrid (nx, ny) = do
let unconnected = S.fromList [ Point x y
| x <- [0..nx-1], y <- [0..ny-1] ]
-- Candidates are neighbours that are still unconnected. We start with
-- a random choice.
rx <- randomR (0, nx-1)
ry <- randomR (0, ny-1)
let candidates = S.fromList [Point rx ry]
connectGrid' (nx, ny) unconnected candidates []
connectGrid' :: (X, Y) -> S.Set Point -> S.Set Point
-> [(Point, Point)]
-> Rnd [(Point, Point)]
connectGrid' (nx, ny) unconnected candidates acc
| S.null candidates = return $! map sortPoint acc
| otherwise = do
c <- oneOf (S.toList candidates)
-- potential new candidates:
let ns = S.fromList $ vicinityCardinal nx ny c
nu = S.delete c unconnected -- new unconnected
-- (new candidates, potential connections):
(nc, ds) = S.partition (`S.member` nu) ns
new <- if S.null ds
then return id
else do
d <- oneOf (S.toList ds)
return ((c, d) :)
connectGrid' (nx, ny) nu
(S.delete c (candidates `S.union` nc)) (new acc)
-- | Sort the sequence of two points, in the derived lexicographic order.
sortPoint :: (Point, Point) -> (Point, Point)
sortPoint (a, b) | a <= b = (a, b)
| otherwise = (b, a)
-- | Pick a single random connection between adjacent areas within a grid.
randomConnection :: (X, Y) -> Rnd (Point, Point)
randomConnection (nx, ny) =
assert (nx > 1 && ny > 0 || nx > 0 && ny > 1 `blame` "wrong connection"
`twith` (nx, ny)) $ do
rb <- oneOf [False, True]
if rb || ny <= 1
then do
rx <- randomR (0, nx-2)
ry <- randomR (0, ny-1)
return (Point rx ry, Point (rx+1) ry)
else do
rx <- randomR (0, nx-1)
ry <- randomR (0, ny-2)
return (Point rx ry, Point rx (ry+1))
-- Plotting individual corridors between two areas
-- | The choice of horizontal and vertical orientation.
data HV = Horiz | Vert
-- | The coordinates of consecutive fields of a corridor.
type Corridor = [Point]
-- | Create a corridor, either horizontal or vertical, with
-- a possible intermediate part that is in the opposite direction.
mkCorridor :: HV -- ^ orientation of the starting section
-> Point -- ^ starting point
-> Point -- ^ ending point
-> Area -- ^ the area containing the intermediate point
-> Rnd Corridor -- ^ straight sections of the corridor
mkCorridor hv (Point x0 y0) (Point x1 y1) b = do
Point rx ry <- xyInArea b
return $! map (uncurry Point) $ case hv of
Horiz -> [(x0, y0), (rx, y0), (rx, y1), (x1, y1)]
Vert -> [(x0, y0), (x0, ry), (x1, ry), (x1, y1)]
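-- For instance (added illustration): a 'Horiz' corridor from @Point 0 0@ to
-- @Point 5 3@ with an intermediate @rx = 2@ consists of the straight
-- sections @[Point 0 0, Point 2 0, Point 2 3, Point 5 3]@, i.e. horizontal,
-- then vertical, then horizontal again.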
-- | Try to connect two interiors of places with a corridor.
-- Choose entrances at least 4 or 3 tiles distant from the edges, if the place
-- is big enough. Note that with @pfence == FNone@, the area considered
-- is the strict interior of the place, without the outermost tiles.
connectPlaces :: (Area, Area) -> (Area, Area) -> Rnd Corridor
connectPlaces (sa, so) (ta, to) = do
let (_, _, sx1, sy1) = fromArea sa
(_, _, sox1, soy1) = fromArea so
(tx0, ty0, _, _) = fromArea ta
(tox0, toy0, _, _) = fromArea to
let !_A = assert (sx1 <= tx0 || sy1 <= ty0 `blame` (sa, ta)) ()
let !_A = assert (sx1 <= sox1 || sy1 <= soy1 `blame` (sa, so)) ()
let !_A = assert (tx0 >= tox0 || ty0 >= toy0 `blame` (ta, to)) ()
let trim area =
let (x0, y0, x1, y1) = fromArea area
trim4 (v0, v1) | v1 - v0 < 6 = (v0, v1)
| v1 - v0 < 8 = (v0 + 3, v1 - 3)
| otherwise = (v0 + 4, v1 - 4)
(nx0, nx1) = trim4 (x0, x1)
(ny0, ny1) = trim4 (y0, y1)
in fromMaybe (assert `failure` area) $ toArea (nx0, ny0, nx1, ny1)
Point sx sy <- xyInArea $ trim so
Point tx ty <- xyInArea $ trim to
let hva sarea tarea = do
let (_, _, zsx1, zsy1) = fromArea sarea
(ztx0, zty0, _, _) = fromArea tarea
xa = (zsx1+2, min sy ty, ztx0-2, max sy ty)
ya = (min sx tx, zsy1+2, max sx tx, zty0-2)
xya = (zsx1+2, zsy1+2, ztx0-2, zty0-2)
case toArea xya of
Just xyarea -> fmap (\hv -> (hv, Just xyarea)) (oneOf [Horiz, Vert])
Nothing ->
case toArea xa of
Just xarea -> return (Horiz, Just xarea)
Nothing -> return (Vert, toArea ya) -- Vertical bias.
(hvOuter, areaOuter) <- hva so to
(hv, area) <- case areaOuter of
Just arenaOuter -> return (hvOuter, arenaOuter)
Nothing -> do
-- TODO: let mkCorridor only pick points on the floor fence
(hvInner, aInner) <- hva sa ta
let yell = assert `failure` (sa, so, ta, to, areaOuter, aInner)
areaInner = fromMaybe yell aInner
return (hvInner, areaInner)
-- We cross width one places completely with the corridor, for void
-- rooms and others (e.g., one-tile wall room then becomes a door, etc.).
let (p0, p1) = case hv of
Horiz -> (Point sox1 sy, Point tox0 ty)
Vert -> (Point sx soy1, Point tx toy0)
  -- The conditions imposed on mkCorridor are tricky: a good intermediate
  -- point might not always exist if the places are allowed to be close
  -- together, in which case we let the intermediate part degenerate.
mkCorridor hv p0 p1 area
| Concomitant/LambdaHack | Game/LambdaHack/Server/DungeonGen/AreaRnd.hs | bsd-3-clause | 7,635 | 0 | 20 | 2,153 | 2,639 | 1,412 | 1,227 | -1 | -1 |
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Safe #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Set
-- Copyright : (c) Daan Leijen 2002
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of sets.
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import Data.Set (Set)
-- > import qualified Data.Set as Set
--
-- The implementation of 'Set' is based on /size balanced/ binary trees (or
-- trees of /bounded balance/) as described by:
--
-- * Stephen Adams, \"/Efficient sets: a balancing act/\",
-- Journal of Functional Programming 3(4):553-562, October 1993,
-- <http://www.swiss.ai.mit.edu/~adams/BB/>.
--
-- * J. Nievergelt and E.M. Reingold,
-- \"/Binary search trees of bounded balance/\",
-- SIAM journal of computing 2(1), March 1973.
--
-- Note that the implementation is /left-biased/ -- the elements of the
-- first argument are always preferred to the second, for example in
-- 'union' or 'insert'. Of course, left-biasing can only be observed
-- when equality is an equivalence relation instead of structural
-- equality.
-----------------------------------------------------------------------------
module Data.Set (
-- * Strictness properties
-- $strictness
-- * Set type
#if !defined(TESTING)
Set -- instance Eq,Ord,Show,Read,Data,Typeable
#else
Set(..)
#endif
-- * Operators
, (\\)
-- * Query
, S.null
, size
, member
, notMember
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, isSubsetOf
, isProperSubsetOf
-- * Construction
, empty
, singleton
, insert
, delete
-- * Combine
, union
, unions
, difference
, intersection
-- * Filter
, S.filter
, partition
, split
, splitMember
, splitRoot
-- * Indexed
, lookupIndex
, findIndex
, elemAt
, deleteAt
-- * Map
, S.map
, mapMonotonic
-- * Folds
, S.foldr
, S.foldl
-- ** Strict folds
, foldr'
, foldl'
-- ** Legacy folds
, fold
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, maxView
, minView
-- * Conversion
-- ** List
, elems
, toList
, fromList
-- ** Ordered list
, toAscList
, toDescList
, fromAscList
, fromDistinctAscList
-- * Debugging
, showTree
, showTreeWith
, valid
#if defined(TESTING)
-- Internals (for testing)
, bin
, balanced
, link
, merge
#endif
) where
import Data.Set.Base as S
-- $strictness
--
-- This module satisfies the following strictness property:
--
-- * Key arguments are evaluated to WHNF
--
-- Here are some examples that illustrate the property:
--
-- > delete undefined s == undefined
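--
-- Two further examples, added here for illustration (they follow from the
-- same rule that key arguments are evaluated to WHNF):
--
-- > insert undefined s == undefined
-- > member undefined s == undefined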
| jwiegley/ghc-release | libraries/containers/Data/Set.hs | gpl-3.0 | 3,728 | 0 | 5 | 1,463 | 274 | 206 | 68 | 57 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.ListPolicies
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists all the managed policies that are available to your account, including
-- your own customer managed policies and all AWS managed policies.
--
-- You can filter the list of policies that is returned using the optional 'OnlyAttached', 'Scope', and 'PathPrefix' parameters. For example, to list only the customer
-- managed policies in your AWS account, set 'Scope' to 'Local'. To list only AWS
-- managed policies, set 'Scope' to 'AWS'.
--
-- You can paginate the results using the 'MaxItems' and 'Marker' parameters.
--
-- For more information about managed policies, refer to <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies andInline Policies> in the /Using IAM/ guide.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ListPolicies.html>
module Network.AWS.IAM.ListPolicies
(
-- * Request
ListPolicies
-- ** Request constructor
, listPolicies
-- ** Request lenses
, lpMarker
, lpMaxItems
, lpOnlyAttached
, lpPathPrefix
, lpScope
-- * Response
, ListPoliciesResponse
-- ** Response constructor
, listPoliciesResponse
-- ** Response lenses
, lprIsTruncated
, lprMarker
, lprPolicies
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
data ListPolicies = ListPolicies
{ _lpMarker :: Maybe Text
, _lpMaxItems :: Maybe Nat
, _lpOnlyAttached :: Maybe Bool
, _lpPathPrefix :: Maybe Text
, _lpScope :: Maybe PolicyScopeType
} deriving (Eq, Read, Show)
-- | 'ListPolicies' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lpMarker' @::@ 'Maybe' 'Text'
--
-- * 'lpMaxItems' @::@ 'Maybe' 'Natural'
--
-- * 'lpOnlyAttached' @::@ 'Maybe' 'Bool'
--
-- * 'lpPathPrefix' @::@ 'Maybe' 'Text'
--
-- * 'lpScope' @::@ 'Maybe' 'PolicyScopeType'
--
listPolicies :: ListPolicies
listPolicies = ListPolicies
{ _lpScope = Nothing
, _lpOnlyAttached = Nothing
, _lpPathPrefix = Nothing
, _lpMarker = Nothing
, _lpMaxItems = Nothing
}
-- | Use this parameter only when paginating results, and only in a subsequent
-- request after you've received a response where the results are truncated. Set
-- it to the value of the 'Marker' element in the response you just received.
lpMarker :: Lens' ListPolicies (Maybe Text)
lpMarker = lens _lpMarker (\s a -> s { _lpMarker = a })
-- | Use this parameter only when paginating results to indicate the maximum
-- number of policies you want in the response. If there are additional policies
-- beyond the maximum you specify, the 'IsTruncated' response element is 'true'.
-- This parameter is optional. If you do not include it, it defaults to 100.
lpMaxItems :: Lens' ListPolicies (Maybe Natural)
lpMaxItems = lens _lpMaxItems (\s a -> s { _lpMaxItems = a }) . mapping _Nat
-- | A flag to filter the results to only the attached policies.
--
-- When 'OnlyAttached' is 'true', the returned list contains only the policies that
-- are attached to a user, group, or role. When 'OnlyAttached' is 'false', or when
-- the parameter is not included, all policies are returned.
lpOnlyAttached :: Lens' ListPolicies (Maybe Bool)
lpOnlyAttached = lens _lpOnlyAttached (\s a -> s { _lpOnlyAttached = a })
-- | The path prefix for filtering the results. This parameter is optional. If it
-- is not included, it defaults to a slash (/), listing all policies.
lpPathPrefix :: Lens' ListPolicies (Maybe Text)
lpPathPrefix = lens _lpPathPrefix (\s a -> s { _lpPathPrefix = a })
-- | The scope to use for filtering the results.
--
-- To list only AWS managed policies, set 'Scope' to 'AWS'. To list only the
-- customer managed policies in your AWS account, set 'Scope' to 'Local'.
--
-- This parameter is optional. If it is not included, or if it is set to 'All',
-- all policies are returned.
lpScope :: Lens' ListPolicies (Maybe PolicyScopeType)
lpScope = lens _lpScope (\s a -> s { _lpScope = a })
data ListPoliciesResponse = ListPoliciesResponse
{ _lprIsTruncated :: Maybe Bool
, _lprMarker :: Maybe Text
, _lprPolicies :: List "member" Policy
} deriving (Eq, Read, Show)
-- | 'ListPoliciesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lprIsTruncated' @::@ 'Maybe' 'Bool'
--
-- * 'lprMarker' @::@ 'Maybe' 'Text'
--
-- * 'lprPolicies' @::@ ['Policy']
--
listPoliciesResponse :: ListPoliciesResponse
listPoliciesResponse = ListPoliciesResponse
{ _lprPolicies = mempty
, _lprIsTruncated = Nothing
, _lprMarker = Nothing
}
-- | A flag that indicates whether there are more policies to list. If your
-- results were truncated, you can make a subsequent pagination request using
-- the 'Marker' request parameter to retrieve more policies in the list.
lprIsTruncated :: Lens' ListPoliciesResponse (Maybe Bool)
lprIsTruncated = lens _lprIsTruncated (\s a -> s { _lprIsTruncated = a })
-- | If 'IsTruncated' is 'true', this element is present and contains the value to use
-- for the 'Marker' parameter in a subsequent pagination request.
lprMarker :: Lens' ListPoliciesResponse (Maybe Text)
lprMarker = lens _lprMarker (\s a -> s { _lprMarker = a })
-- | A list of policies.
lprPolicies :: Lens' ListPoliciesResponse [Policy]
lprPolicies = lens _lprPolicies (\s a -> s { _lprPolicies = a }) . _List
instance ToPath ListPolicies where
toPath = const "/"
instance ToQuery ListPolicies where
toQuery ListPolicies{..} = mconcat
[ "Marker" =? _lpMarker
, "MaxItems" =? _lpMaxItems
, "OnlyAttached" =? _lpOnlyAttached
, "PathPrefix" =? _lpPathPrefix
, "Scope" =? _lpScope
]
instance ToHeaders ListPolicies
instance AWSRequest ListPolicies where
type Sv ListPolicies = IAM
type Rs ListPolicies = ListPoliciesResponse
request = post "ListPolicies"
response = xmlResponse
instance FromXML ListPoliciesResponse where
parseXML = withElement "ListPoliciesResult" $ \x -> ListPoliciesResponse
<$> x .@? "IsTruncated"
<*> x .@? "Marker"
<*> x .@? "Policies" .!@ mempty
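-- A request-building sketch (added for illustration; it assumes the lens
-- operators '&' and '.~' are in scope, e.g. via the library prelude):
--
-- > listPolicies & lpOnlyAttached .~ Just True
-- >              & lpMaxItems .~ Just 50
--
-- describes a request for at most 50 policies that are attached to a user,
-- group, or role.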
| romanb/amazonka | amazonka-iam/gen/Network/AWS/IAM/ListPolicies.hs | mpl-2.0 | 7,264 | 0 | 14 | 1,550 | 891 | 539 | 352 | 88 | 1 |
module Negation where
print' :: Double -> Fay ()
print' = print
main :: Fay ()
main = do print' $ (-7/2)
print' $ (-7)/2
print' $ -f x/y
where f n = n * n
x = 5
y = 2
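-- Added note (illustrative, assuming Fay renders Doubles the way GHC's show
-- does): the three printed values are -3.5, -3.5 and -12.5; f x = 25, so the
-- last expression is -12.5 however the prefix minus is grouped.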
| fpco/fay | tests/negation.hs | bsd-3-clause | 216 | 1 | 10 | 91 | 110 | 57 | 53 | 10 | 1 |
{-# LANGUAGE CPP #-}
module CmmInfo (
mkEmptyContInfoTable,
cmmToRawCmm,
mkInfoTable,
srtEscape,
-- info table accessors
closureInfoPtr,
entryCode,
getConstrTag,
cmmGetClosureType,
infoTable,
infoTableConstrTag,
infoTableSrtBitmap,
infoTableClosureType,
infoTablePtrs,
infoTableNonPtrs,
funInfoTable,
funInfoArity,
-- info table sizes and offsets
stdInfoTableSizeW,
fixedInfoTableSizeW,
profInfoTableSizeW,
maxStdInfoTableSizeW,
maxRetInfoTableSizeW,
stdInfoTableSizeB,
stdSrtBitmapOffset,
stdClosureTypeOffset,
stdPtrsOffset, stdNonPtrsOffset,
) where
#include "HsVersions.h"
import Cmm
import CmmUtils
import CLabel
import SMRep
import Bitmap
import Stream (Stream)
import qualified Stream
import Hoopl
import Maybes
import DynFlags
import Panic
import UniqSupply
import MonadUtils
import Util
import Outputable
import Data.Bits
import Data.Word
-- When we split at proc points, we need an empty info table.
mkEmptyContInfoTable :: CLabel -> CmmInfoTable
mkEmptyContInfoTable info_lbl
= CmmInfoTable { cit_lbl = info_lbl
, cit_rep = mkStackRep []
, cit_prof = NoProfilingInfo
, cit_srt = NoC_SRT }
cmmToRawCmm :: DynFlags -> Stream IO CmmGroup ()
-> IO (Stream IO RawCmmGroup ())
cmmToRawCmm dflags cmms
= do { uniqs <- mkSplitUniqSupply 'i'
; let do_one uniqs cmm = do
case initUs uniqs $ concatMapM (mkInfoTable dflags) cmm of
(b,uniqs') -> return (uniqs',b)
-- NB. strictness fixes a space leak. DO NOT REMOVE.
; return (Stream.mapAccumL do_one uniqs cmms >> return ())
}
-- Make a concrete info table, represented as a list of CmmStatic
-- (it can't be simply a list of Word, because the SRT field is
-- represented by a label+offset expression).
--
-- With tablesNextToCode, the layout is
-- <reversed variable part>
-- <normal forward StgInfoTable, but without
-- an entry point at the front>
-- <code>
--
-- Without tablesNextToCode, the layout of an info table is
-- <entry label>
-- <normal forward rest of StgInfoTable>
-- <forward variable part>
--
-- See includes/rts/storage/InfoTables.h
--
-- For return-points these are as follows
--
-- Tables next to code:
--
-- <srt slot>
-- <standard info table>
-- ret-addr --> <entry code (if any)>
--
-- Not tables-next-to-code:
--
-- ret-addr --> <ptr to entry code>
-- <standard info table>
-- <srt slot>
--
-- * The SRT slot is only there if there is SRT info to record
mkInfoTable :: DynFlags -> CmmDecl -> UniqSM [RawCmmDecl]
mkInfoTable _ (CmmData sec dat)
= return [CmmData sec dat]
mkInfoTable dflags proc@(CmmProc infos entry_lbl live blocks)
--
-- in the non-tables-next-to-code case, procs can have at most a
-- single info table associated with the entry label of the proc.
--
| not (tablesNextToCode dflags)
= case topInfoTable proc of -- must be at most one
-- no info table
Nothing ->
return [CmmProc mapEmpty entry_lbl live blocks]
Just info@CmmInfoTable { cit_lbl = info_lbl } -> do
(top_decls, (std_info, extra_bits)) <-
mkInfoTableContents dflags info Nothing
let
rel_std_info = map (makeRelativeRefTo dflags info_lbl) std_info
rel_extra_bits = map (makeRelativeRefTo dflags info_lbl) extra_bits
--
-- Separately emit info table (with the function entry
-- point as first entry) and the entry code
--
return (top_decls ++
[CmmProc mapEmpty entry_lbl live blocks,
mkDataLits Data info_lbl
(CmmLabel entry_lbl : rel_std_info ++ rel_extra_bits)])
--
-- With tables-next-to-code, we can have many info tables,
-- associated with some of the BlockIds of the proc. For each info
-- table we need to turn it into CmmStatics, and collect any new
-- CmmDecls that arise from doing so.
--
| otherwise
= do
(top_declss, raw_infos) <-
unzip `fmap` mapM do_one_info (mapToList (info_tbls infos))
return (concat top_declss ++
[CmmProc (mapFromList raw_infos) entry_lbl live blocks])
where
do_one_info (lbl,itbl) = do
(top_decls, (std_info, extra_bits)) <-
mkInfoTableContents dflags itbl Nothing
let
info_lbl = cit_lbl itbl
rel_std_info = map (makeRelativeRefTo dflags info_lbl) std_info
rel_extra_bits = map (makeRelativeRefTo dflags info_lbl) extra_bits
--
return (top_decls, (lbl, Statics info_lbl $ map CmmStaticLit $
reverse rel_extra_bits ++ rel_std_info))
-----------------------------------------------------
type InfoTableContents = ( [CmmLit] -- The standard part
, [CmmLit] ) -- The "extra bits"
-- These Lits have *not* had mkRelativeTo applied to them
mkInfoTableContents :: DynFlags
-> CmmInfoTable
-> Maybe Int -- Override default RTS type tag?
-> UniqSM ([RawCmmDecl], -- Auxiliary top decls
InfoTableContents) -- Info tbl + extra bits
mkInfoTableContents dflags
info@(CmmInfoTable { cit_lbl = info_lbl
, cit_rep = smrep
, cit_prof = prof
, cit_srt = srt })
mb_rts_tag
| RTSRep rts_tag rep <- smrep
= mkInfoTableContents dflags info{cit_rep = rep} (Just rts_tag)
-- Completely override the rts_tag that mkInfoTableContents would
-- otherwise compute, with the rts_tag stored in the RTSRep
-- (which in turn came from a handwritten .cmm file)
| StackRep frame <- smrep
= do { (prof_lits, prof_data) <- mkProfLits dflags prof
; let (srt_label, srt_bitmap) = mkSRTLit dflags srt
; (liveness_lit, liveness_data) <- mkLivenessBits dflags frame
; let
std_info = mkStdInfoTable dflags prof_lits rts_tag srt_bitmap liveness_lit
rts_tag | Just tag <- mb_rts_tag = tag
| null liveness_data = rET_SMALL -- Fits in extra_bits
| otherwise = rET_BIG -- Does not; extra_bits is
-- a label
; return (prof_data ++ liveness_data, (std_info, srt_label)) }
| HeapRep _ ptrs nonptrs closure_type <- smrep
= do { let layout = packIntsCLit dflags ptrs nonptrs
; (prof_lits, prof_data) <- mkProfLits dflags prof
; let (srt_label, srt_bitmap) = mkSRTLit dflags srt
; (mb_srt_field, mb_layout, extra_bits, ct_data)
<- mk_pieces closure_type srt_label
; let std_info = mkStdInfoTable dflags prof_lits
(mb_rts_tag `orElse` rtsClosureType smrep)
(mb_srt_field `orElse` srt_bitmap)
(mb_layout `orElse` layout)
; return (prof_data ++ ct_data, (std_info, extra_bits)) }
where
mk_pieces :: ClosureTypeInfo -> [CmmLit]
-> UniqSM ( Maybe StgHalfWord -- Override the SRT field with this
, Maybe CmmLit -- Override the layout field with this
, [CmmLit] -- "Extra bits" for info table
, [RawCmmDecl]) -- Auxiliary data decls
mk_pieces (Constr con_tag con_descr) _no_srt -- A data constructor
= do { (descr_lit, decl) <- newStringLit con_descr
; return ( Just (toStgHalfWord dflags (fromIntegral con_tag))
, Nothing, [descr_lit], [decl]) }
mk_pieces Thunk srt_label
= return (Nothing, Nothing, srt_label, [])
mk_pieces (ThunkSelector offset) _no_srt
= return (Just (toStgHalfWord dflags 0), Just (mkWordCLit dflags (fromIntegral offset)), [], [])
-- Layout known (one free var); we use the layout field for offset
mk_pieces (Fun arity (ArgSpec fun_type)) srt_label
= do { let extra_bits = packIntsCLit dflags fun_type arity : srt_label
; return (Nothing, Nothing, extra_bits, []) }
mk_pieces (Fun arity (ArgGen arg_bits)) srt_label
= do { (liveness_lit, liveness_data) <- mkLivenessBits dflags arg_bits
; let fun_type | null liveness_data = aRG_GEN
| otherwise = aRG_GEN_BIG
extra_bits = [ packIntsCLit dflags fun_type arity
, srt_lit, liveness_lit, slow_entry ]
; return (Nothing, Nothing, extra_bits, liveness_data) }
where
slow_entry = CmmLabel (toSlowEntryLbl info_lbl)
srt_lit = case srt_label of
[] -> mkIntCLit dflags 0
(lit:_rest) -> ASSERT( null _rest ) lit
mk_pieces other _ = pprPanic "mk_pieces" (ppr other)
mkInfoTableContents _ _ _ = panic "mkInfoTableContents" -- NonInfoTable dealt with earlier
packIntsCLit :: DynFlags -> Int -> Int -> CmmLit
packIntsCLit dflags a b = packHalfWordsCLit dflags
(toStgHalfWord dflags (fromIntegral a))
(toStgHalfWord dflags (fromIntegral b))
mkSRTLit :: DynFlags
-> C_SRT
-> ([CmmLit], -- srt_label, if any
StgHalfWord) -- srt_bitmap
mkSRTLit dflags NoC_SRT = ([], toStgHalfWord dflags 0)
mkSRTLit dflags (C_SRT lbl off bitmap) = ([cmmLabelOffW dflags lbl off], bitmap)
-------------------------------------------------------------------------
--
-- Lay out the info table and handle relative offsets
--
-------------------------------------------------------------------------
-- This function takes
-- * the standard info table portion (StgInfoTable)
-- * the "extra bits" (StgFunInfoExtraRev etc.)
-- * the entry label
-- * the code
-- and lays them out in memory, producing a list of RawCmmDecl
-------------------------------------------------------------------------
--
-- Position independent code
--
-------------------------------------------------------------------------
-- In order to support position independent code, we mustn't put absolute
-- references into read-only space. Info tables in the tablesNextToCode
-- case must be in .text, which is read-only, so we doctor the CmmLits
-- to use relative offsets instead.
-- Note that this is done even when the -fPIC flag is not specified,
-- as we want to keep binary compatibility between PIC and non-PIC.
makeRelativeRefTo :: DynFlags -> CLabel -> CmmLit -> CmmLit
makeRelativeRefTo dflags info_lbl (CmmLabel lbl)
| tablesNextToCode dflags
= CmmLabelDiffOff lbl info_lbl 0
makeRelativeRefTo dflags info_lbl (CmmLabelOff lbl off)
| tablesNextToCode dflags
= CmmLabelDiffOff lbl info_lbl off
makeRelativeRefTo _ _ lit = lit
-------------------------------------------------------------------------
--
-- Build a liveness mask for the stack layout
--
-------------------------------------------------------------------------
-- There are four kinds of things on the stack:
--
-- - pointer variables (bound in the environment)
-- - non-pointer variables (bound in the environment)
-- - free slots (recorded in the stack free list)
-- - non-pointer data slots (recorded in the stack free list)
--
-- The first two are represented with a 'Just' of a 'LocalReg'.
-- The last two with one or more 'Nothing' constructors.
-- Each 'Nothing' represents one used word.
--
-- The head of the stack layout is the top of the stack and
-- the least-significant bit.
mkLivenessBits :: DynFlags -> Liveness -> UniqSM (CmmLit, [RawCmmDecl])
-- ^ Returns:
-- 1. The bitmap (literal value or label)
-- 2. Large bitmap CmmData if needed
mkLivenessBits dflags liveness
| n_bits > mAX_SMALL_BITMAP_SIZE dflags -- does not fit in one word
= do { uniq <- getUniqueUs
; let bitmap_lbl = mkBitmapLabel uniq
; return (CmmLabel bitmap_lbl,
[mkRODataLits bitmap_lbl lits]) }
| otherwise -- Fits in one word
= return (mkStgWordCLit dflags bitmap_word, [])
where
n_bits = length liveness
bitmap :: Bitmap
bitmap = mkBitmap dflags liveness
small_bitmap = case bitmap of
[] -> toStgWord dflags 0
[b] -> b
_ -> panic "mkLiveness"
bitmap_word = toStgWord dflags (fromIntegral n_bits)
.|. (small_bitmap `shiftL` bITMAP_BITS_SHIFT dflags)
lits = mkWordCLit dflags (fromIntegral n_bits)
: map (mkStgWordCLit dflags) bitmap
-- The first word is the size. The structure must match
-- StgLargeBitmap in includes/rts/storage/InfoTable.h
-------------------------------------------------------------------------
--
-- Generating a standard info table
--
-------------------------------------------------------------------------
-- The standard bits of an info table. This part of the info table
-- corresponds to the StgInfoTable type defined in
-- includes/rts/storage/InfoTables.h.
--
-- Its shape varies with ticky/profiling/tables next to code etc
-- so we can't use constant offsets from Constants
mkStdInfoTable
:: DynFlags
-> (CmmLit,CmmLit) -- Closure type descr and closure descr (profiling)
-> Int -- Closure RTS tag
-> StgHalfWord -- SRT length
-> CmmLit -- layout field
-> [CmmLit]
mkStdInfoTable dflags (type_descr, closure_descr) cl_type srt_len layout_lit
= -- Parallel revertible-black hole field
prof_info
-- Ticky info (none at present)
-- Debug info (none at present)
++ [layout_lit, type_lit]
where
prof_info
| gopt Opt_SccProfilingOn dflags = [type_descr, closure_descr]
| otherwise = []
type_lit = packHalfWordsCLit dflags (toStgHalfWord dflags (fromIntegral cl_type)) srt_len
-------------------------------------------------------------------------
--
-- Making string literals
--
-------------------------------------------------------------------------
mkProfLits :: DynFlags -> ProfilingInfo -> UniqSM ((CmmLit,CmmLit), [RawCmmDecl])
mkProfLits dflags NoProfilingInfo = return ((zeroCLit dflags, zeroCLit dflags), [])
mkProfLits _ (ProfilingInfo td cd)
= do { (td_lit, td_decl) <- newStringLit td
; (cd_lit, cd_decl) <- newStringLit cd
; return ((td_lit,cd_lit), [td_decl,cd_decl]) }
newStringLit :: [Word8] -> UniqSM (CmmLit, GenCmmDecl CmmStatics info stmt)
newStringLit bytes
= do { uniq <- getUniqueUs
; return (mkByteStringCLit uniq bytes) }
-- Misc utils
-- | Value of the srt field of an info table when using an StgLargeSRT
srtEscape :: DynFlags -> StgHalfWord
srtEscape dflags = toStgHalfWord dflags (-1)
-------------------------------------------------------------------------
--
-- Accessing fields of an info table
--
-------------------------------------------------------------------------
closureInfoPtr :: DynFlags -> CmmExpr -> CmmExpr
-- Takes a closure pointer and returns the info table pointer
closureInfoPtr dflags e = CmmLoad e (bWord dflags)
entryCode :: DynFlags -> CmmExpr -> CmmExpr
-- Takes an info pointer (the first word of a closure)
-- and returns its entry code
entryCode dflags e
| tablesNextToCode dflags = e
| otherwise = CmmLoad e (bWord dflags)
getConstrTag :: DynFlags -> CmmExpr -> CmmExpr
-- Takes a closure pointer, and return the *zero-indexed*
-- constructor tag obtained from the info table
-- This lives in the SRT field of the info table
-- (constructors don't need SRTs).
getConstrTag dflags closure_ptr
= CmmMachOp (MO_UU_Conv (halfWordWidth dflags) (wordWidth dflags)) [infoTableConstrTag dflags info_table]
where
info_table = infoTable dflags (closureInfoPtr dflags closure_ptr)
cmmGetClosureType :: DynFlags -> CmmExpr -> CmmExpr
-- Takes a closure pointer, and return the closure type
-- obtained from the info table
cmmGetClosureType dflags closure_ptr
= CmmMachOp (MO_UU_Conv (halfWordWidth dflags) (wordWidth dflags)) [infoTableClosureType dflags info_table]
where
info_table = infoTable dflags (closureInfoPtr dflags closure_ptr)
infoTable :: DynFlags -> CmmExpr -> CmmExpr
-- Takes an info pointer (the first word of a closure)
-- and returns a pointer to the first word of the standard-form
-- info table, excluding the entry-code word (if present)
infoTable dflags info_ptr
| tablesNextToCode dflags = cmmOffsetB dflags info_ptr (- stdInfoTableSizeB dflags)
| otherwise = cmmOffsetW dflags info_ptr 1 -- Past the entry code pointer
infoTableConstrTag :: DynFlags -> CmmExpr -> CmmExpr
-- Takes an info table pointer (from infoTable) and returns the constr tag
-- field of the info table (same as the srt_bitmap field)
infoTableConstrTag = infoTableSrtBitmap
infoTableSrtBitmap :: DynFlags -> CmmExpr -> CmmExpr
-- Takes an info table pointer (from infoTable) and returns the srt_bitmap
-- field of the info table
infoTableSrtBitmap dflags info_tbl
= CmmLoad (cmmOffsetB dflags info_tbl (stdSrtBitmapOffset dflags)) (bHalfWord dflags)
infoTableClosureType :: DynFlags -> CmmExpr -> CmmExpr
-- Takes an info table pointer (from infoTable) and returns the closure type
-- field of the info table.
infoTableClosureType dflags info_tbl
= CmmLoad (cmmOffsetB dflags info_tbl (stdClosureTypeOffset dflags)) (bHalfWord dflags)
infoTablePtrs :: DynFlags -> CmmExpr -> CmmExpr
infoTablePtrs dflags info_tbl
= CmmLoad (cmmOffsetB dflags info_tbl (stdPtrsOffset dflags)) (bHalfWord dflags)
infoTableNonPtrs :: DynFlags -> CmmExpr -> CmmExpr
infoTableNonPtrs dflags info_tbl
= CmmLoad (cmmOffsetB dflags info_tbl (stdNonPtrsOffset dflags)) (bHalfWord dflags)
funInfoTable :: DynFlags -> CmmExpr -> CmmExpr
-- Takes the info pointer of a function,
-- and returns a pointer to the first word of the StgFunInfoExtra struct
-- in the info table.
funInfoTable dflags info_ptr
| tablesNextToCode dflags
= cmmOffsetB dflags info_ptr (- stdInfoTableSizeB dflags - sIZEOF_StgFunInfoExtraRev dflags)
| otherwise
= cmmOffsetW dflags info_ptr (1 + stdInfoTableSizeW dflags)
-- Past the entry code pointer
-- Takes the info pointer of a function, returns the function's arity
funInfoArity :: DynFlags -> CmmExpr -> CmmExpr
funInfoArity dflags iptr
= cmmToWord dflags (cmmLoadIndex dflags rep fun_info (offset `div` rep_bytes))
where
fun_info = funInfoTable dflags iptr
rep = cmmBits (widthFromBytes rep_bytes)
(rep_bytes, offset)
| tablesNextToCode dflags = ( pc_REP_StgFunInfoExtraRev_arity pc
, oFFSET_StgFunInfoExtraRev_arity dflags )
| otherwise = ( pc_REP_StgFunInfoExtraFwd_arity pc
, oFFSET_StgFunInfoExtraFwd_arity dflags )
pc = sPlatformConstants (settings dflags)
-----------------------------------------------------------------------------
--
-- Info table sizes & offsets
--
-----------------------------------------------------------------------------
stdInfoTableSizeW :: DynFlags -> WordOff
-- The size of a standard info table varies with profiling/ticky etc,
-- so we can't get it from Constants
-- It must vary in sync with mkStdInfoTable
stdInfoTableSizeW dflags
= fixedInfoTableSizeW
+ if gopt Opt_SccProfilingOn dflags
then profInfoTableSizeW
else 0
fixedInfoTableSizeW :: WordOff
fixedInfoTableSizeW = 2 -- layout, type
profInfoTableSizeW :: WordOff
profInfoTableSizeW = 2
maxStdInfoTableSizeW :: WordOff
maxStdInfoTableSizeW =
1 {- entry, when !tablesNextToCode -}
+ fixedInfoTableSizeW
+ profInfoTableSizeW
maxRetInfoTableSizeW :: WordOff
maxRetInfoTableSizeW =
maxStdInfoTableSizeW
+ 1 {- srt label -}
stdInfoTableSizeB :: DynFlags -> ByteOff
stdInfoTableSizeB dflags = stdInfoTableSizeW dflags * wORD_SIZE dflags
stdSrtBitmapOffset :: DynFlags -> ByteOff
-- Byte offset of the SRT bitmap half-word which is
-- in the *higher-addressed* part of the type_lit
stdSrtBitmapOffset dflags = stdInfoTableSizeB dflags - hALF_WORD_SIZE dflags
stdClosureTypeOffset :: DynFlags -> ByteOff
-- Byte offset of the closure type half-word
stdClosureTypeOffset dflags = stdInfoTableSizeB dflags - wORD_SIZE dflags
stdPtrsOffset, stdNonPtrsOffset :: DynFlags -> ByteOff
stdPtrsOffset dflags = stdInfoTableSizeB dflags - 2 * wORD_SIZE dflags
stdNonPtrsOffset dflags = stdInfoTableSizeB dflags - 2 * wORD_SIZE dflags + hALF_WORD_SIZE dflags
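-- A worked example (added for illustration, assuming a 64-bit target with
-- profiling off): stdInfoTableSizeW = 2, hence stdInfoTableSizeB = 16 bytes.
-- For a heap-object table the ptrs/non-ptrs half-words of the layout field
-- then sit at byte offsets 0 and 4, the closure type half-word at offset 8,
-- and the SRT bitmap half-word at offset 12 (the higher-addressed half of
-- type_lit), matching the [layout_lit, type_lit] order in mkStdInfoTable.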
| lukexi/ghc | compiler/cmm/CmmInfo.hs | bsd-3-clause | 20,843 | 2 | 19 | 5,149 | 3,772 | 2,040 | 1,732 | 298 | 7 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Mode.Common
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Common functions used by modes.
module Yi.Mode.Common (TokenBasedMode, fundamentalMode,
anyExtension, extensionOrContentsMatch,
linearSyntaxMode, hookModes,
applyModeHooks, lookupMode, styleMode,
extensionMatches, shebangParser
) where
import Lens.Micro.Platform ((%~), (&), (.~), (^.))
import Control.Applicative ((<|>))
import Control.Monad (void)
import qualified Data.Attoparsec.Text as P
import Data.Maybe (fromMaybe)
import System.FilePath (takeExtension)
import Yi.Buffer
import qualified Yi.IncrementalParse as IncrParser (scanner)
import Yi.Keymap (YiM)
import Yi.Lexer.Alex
import Yi.MiniBuffer (anyModeByNameM)
import qualified Yi.Rope as R (YiString, toText)
import Yi.Search (makeSimpleSearch)
import Yi.Style (StyleName)
import Yi.Syntax (ExtHL (ExtHL))
import Yi.Syntax.Driver (mkHighlighter)
import Yi.Syntax.OnlineTree (Tree, manyToks)
import Yi.Syntax.Tree (tokenBasedStrokes)
type TokenBasedMode tok = Mode (Tree (Tok tok))
-- TODO: Move this mode to its own module
-- | The only built-in mode of yi
fundamentalMode :: Mode syntax
fundamentalMode = emptyMode
{ modeName = "fundamental"
, modeApplies = modeAlwaysApplies
, modeIndent = const autoIndentB
, modePrettify = const fillParagraph
, modeGotoDeclaration = do
currentPoint <- pointB
currentWord <- readCurrentWordB
currentWordBeginningPoint <- regionStart <$> regionOfB unitWord
_ <- gotoLn 0
      let word = makeSimpleSearch currentWord
searchResults <- regexB Forward word
case searchResults of
(declarationRegion : _) -> do
          let searchPoint = regionStart declarationRegion
if currentWordBeginningPoint /= searchPoint
then moveTo searchPoint
else moveTo currentPoint
[] -> moveTo currentPoint
}
-- | Creates a 'TokenBasedMode' from a 'Lexer' and a function that
-- turns tokens into 'StyleName'.
linearSyntaxMode' :: Show (l s)
=> Lexer l s (Tok t) i
-> (t -> StyleName)
-> TokenBasedMode t
linearSyntaxMode' scanToken tts = fundamentalMode
& modeHLA .~ ExtHL (mkHighlighter $ IncrParser.scanner manyToks . lexer)
& modeGetStrokesA .~ tokenBasedStrokes tokenToStroke
where
tokenToStroke = fmap tts . tokToSpan
lexer = lexScanner scanToken
-- | Specialised version of 'linearSyntaxMode'' for the common case,
-- wrapping up into a 'Lexer' with 'commonLexer'.
linearSyntaxMode :: Show s => s -- ^ Starting state
-> TokenLexer AlexState s (Tok t) AlexInput
-> (t -> StyleName)
-> TokenBasedMode t
linearSyntaxMode initSt scanToken =
linearSyntaxMode' (commonLexer scanToken initSt)
styleMode :: Show (l s) => StyleLexer l s t i
-> TokenBasedMode t
styleMode l = linearSyntaxMode' (l ^. styleLexer) (l ^. tokenToStyle)
-- | Determines if the file's extension is one of the extensions in the list.
extensionMatches :: [String]
-> FilePath
-> Bool
extensionMatches extensions fileName = extension `elem` extensions'
where extension = takeExtension fileName
extensions' = ['.' : ext | ext <- extensions]
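-- For example (added illustration): extensionMatches ["hs", "lhs"] "Setup.hs"
-- is True, while extensionMatches ["hs"] "Setup.hsc" is False, since the
-- comparison is against the complete extension, including the dot.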
-- | When applied to an extensions list, creates a 'Mode.modeApplies' function.
anyExtension :: [String] -- ^ List of extensions
-> FilePath -- ^ Path to compare against
-> a -- ^ File contents. Currently unused but see
-- 'extensionOrContentsMatch'.
-> Bool
anyExtension extensions fileName _contents
= extensionMatches extensions fileName
-- | When applied to an extensions list and regular expression pattern, creates
-- a 'Mode.modeApplies' function.
extensionOrContentsMatch :: [String] -> P.Parser () -> FilePath -> R.YiString -> Bool
extensionOrContentsMatch extensions parser fileName contents
= extensionMatches extensions fileName || m
where
m = case P.parseOnly parser $ R.toText contents of
Left _ -> False
Right _ -> True
{- | Generate a parser for shebang patterns
the generated parser will match only if the shebang is at the start of a line
==== __Examples__
> shebangParser "runhaskell"
generates a parser that matches "#!\/usr\/bin\/env runhaskell\\n"
(but also "djsjfaj\\n\\n\\n\\r\\n#! \/usr\/bin\/env runhaskell \\ndkasfkda\\n\\r\\nkasfaj")
__Note:__ You can get @("runhaskell" :: Parser String)@ by using the OverloadedStrings extension
> shebangParser "python"
generates a parser that matches "#!\/usr\/bin\/env python\\n"
__Note:__ it doesn't match "#!\/usr\/bin\/env python2\\n" (that's why the newline is required)
It is also possible to use more complex parsers:
> shebangParser ("python" *> ("2" <|> "3" <|> ""))
generates a parser that matches any of:
* "#!\/usr\/bin\/env python\\n"
* "#!\/usr\/bin\/env python2\\n"
* "#!\/usr\/bin\/env python3\\n"
-}
shebangParser :: P.Parser a -> P.Parser ()
shebangParser p = void p'
where
p' = "#!" *> P.skipWhile (== ' ') *> "/usr/bin/env " *> P.skipWhile (== ' ') *> p *> P.skipWhile (== ' ') *> P.endOfLine
<|> P.skip (const True) *> P.skipWhile (not . P.isEndOfLine) *> P.skipWhile P.isEndOfLine *> p'
-- | Adds a hook to all matching hooks in a list
hookModes :: (AnyMode -> Bool) -> BufferM () -> [AnyMode] -> [AnyMode]
hookModes p h = map $ \am@(AnyMode m) ->
if p am then AnyMode (m & modeOnLoadA %~ (>> h)) else am
-- | Apply a list of mode hooks to a list of AnyModes
applyModeHooks :: [(AnyMode -> Bool, BufferM ())] -> [AnyMode] -> [AnyMode]
applyModeHooks hs ms = flip map ms $ \am -> case filter (($ am) . fst) hs of
[] -> am
ls -> onMode (modeOnLoadA %~ \x -> foldr ((>>) . snd) x ls) am
-- | Check whether a mode of the same name is already in modeTable and
-- returns the original mode, if it isn't the case.
lookupMode :: AnyMode -> YiM AnyMode
lookupMode am@(AnyMode m) = fromMaybe am <$> anyModeByNameM (modeName m)
| noughtmare/yi | yi-core/src/Yi/Mode/Common.hs | gpl-2.0 | 6,577 | 0 | 18 | 1,694 | 1,339 | 730 | 609 | 95 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Mga WebSocket | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/websocket/src/main/javahelp/org/zaproxy/zap/extension/websocket/resources/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 987 | 87 | 29 | 161 | 402 | 214 | 188 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Luxi (testLuxi) where
import Test.HUnit
import Test.QuickCheck
import Test.QuickCheck.Monadic (monadicIO, run, stop)
import Data.List
import Control.Applicative
import Control.Concurrent (forkIO)
import Control.Exception (bracket)
import qualified Text.JSON as J
import Test.Ganeti.OpCodes ()
import Test.Ganeti.Query.Language (genFilter)
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types (genReasonTrail)
import Ganeti.BasicTypes
import qualified Ganeti.Luxi as Luxi
import qualified Ganeti.UDSServer as US
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Luxi tests
$(genArbitrary ''Luxi.LuxiReq)
instance Arbitrary Luxi.LuxiOp where
arbitrary = do
lreq <- arbitrary
case lreq of
Luxi.ReqQuery -> Luxi.Query <$> arbitrary <*> genFields <*> genFilter
Luxi.ReqQueryFields -> Luxi.QueryFields <$> arbitrary <*> genFields
Luxi.ReqQueryNodes -> Luxi.QueryNodes <$> listOf genFQDN <*>
genFields <*> arbitrary
Luxi.ReqQueryGroups -> Luxi.QueryGroups <$> arbitrary <*>
arbitrary <*> arbitrary
Luxi.ReqQueryNetworks -> Luxi.QueryNetworks <$> arbitrary <*>
arbitrary <*> arbitrary
Luxi.ReqQueryInstances -> Luxi.QueryInstances <$> listOf genFQDN <*>
genFields <*> arbitrary
Luxi.ReqQueryFilters -> Luxi.QueryFilters <$> arbitrary <*> genFields
Luxi.ReqReplaceFilter -> Luxi.ReplaceFilter <$> genMaybe genUUID <*>
arbitrary <*> arbitrary <*> arbitrary <*>
genReasonTrail
Luxi.ReqDeleteFilter -> Luxi.DeleteFilter <$> genUUID
Luxi.ReqQueryJobs -> Luxi.QueryJobs <$> arbitrary <*> genFields
Luxi.ReqQueryExports -> Luxi.QueryExports <$>
listOf genFQDN <*> arbitrary
Luxi.ReqQueryConfigValues -> Luxi.QueryConfigValues <$> genFields
Luxi.ReqQueryClusterInfo -> pure Luxi.QueryClusterInfo
Luxi.ReqQueryTags -> do
kind <- arbitrary
Luxi.QueryTags kind <$> genLuxiTagName kind
Luxi.ReqSubmitJob -> Luxi.SubmitJob <$> resize maxOpCodes arbitrary
Luxi.ReqSubmitJobToDrainedQueue -> Luxi.SubmitJobToDrainedQueue <$>
resize maxOpCodes arbitrary
Luxi.ReqSubmitManyJobs -> Luxi.SubmitManyJobs <$>
resize maxOpCodes arbitrary
Luxi.ReqWaitForJobChange -> Luxi.WaitForJobChange <$> arbitrary <*>
genFields <*> pure J.JSNull <*>
pure J.JSNull <*> arbitrary
Luxi.ReqPickupJob -> Luxi.PickupJob <$> arbitrary
Luxi.ReqArchiveJob -> Luxi.ArchiveJob <$> arbitrary
Luxi.ReqAutoArchiveJobs -> Luxi.AutoArchiveJobs <$> arbitrary <*>
arbitrary
Luxi.ReqCancelJob -> Luxi.CancelJob <$> arbitrary <*> arbitrary
Luxi.ReqChangeJobPriority -> Luxi.ChangeJobPriority <$> arbitrary <*>
arbitrary
Luxi.ReqSetDrainFlag -> Luxi.SetDrainFlag <$> arbitrary
Luxi.ReqSetWatcherPause -> Luxi.SetWatcherPause <$> arbitrary
-- | Simple check that encoding/decoding of LuxiOp works.
prop_CallEncoding :: Luxi.LuxiOp -> Property
prop_CallEncoding op =
(US.parseCall (US.buildCall (Luxi.strOfOp op) (Luxi.opToArgs op))
>>= uncurry Luxi.decodeLuxiCall) ==? Ok op
-- | Server ping-pong helper.
luxiServerPong :: Luxi.Client -> IO ()
luxiServerPong c = do
msg <- Luxi.recvMsgExt c
case msg of
Luxi.RecvOk m -> Luxi.sendMsg c m >> luxiServerPong c
_ -> return ()
-- | Client ping-pong helper.
luxiClientPong :: Luxi.Client -> [String] -> IO [String]
luxiClientPong c =
mapM (\m -> Luxi.sendMsg c m >> Luxi.recvMsg c)
-- | Monadic check that, given a server socket, we can connect via a
-- client to it, and that we can send a list of arbitrary messages and
-- get back what we sent.
prop_ClientServer :: [[DNSChar]] -> Property
prop_ClientServer dnschars = monadicIO $ do
let msgs = map (map dnsGetChar) dnschars
fpath <- run $ getTempFileName "luxitest"
-- we need to create the server first, otherwise (if we do it in the
-- forked thread) the client could try to connect to it before it's
-- ready
server <- run $ Luxi.getLuxiServer False fpath
-- fork the server responder
_ <- run . forkIO $
bracket
(Luxi.acceptClient server)
(\c -> Luxi.closeClient c >> Luxi.closeServer server)
luxiServerPong
replies <- run $
bracket
(Luxi.getLuxiClient fpath)
Luxi.closeClient
(`luxiClientPong` msgs)
stop $ replies ==? msgs
-- | Check that Python and Haskell define the same Luxi requests list.
case_AllDefined :: Assertion
case_AllDefined = do
py_stdout <- runPython "from ganeti import luxi\n\
\print '\\n'.join(luxi.REQ_ALL)" "" >>=
checkPythonResult
let py_ops = sort $ lines py_stdout
hs_ops = Luxi.allLuxiCalls
extra_py = py_ops \\ hs_ops
extra_hs = hs_ops \\ py_ops
assertBool ("Luxi calls missing from Haskell code:\n" ++
unlines extra_py) (null extra_py)
assertBool ("Extra Luxi calls in the Haskell code:\n" ++
unlines extra_hs) (null extra_hs)
testSuite "Luxi"
[ 'prop_CallEncoding
, 'prop_ClientServer
, 'case_AllDefined
]
| apyrgio/ganeti | test/hs/Test/Ganeti/Luxi.hs | bsd-2-clause | 6,883 | 0 | 16 | 1,628 | 1,222 | 628 | 594 | 111 | 2 |
{-# LANGUAGE CPP, TypeFamilies #-}
-- -----------------------------------------------------------------------------
-- | This is the top-level module in the LLVM code generator.
--
module LlvmCodeGen ( llvmCodeGen, llvmFixupAsm ) where
#include "HsVersions.h"
import Llvm
import LlvmCodeGen.Base
import LlvmCodeGen.CodeGen
import LlvmCodeGen.Data
import LlvmCodeGen.Ppr
import LlvmCodeGen.Regs
import LlvmMangler
import CgUtils ( fixStgRegisters )
import Cmm
import Hoopl
import PprCmm
import BufWrite
import DynFlags
import ErrUtils
import FastString
import Outputable
import UniqSupply
import SysTools ( figureLlvmVersion )
import qualified Stream
import Control.Monad ( when )
import Data.IORef ( writeIORef )
import Data.Maybe ( fromMaybe, catMaybes )
import System.IO
-- -----------------------------------------------------------------------------
-- | Top-level of the LLVM Code generator
--
llvmCodeGen :: DynFlags -> Handle -> UniqSupply
-> Stream.Stream IO RawCmmGroup ()
-> IO ()
llvmCodeGen dflags h us cmm_stream
= do bufh <- newBufHandle h
-- Pass header
showPass dflags "LLVM CodeGen"
-- get llvm version, cache for later use
ver <- (fromMaybe defaultLlvmVersion) `fmap` figureLlvmVersion dflags
writeIORef (llvmVersion dflags) ver
-- warn if unsupported
debugTraceMsg dflags 2
(text "Using LLVM version:" <+> text (show ver))
let doWarn = wopt Opt_WarnUnsupportedLlvmVersion dflags
when (ver < minSupportLlvmVersion && doWarn) $
errorMsg dflags (text "You are using an old version of LLVM that"
<> text " isn't supported anymore!"
$+$ text "We will try though...")
when (ver > maxSupportLlvmVersion && doWarn) $
putMsg dflags (text "You are using a new version of LLVM that"
<> text " hasn't been tested yet!"
$+$ text "We will try though...")
-- run code generation
runLlvm dflags ver bufh us $
llvmCodeGen' (liftStream cmm_stream)
bFlush bufh
llvmCodeGen' :: Stream.Stream LlvmM RawCmmGroup () -> LlvmM ()
llvmCodeGen' cmm_stream
= do -- Preamble
renderLlvm pprLlvmHeader
ghcInternalFunctions
cmmMetaLlvmPrelude
-- Procedures
let llvmStream = Stream.mapM llvmGroupLlvmGens cmm_stream
_ <- Stream.collect llvmStream
-- Declare aliases for forward references
renderLlvm . pprLlvmData =<< generateExternDecls
-- Postamble
cmmUsedLlvmGens
llvmGroupLlvmGens :: RawCmmGroup -> LlvmM ()
llvmGroupLlvmGens cmm = do
-- Insert functions into map, collect data
let split (CmmData s d' ) = return $ Just (s, d')
split (CmmProc h l live g) = do
-- Set function type
let l' = case mapLookup (g_entry g) h of
Nothing -> l
Just (Statics info_lbl _) -> info_lbl
lml <- strCLabel_llvm l'
funInsert lml =<< llvmFunTy live
return Nothing
cdata <- fmap catMaybes $ mapM split cmm
{-# SCC "llvm_datas_gen" #-}
cmmDataLlvmGens cdata
{-# SCC "llvm_procs_gen" #-}
mapM_ cmmLlvmGen cmm
-- -----------------------------------------------------------------------------
-- | Do LLVM code generation on all these Cmms data sections.
--
cmmDataLlvmGens :: [(Section,CmmStatics)] -> LlvmM ()
cmmDataLlvmGens statics
= do lmdatas <- mapM genLlvmData statics
let (gss, tss) = unzip lmdatas
let regGlobal (LMGlobal (LMGlobalVar l ty _ _ _ _) _)
= funInsert l ty
regGlobal _ = return ()
mapM_ regGlobal (concat gss)
gss' <- mapM aliasify $ concat gss
renderLlvm $ pprLlvmData (concat gss', concat tss)
-- | Complete LLVM code generation phase for a single top-level chunk of Cmm.
cmmLlvmGen :: RawCmmDecl -> LlvmM ()
cmmLlvmGen cmm@CmmProc{} = do
-- rewrite assignments to global regs
dflags <- getDynFlag id
let fixed_cmm = {-# SCC "llvm_fix_regs" #-}
fixStgRegisters dflags cmm
dumpIfSetLlvm Opt_D_dump_opt_cmm "Optimised Cmm" (pprCmmGroup [fixed_cmm])
-- generate llvm code from cmm
llvmBC <- withClearVars $ genLlvmProc fixed_cmm
-- allocate IDs for info table and code, so the mangler can later
-- make sure they end up next to each other.
itableSection <- freshSectionId
_codeSection <- freshSectionId
-- pretty print
(docs, ivars) <- fmap unzip $ mapM (pprLlvmCmmDecl itableSection) llvmBC
-- Output, note down used variables
renderLlvm (vcat docs)
mapM_ markUsedVar $ concat ivars
cmmLlvmGen _ = return ()
-- -----------------------------------------------------------------------------
-- | Generate meta data nodes
--
cmmMetaLlvmPrelude :: LlvmM ()
cmmMetaLlvmPrelude = do
metas <- flip mapM stgTBAA $ \(uniq, name, parent) -> do
-- Generate / lookup meta data IDs
tbaaId <- getMetaUniqueId
setUniqMeta uniq tbaaId
parentId <- maybe (return Nothing) getUniqMeta parent
-- Build definition
return $ MetaUnamed tbaaId $ MetaStruct
[ MetaStr name
, case parentId of
Just p -> MetaNode p
Nothing -> MetaVar $ LMLitVar $ LMNullLit i8Ptr
]
renderLlvm $ ppLlvmMetas metas
-- -----------------------------------------------------------------------------
-- | Marks variables as used where necessary
--
cmmUsedLlvmGens :: LlvmM ()
cmmUsedLlvmGens = do
-- LLVM would discard variables that are internal and not obviously
-- used if we didn't provide these hints. This will generate a
-- definition of the form
--
-- @llvm.used = appending global [42 x i8*] [i8* bitcast <var> to i8*, ...]
--
  -- which is how LLVM protects them from being optimised away.
ivars <- getUsedVars
let cast x = LMBitc (LMStaticPointer (pVarLift x)) i8Ptr
ty = (LMArray (length ivars) i8Ptr)
usedArray = LMStaticArray (map cast ivars) ty
sectName = Just $ fsLit "llvm.metadata"
lmUsedVar = LMGlobalVar (fsLit "llvm.used") ty Appending sectName Nothing Constant
lmUsed = LMGlobal lmUsedVar (Just usedArray)
if null ivars
then return ()
else renderLlvm $ pprLlvmData ([lmUsed], [])
| green-haskell/ghc | compiler/llvmGen/LlvmCodeGen.hs | bsd-3-clause | 6,448 | 0 | 20 | 1,704 | 1,380 | 685 | 695 | 119 | 3 |
--------------------------------------------------------------------
-- |
-- Module : XMonad.Util.EZConfig
-- Copyright : Devin Mullins <me@twifkak.com>
-- Brent Yorgey <byorgey@gmail.com> (key parsing)
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Devin Mullins <me@twifkak.com>
--
-- Useful helper functions for amending the defaultConfig, and for
-- parsing keybindings specified in a special (emacs-like) format.
--
-- (See also "XMonad.Util.CustomKeys" in xmonad-contrib.)
--
--------------------------------------------------------------------
module XMonad.Util.EZConfig (
-- * Usage
-- $usage
-- * Adding or removing keybindings
additionalKeys, additionalKeysP,
removeKeys, removeKeysP,
additionalMouseBindings, removeMouseBindings,
-- * Emacs-style keybinding specifications
mkKeymap, checkKeymap,
mkNamedKeymap,
parseKey -- used by XMonad.Util.Paste
) where
import XMonad
import XMonad.Actions.Submap
import XMonad.Util.NamedActions
import qualified Data.Map as M
import Data.List (foldl', sortBy, groupBy, nub)
import Data.Ord (comparing)
import Data.Maybe
import Control.Arrow (first, (&&&))
import Text.ParserCombinators.ReadP
-- $usage
-- To use this module, first import it into your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Util.EZConfig
--
-- Then, use one of the provided functions to modify your
-- configuration. You can use 'additionalKeys', 'removeKeys',
-- 'additionalMouseBindings', and 'removeMouseBindings' to easily add
-- and remove keybindings or mouse bindings. You can use 'mkKeymap'
-- to create a keymap using emacs-style keybinding specifications
-- like @\"M-x\"@ instead of @(modMask, xK_x)@, or 'additionalKeysP'
-- and 'removeKeysP' to easily add or remove emacs-style keybindings.
-- If you use emacs-style keybindings, the 'checkKeymap' function is
-- provided, suitable for adding to your 'startupHook', which can warn
-- you of any parse errors or duplicate bindings in your keymap.
--
-- For more information and usage examples, see the documentation
-- provided with each exported function, and check the xmonad config
-- archive (<http://haskell.org/haskellwiki/Xmonad/Config_archive>)
-- for some real examples of use.
-- |
-- Add or override keybindings from the existing set. Example use:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `additionalKeys`
-- > [ ((mod1Mask, xK_m ), spawn "echo 'Hi, mom!' | dzen2 -p 4")
-- > , ((mod1Mask, xK_BackSpace), withFocused hide) -- N.B. this is an absurd thing to do
-- > ]
--
-- This overrides the previous definition of mod-m.
--
-- Note that, unlike in xmonad 0.4 and previous, you can't use modMask to refer
-- to the modMask you configured earlier. You must specify mod1Mask (or
-- whichever), or add your own @myModMask = mod1Mask@ line.
additionalKeys :: XConfig a -> [((ButtonMask, KeySym), X ())] -> XConfig a
additionalKeys conf keyList =
conf { keys = \cnf -> M.union (M.fromList keyList) (keys conf cnf) }
-- | Like 'additionalKeys', except using short @String@ key
-- descriptors like @\"M-m\"@ instead of @(modMask, xK_m)@, as
-- described in the documentation for 'mkKeymap'. For example:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `additionalKeysP`
-- > [ ("M-m", spawn "echo 'Hi, mom!' | dzen2 -p 4")
-- > , ("M-<Backspace>", withFocused hide) -- N.B. this is an absurd thing to do
-- > ]
additionalKeysP :: XConfig l -> [(String, X ())] -> XConfig l
additionalKeysP conf keyList =
conf { keys = \cnf -> M.union (mkKeymap cnf keyList) (keys conf cnf) }
-- |
-- Remove standard keybindings you're not using. Example use:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `removeKeys` [(mod1Mask .|. shiftMask, n) | n <- [xK_1 .. xK_9]]
removeKeys :: XConfig a -> [(ButtonMask, KeySym)] -> XConfig a
removeKeys conf keyList =
conf { keys = \cnf -> keys conf cnf `M.difference` M.fromList (zip keyList $ repeat ()) }
-- | Like 'removeKeys', except using short @String@ key descriptors
-- like @\"M-m\"@ instead of @(modMask, xK_m)@, as described in the
-- documentation for 'mkKeymap'. For example:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `removeKeysP` ["M-S-" ++ [n] | n <- ['1'..'9']]
removeKeysP :: XConfig l -> [String] -> XConfig l
removeKeysP conf keyList =
conf { keys = \cnf -> keys conf cnf `M.difference` mkKeymap cnf (zip keyList $ repeat (return ())) }
-- | Like 'additionalKeys', but for mouse bindings.
additionalMouseBindings :: XConfig a -> [((ButtonMask, Button), Window -> X ())] -> XConfig a
additionalMouseBindings conf mouseBindingsList =
conf { mouseBindings = \cnf -> M.union (M.fromList mouseBindingsList) (mouseBindings conf cnf) }
-- | Like 'removeKeys', but for mouse bindings.
removeMouseBindings :: XConfig a -> [(ButtonMask, Button)] -> XConfig a
removeMouseBindings conf mouseBindingList =
conf { mouseBindings = \cnf -> mouseBindings conf cnf `M.difference`
M.fromList (zip mouseBindingList $ repeat ()) }
--------------------------------------------------------------
-- Keybinding parsing ---------------------------------------
--------------------------------------------------------------
-- | Given a config (used to determine the proper modifier key to use)
-- and a list of @(String, X ())@ pairs, create a key map by parsing
-- the key sequence descriptions contained in the Strings. The key
-- sequence descriptions are \"emacs-style\": @M-@, @C-@, @S-@, and
-- @M\#-@ denote mod, control, shift, and mod1-mod5 (where @\#@ is
-- replaced by the appropriate number) respectively. Note that if
-- you want to make a keybinding using \'alt\' even though you use a
-- different key (like the \'windows\' key) for \'mod\', you can use
-- something like @\"M1-x\"@ for alt+x (check the output of @xmodmap@
-- to see which mod key \'alt\' is bound to). Some special keys can
-- also be specified by enclosing their name in angle brackets.
--
-- For example, @\"M-C-x\"@ denotes mod+ctrl+x; @\"S-\<Escape\>\"@
-- denotes shift-escape; @\"M1-C-\<Delete\>\"@ denotes alt+ctrl+delete
-- (assuming alt is bound to mod1, which is common).
--
-- Sequences of keys can also be specified by separating the key
-- descriptions with spaces. For example, @\"M-x y \<Down\>\"@ denotes the
-- sequence of keys mod+x, y, down. Submaps (see
-- "XMonad.Actions.Submap") will be automatically generated to
-- correctly handle these cases.
--
-- So, for example, a complete key map might be specified as
--
-- > keys = \c -> mkKeymap c $
-- > [ ("M-S-<Return>", spawn $ terminal c)
-- > , ("M-x w", spawn "xmessage 'woohoo!'") -- type mod+x then w to pop up 'woohoo!'
-- > , ("M-x y", spawn "xmessage 'yay!'") -- type mod+x then y to pop up 'yay!'
-- > , ("M-S-c", kill)
-- > ]
--
-- Alternatively, you can use 'additionalKeysP' to automatically
-- create a keymap and add it to your config.
--
-- Here is a complete list of supported special keys. Note that a few
-- keys, such as the arrow keys, have synonyms. If there are other
-- special keys you would like to see supported, feel free to submit a
-- patch, or ask on the xmonad mailing list; adding special keys is
-- quite simple.
--
-- > <Backspace>
-- > <Tab>
-- > <Return>
-- > <Pause>
-- > <Scroll_lock>
-- > <Sys_Req>
-- > <Print>
-- > <Escape>, <Esc>
-- > <Delete>
-- > <Home>
-- > <Left>, <L>
-- > <Up>, <U>
-- > <Right>, <R>
-- > <Down>, <D>
-- > <Page_Up>
-- > <Page_Down>
-- > <End>
-- > <Insert>
-- > <Break>
-- > <Space>
-- > <F1>-<F24>
-- > <KP_Space>
-- > <KP_Tab>
-- > <KP_Enter>
-- > <KP_F1>
-- > <KP_F2>
-- > <KP_F3>
-- > <KP_F4>
-- > <KP_Home>
-- > <KP_Left>
-- > <KP_Up>
-- > <KP_Right>
-- > <KP_Down>
-- > <KP_Prior>
-- > <KP_Page_Up>
-- > <KP_Next>
-- > <KP_Page_Down>
-- > <KP_End>
-- > <KP_Begin>
-- > <KP_Insert>
-- > <KP_Delete>
-- > <KP_Equal>
-- > <KP_Multiply>
-- > <KP_Add>
-- > <KP_Separator>
-- > <KP_Subtract>
-- > <KP_Decimal>
-- > <KP_Divide>
-- > <KP_0>-<KP_9>
--
-- Long list of multimedia keys. Note that not all of these keys may be
-- present in your particular setup, although most of them probably will be.
--
-- > <XF86ModeLock>
-- > <XF86MonBrightnessUp>
-- > <XF86MonBrightnessDown>
-- > <XF86KbdLightOnOff>
-- > <XF86KbdBrightnessUp>
-- > <XF86KbdBrightnessDown>
-- > <XF86Standby>
-- > <XF86AudioLowerVolume>
-- > <XF86AudioMute>
-- > <XF86AudioRaiseVolume>
-- > <XF86AudioPlay>
-- > <XF86AudioStop>
-- > <XF86AudioPrev>
-- > <XF86AudioNext>
-- > <XF86HomePage>
-- > <XF86Mail>
-- > <XF86Start>
-- > <XF86Search>
-- > <XF86AudioRecord>
-- > <XF86Calculator>
-- > <XF86Memo>
-- > <XF86ToDoList>
-- > <XF86Calendar>
-- > <XF86PowerDown>
-- > <XF86ContrastAdjust>
-- > <XF86RockerUp>
-- > <XF86RockerDown>
-- > <XF86RockerEnter>
-- > <XF86Back>
-- > <XF86Forward>
-- > <XF86Stop>
-- > <XF86Refresh>
-- > <XF86PowerOff>
-- > <XF86WakeUp>
-- > <XF86Eject>
-- > <XF86ScreenSaver>
-- > <XF86WWW>
-- > <XF86Sleep>
-- > <XF86Favorites>
-- > <XF86AudioPause>
-- > <XF86AudioMedia>
-- > <XF86MyComputer>
-- > <XF86VendorHome>
-- > <XF86LightBulb>
-- > <XF86Shop>
-- > <XF86History>
-- > <XF86OpenURL>
-- > <XF86AddFavorite>
-- > <XF86HotLinks>
-- > <XF86BrightnessAdjust>
-- > <XF86Finance>
-- > <XF86Community>
-- > <XF86AudioRewind>
-- > <XF86BackForward>
-- > <XF86Launch0>-<XF86Launch9>, <XF86LaunchA>-<XF86LaunchF>
-- > <XF86ApplicationLeft>
-- > <XF86ApplicationRight>
-- > <XF86Book>
-- > <XF86CD>
-- > <XF86Calculater>
-- > <XF86Clear>
-- > <XF86Close>
-- > <XF86Copy>
-- > <XF86Cut>
-- > <XF86Display>
-- > <XF86DOS>
-- > <XF86Documents>
-- > <XF86Excel>
-- > <XF86Explorer>
-- > <XF86Game>
-- > <XF86Go>
-- > <XF86iTouch>
-- > <XF86LogOff>
-- > <XF86Market>
-- > <XF86Meeting>
-- > <XF86MenuKB>
-- > <XF86MenuPB>
-- > <XF86MySites>
-- > <XF86New>
-- > <XF86News>
-- > <XF86OfficeHome>
-- > <XF86Open>
-- > <XF86Option>
-- > <XF86Paste>
-- > <XF86Phone>
-- > <XF86Q>
-- > <XF86Reply>
-- > <XF86Reload>
-- > <XF86RotateWindows>
-- > <XF86RotationPB>
-- > <XF86RotationKB>
-- > <XF86Save>
-- > <XF86ScrollUp>
-- > <XF86ScrollDown>
-- > <XF86ScrollClick>
-- > <XF86Send>
-- > <XF86Spell>
-- > <XF86SplitScreen>
-- > <XF86Support>
-- > <XF86TaskPane>
-- > <XF86Terminal>
-- > <XF86Tools>
-- > <XF86Travel>
-- > <XF86UserPB>
-- > <XF86User1KB>
-- > <XF86User2KB>
-- > <XF86Video>
-- > <XF86WheelButton>
-- > <XF86Word>
-- > <XF86Xfer>
-- > <XF86ZoomIn>
-- > <XF86ZoomOut>
-- > <XF86Away>
-- > <XF86Messenger>
-- > <XF86WebCam>
-- > <XF86MailForward>
-- > <XF86Pictures>
-- > <XF86Music>
-- > <XF86TouchpadToggle>
-- > <XF86_Switch_VT_1>-<XF86_Switch_VT_12>
-- > <XF86_Ungrab>
-- > <XF86_ClearGrab>
-- > <XF86_Next_VMode>
-- > <XF86_Prev_VMode>
mkKeymap :: XConfig l -> [(String, X ())] -> M.Map (KeyMask, KeySym) (X ())
mkKeymap c = M.fromList . mkSubmaps . readKeymap c
mkNamedKeymap :: XConfig l -> [(String, NamedAction)] -> [((KeyMask, KeySym), NamedAction)]
mkNamedKeymap c = mkNamedSubmaps . readKeymap c
-- | Given a list of pairs of parsed key sequences and actions,
-- group them into submaps in the appropriate way.
mkNamedSubmaps :: [([(KeyMask, KeySym)], NamedAction)] -> [((KeyMask, KeySym), NamedAction)]
mkNamedSubmaps = mkSubmaps' submapName
mkSubmaps :: [ ([(KeyMask,KeySym)], X ()) ] -> [((KeyMask, KeySym), X ())]
mkSubmaps = mkSubmaps' $ submap . M.fromList
mkSubmaps' :: (Ord a) => ([(a, c)] -> c) -> [([a], c)] -> [(a, c)]
mkSubmaps' subm binds = map combine gathered
where gathered = groupBy fstKey
. sortBy (comparing fst)
$ binds
combine [([k],act)] = (k,act)
combine ks = (head . fst . head $ ks,
subm . mkSubmaps' subm $ map (first tail) ks)
fstKey = (==) `on` (head . fst)
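-- For illustration only (not part of the original module): given two parsed
-- bindings that share a common first key, e.g. roughly
--
-- > [ ([(m, xK_x), (0, xK_y)], act1)
-- > , ([(m, xK_x), (0, xK_z)], act2) ]
--
-- (where @m@, @act1@ and @act2@ are placeholders), 'mkSubmaps' groups them
-- under the shared prefix @(m, xK_x)@ and wraps the remaining one-key
-- bindings in a single submap built with "XMonad.Actions.Submap".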
on :: (a -> a -> b) -> (c -> a) -> c -> c -> b
op `on` f = \x y -> f x `op` f y
-- | Given a configuration record and a list of (key sequence
-- description, action) pairs, parse the key sequences into lists of
-- @(KeyMask,KeySym)@ pairs. Key sequences which fail to parse will
-- be ignored.
readKeymap :: XConfig l -> [(String, t)] -> [([(KeyMask, KeySym)], t)]
readKeymap c = mapMaybe (maybeKeys . first (readKeySequence c))
where maybeKeys (Nothing,_) = Nothing
maybeKeys (Just k, act) = Just (k, act)
-- | Parse a sequence of keys, returning Nothing if there is
-- a parse failure (no parse, or ambiguous parse).
readKeySequence :: XConfig l -> String -> Maybe [(KeyMask, KeySym)]
readKeySequence c = listToMaybe . parses
where parses = map fst . filter (null.snd) . readP_to_S (parseKeySequence c)
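-- As an illustrative example (not from the original documentation): for a
-- config @c@, parsing @"M-S-x"@ should yield roughly
--
-- > Just [(modMask c .|. shiftMask, xK_x)]
--
-- while an unparseable description such as @"M-<NoSuchKey>"@ yields 'Nothing'.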
-- | Parse a sequence of key combinations separated by spaces, e.g.
-- @\"M-c x C-S-2\"@ (mod+c, x, ctrl+shift+2).
parseKeySequence :: XConfig l -> ReadP [(KeyMask, KeySym)]
parseKeySequence c = sepBy1 (parseKeyCombo c) (many1 $ char ' ')
-- | Parse a modifier-key combination such as "M-C-s" (mod+ctrl+s).
parseKeyCombo :: XConfig l -> ReadP (KeyMask, KeySym)
parseKeyCombo c = do mods <- many (parseModifier c)
k <- parseKey
return (foldl' (.|.) 0 mods, k)
-- | Parse a modifier: either M- (user-defined mod-key),
-- C- (control), S- (shift), or M#- where # is an integer
-- from 1 to 5 (mod1Mask through mod5Mask).
parseModifier :: XConfig l -> ReadP KeyMask
parseModifier c = (string "M-" >> return (modMask c))
+++ (string "C-" >> return controlMask)
+++ (string "S-" >> return shiftMask)
+++ do _ <- char 'M'
n <- satisfy (`elem` ['1'..'5'])
_ <- char '-'
return $ indexMod (read [n] - 1)
where indexMod = (!!) [mod1Mask,mod2Mask,mod3Mask,mod4Mask,mod5Mask]
-- | Parse an unmodified basic key, like @\"x\"@, @\"<F1>\"@, etc.
parseKey :: ReadP KeySym
parseKey = parseRegular +++ parseSpecial
-- | Parse a regular key name (represented by itself).
parseRegular :: ReadP KeySym
parseRegular = choice [ char s >> return k
| (s,k) <- zip ['!'..'~'] [xK_exclam..xK_asciitilde]
]
-- | Parse a special key name (one enclosed in angle brackets).
parseSpecial :: ReadP KeySym
parseSpecial = do _ <- char '<'
key <- choice [ string name >> return k
| (name,k) <- keyNames
]
_ <- char '>'
return key
-- | A list of all special key names and their associated KeySyms.
keyNames :: [(String, KeySym)]
keyNames = functionKeys ++ specialKeys ++ multimediaKeys
-- | A list pairing function key descriptor strings (e.g. @\"<F2>\"@) with
-- the associated KeySyms.
functionKeys :: [(String, KeySym)]
functionKeys = [ ('F' : show n, k)
| (n,k) <- zip ([1..24] :: [Int]) [xK_F1..] ]
-- | A list of special key names and their corresponding KeySyms.
specialKeys :: [(String, KeySym)]
specialKeys = [ ("Backspace" , xK_BackSpace)
, ("Tab" , xK_Tab)
, ("Return" , xK_Return)
, ("Pause" , xK_Pause)
, ("Scroll_lock", xK_Scroll_Lock)
, ("Sys_Req" , xK_Sys_Req)
, ("Print" , xK_Print)
, ("Escape" , xK_Escape)
, ("Esc" , xK_Escape)
, ("Delete" , xK_Delete)
, ("Home" , xK_Home)
, ("Left" , xK_Left)
, ("Up" , xK_Up)
, ("Right" , xK_Right)
, ("Down" , xK_Down)
, ("L" , xK_Left)
, ("U" , xK_Up)
, ("R" , xK_Right)
, ("D" , xK_Down)
, ("Page_Up" , xK_Page_Up)
, ("Page_Down" , xK_Page_Down)
, ("End" , xK_End)
, ("Insert" , xK_Insert)
, ("Break" , xK_Break)
, ("Space" , xK_space)
, ("KP_Space" , xK_KP_Space)
, ("KP_Tab" , xK_KP_Tab)
, ("KP_Enter" , xK_KP_Enter)
, ("KP_F1" , xK_KP_F1)
, ("KP_F2" , xK_KP_F2)
, ("KP_F3" , xK_KP_F3)
, ("KP_F4" , xK_KP_F4)
, ("KP_Home" , xK_KP_Home)
, ("KP_Left" , xK_KP_Left)
, ("KP_Up" , xK_KP_Up)
, ("KP_Right" , xK_KP_Right)
, ("KP_Down" , xK_KP_Down)
, ("KP_Prior" , xK_KP_Prior)
, ("KP_Page_Up" , xK_KP_Page_Up)
, ("KP_Next" , xK_KP_Next)
, ("KP_Page_Down", xK_KP_Page_Down)
, ("KP_End" , xK_KP_End)
, ("KP_Begin" , xK_KP_Begin)
, ("KP_Insert" , xK_KP_Insert)
, ("KP_Delete" , xK_KP_Delete)
, ("KP_Equal" , xK_KP_Equal)
, ("KP_Multiply", xK_KP_Multiply)
, ("KP_Add" , xK_KP_Add)
, ("KP_Separator", xK_KP_Separator)
, ("KP_Subtract", xK_KP_Subtract)
, ("KP_Decimal" , xK_KP_Decimal)
, ("KP_Divide" , xK_KP_Divide)
, ("KP_0" , xK_KP_0)
, ("KP_1" , xK_KP_1)
, ("KP_2" , xK_KP_2)
, ("KP_3" , xK_KP_3)
, ("KP_4" , xK_KP_4)
, ("KP_5" , xK_KP_5)
, ("KP_6" , xK_KP_6)
, ("KP_7" , xK_KP_7)
, ("KP_8" , xK_KP_8)
, ("KP_9" , xK_KP_9)
]
-- | A list of multimedia keys. If the X server does not know about a given
-- keysym, it is omitted from the list ('stringToKeysym' returns 'noSymbol'
-- in that case).
multimediaKeys :: [(String, KeySym)]
multimediaKeys = filter ((/= noSymbol) . snd) . map (id &&& stringToKeysym) $
[ "XF86ModeLock"
, "XF86MonBrightnessUp"
, "XF86MonBrightnessDown"
, "XF86KbdLightOnOff"
, "XF86KbdBrightnessUp"
, "XF86KbdBrightnessDown"
, "XF86Standby"
, "XF86AudioLowerVolume"
, "XF86AudioMute"
, "XF86AudioRaiseVolume"
, "XF86AudioPlay"
, "XF86AudioStop"
, "XF86AudioPrev"
, "XF86AudioNext"
, "XF86HomePage"
, "XF86Mail"
, "XF86Start"
, "XF86Search"
, "XF86AudioRecord"
, "XF86Calculator"
, "XF86Memo"
, "XF86ToDoList"
, "XF86Calendar"
, "XF86PowerDown"
, "XF86ContrastAdjust"
, "XF86RockerUp"
, "XF86RockerDown"
, "XF86RockerEnter"
, "XF86Back"
, "XF86Forward"
, "XF86Stop"
, "XF86Refresh"
, "XF86PowerOff"
, "XF86WakeUp"
, "XF86Eject"
, "XF86ScreenSaver"
, "XF86WWW"
, "XF86Sleep"
, "XF86Favorites"
, "XF86AudioPause"
, "XF86AudioMedia"
, "XF86MyComputer"
, "XF86VendorHome"
, "XF86LightBulb"
, "XF86Shop"
, "XF86History"
, "XF86OpenURL"
, "XF86AddFavorite"
, "XF86HotLinks"
, "XF86BrightnessAdjust"
, "XF86Finance"
, "XF86Community"
, "XF86AudioRewind"
, "XF86BackForward"
, "XF86Launch0"
, "XF86Launch1"
, "XF86Launch2"
, "XF86Launch3"
, "XF86Launch4"
, "XF86Launch5"
, "XF86Launch6"
, "XF86Launch7"
, "XF86Launch8"
, "XF86Launch9"
, "XF86LaunchA"
, "XF86LaunchB"
, "XF86LaunchC"
, "XF86LaunchD"
, "XF86LaunchE"
, "XF86LaunchF"
, "XF86ApplicationLeft"
, "XF86ApplicationRight"
, "XF86Book"
, "XF86CD"
, "XF86Calculater"
, "XF86Clear"
, "XF86Close"
, "XF86Copy"
, "XF86Cut"
, "XF86Display"
, "XF86DOS"
, "XF86Documents"
, "XF86Excel"
, "XF86Explorer"
, "XF86Game"
, "XF86Go"
, "XF86iTouch"
, "XF86LogOff"
, "XF86Market"
, "XF86Meeting"
, "XF86MenuKB"
, "XF86MenuPB"
, "XF86MySites"
, "XF86New"
, "XF86News"
, "XF86OfficeHome"
, "XF86Open"
, "XF86Option"
, "XF86Paste"
, "XF86Phone"
, "XF86Q"
, "XF86Reply"
, "XF86Reload"
, "XF86RotateWindows"
, "XF86RotationPB"
, "XF86RotationKB"
, "XF86Save"
, "XF86ScrollUp"
, "XF86ScrollDown"
, "XF86ScrollClick"
, "XF86Send"
, "XF86Spell"
, "XF86SplitScreen"
, "XF86Support"
, "XF86TaskPane"
, "XF86Terminal"
, "XF86Tools"
, "XF86Travel"
, "XF86UserPB"
, "XF86User1KB"
, "XF86User2KB"
, "XF86Video"
, "XF86WheelButton"
, "XF86Word"
, "XF86Xfer"
, "XF86ZoomIn"
, "XF86ZoomOut"
, "XF86Away"
, "XF86Messenger"
, "XF86WebCam"
, "XF86MailForward"
, "XF86Pictures"
, "XF86Music"
, "XF86TouchpadToggle"
, "XF86_Switch_VT_1"
, "XF86_Switch_VT_2"
, "XF86_Switch_VT_3"
, "XF86_Switch_VT_4"
, "XF86_Switch_VT_5"
, "XF86_Switch_VT_6"
, "XF86_Switch_VT_7"
, "XF86_Switch_VT_8"
, "XF86_Switch_VT_9"
, "XF86_Switch_VT_10"
, "XF86_Switch_VT_11"
, "XF86_Switch_VT_12"
, "XF86_Ungrab"
, "XF86_ClearGrab"
, "XF86_Next_VMode"
, "XF86_Prev_VMode" ]
-- | Given a configuration record and a list of (key sequence
-- description, action) pairs, check the key sequence descriptions
-- for validity, and warn the user (via a popup xmessage window) of
-- any unparseable or duplicate key sequences. This function is
-- appropriate for adding to your @startupHook@, and you are highly
-- encouraged to do so; otherwise, duplicate or unparseable
-- keybindings will be silently ignored.
--
-- For example, you might do something like this:
--
-- > main = xmonad $ myConfig
-- >
-- > myKeymap = [("S-M-c", kill), ...]
-- > myConfig = defaultConfig {
-- > ...
-- > keys = \c -> mkKeymap c myKeymap
-- > startupHook = return () >> checkKeymap myConfig myKeymap
-- > ...
-- > }
--
-- NOTE: the @return ()@ in the example above is very important!
-- Otherwise, you might run into problems with infinite mutual
-- recursion: the definition of myConfig depends on the definition of
-- startupHook, which depends on the definition of myConfig, ... and
-- so on. Actually, it's likely that the above example in particular
-- would be OK without the @return ()@, but making @myKeymap@ take
-- @myConfig@ as a parameter would definitely lead to
-- problems. Believe me. It, uh, happened to my friend. In... a
-- dream. Yeah. In any event, the @return () >>@ introduces enough
-- laziness to break the deadlock.
--
checkKeymap :: XConfig l -> [(String, a)] -> X ()
checkKeymap conf km = warn (doKeymapCheck conf km)
where warn ([],[]) = return ()
warn (bad,dup) = spawn $ "xmessage 'Warning:\n"
++ msg "bad" bad ++ "\n"
++ msg "duplicate" dup ++ "'"
msg _ [] = ""
msg m xs = m ++ " keybindings detected: " ++ showBindings xs
showBindings = unwords . map (("\""++) . (++"\""))
-- | Given a config and a list of (key sequence description, action)
-- pairs, check the key sequence descriptions for validity,
-- returning a list of unparseable key sequences, and a list of
-- duplicate key sequences.
doKeymapCheck :: XConfig l -> [(String,a)] -> ([String], [String])
doKeymapCheck conf km = (bad,dups)
where ks = map ((readKeySequence conf &&& id) . fst) km
bad = nub . map snd . filter (isNothing . fst) $ ks
dups = map (snd . head)
. filter ((>1) . length)
. groupBy ((==) `on` fst)
. sortBy (comparing fst)
. map (first fromJust)
. filter (isJust . fst)
$ ks
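-- Illustrative example (with placeholder actions @a@, @b@ and @c@): for
--
-- > doKeymapCheck conf [("M-x", a), ("M-x", b), ("M-<Bogus>", c)]
--
-- the result should be roughly @(["M-<Bogus>"], ["M-x"])@: one unparseable
-- key sequence and one duplicate.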
| markus1189/xmonad-contrib-710 | XMonad/Util/EZConfig.hs | bsd-3-clause | 26,039 | 0 | 16 | 8,344 | 3,684 | 2,278 | 1,406 | 327 | 3 |
module Test13 where
f (x:xs) = x : xs
g = 1 : [1,2]
| kmate/HaRe | old/testing/refacFunDef/Test13.hs | bsd-3-clause | 55 | 0 | 7 | 17 | 38 | 22 | 16 | 3 | 1 |
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
module T14164 where
data G (x :: a) = GNil | GCons (G x)
type family F (xs :: [a]) (g :: G (z :: a)) = (res :: [a]) | res -> a where
F (x:xs) GNil = x:xs
F (x:xs) (GCons rest) = x:F xs rest
| sdiehl/ghc | testsuite/tests/indexed-types/should_compile/T14164.hs | bsd-3-clause | 313 | 0 | 9 | 84 | 134 | 81 | 53 | -1 | -1 |
-- Original test case for #11627 (space_leak_001.hs)
import Data.List
main :: IO ()
main = print $ length $ show (foldl' (*) 1 [1..100000] :: Integer)
| ezyang/ghc | testsuite/tests/profiling/should_run/T11627a.hs | bsd-3-clause | 153 | 1 | 9 | 28 | 56 | 29 | 27 | 3 | 1 |
{-# LANGUAGE TypeFamilies, RankNTypes #-}
module T10899 where
class C a where
type F a
type F a = forall m. m a
| ezyang/ghc | testsuite/tests/indexed-types/should_fail/T10899.hs | bsd-3-clause | 118 | 0 | 8 | 29 | 35 | 20 | 15 | 5 | 0 |
{-# LANGUAGE TemplateHaskell, EmptyCase #-}
-- Trac #2431: empty case expression
-- now accepted
module Main where
import Language.Haskell.TH
f :: Int
f = $(caseE (litE $ CharL 'a') [])
main = print f
| urbanslug/ghc | testsuite/tests/th/TH_emptycase.hs | bsd-3-clause | 217 | 0 | 10 | 50 | 52 | 30 | 22 | 6 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances, MultiParamTypeClasses, TypeFamilies, FlexibleContexts, UndecidableInstances #-}
-- | This module exports 'RuleM', the monad used for writing
-- simplification rules.
module Futhark.Optimise.Simplifier.RuleM
( RuleM
, simplify
, cannotSimplify
, liftMaybe
)
where
import Control.Applicative
import Control.Monad.State
import Control.Monad.Trans.Maybe
import Futhark.Representation.AST
import Futhark.MonadFreshNames
import Futhark.Binder
-- | The monad in which simplification rules are evaluated.
newtype RuleM m a = RuleM (MaybeT m a)
deriving (Functor, Applicative, Monad)
instance MonadFreshNames m => MonadFreshNames (RuleM m) where
getNameSource = RuleM . lift $ getNameSource
putNameSource = RuleM . lift . putNameSource
instance (Monad m, HasScope t m) => HasScope t (RuleM m) where
lookupType = RuleM . lift . lookupType
askScope = RuleM . lift $ askScope
instance (Monad m, LocalScope t m) => LocalScope t (RuleM m) where
localScope types (RuleM m) = RuleM $ do
x <- lift $ localScope types $ runMaybeT m
MaybeT $ return x
instance MonadBinder m => MonadBinder (RuleM m) where
type Lore (RuleM m) = Lore m
mkExpAttrM pat e = RuleM $ lift $ mkExpAttrM pat e
mkLetNamesM names e = RuleM $ lift $ mkLetNamesM names e
mkBodyM bnds res = RuleM $ lift $ mkBodyM bnds res
addStm = RuleM . lift . addStm
collectStms (RuleM m) = RuleM $ MaybeT $ do
(x, bnds) <- collectStms $ runMaybeT m
case x of Nothing -> return Nothing
Just x' -> return $ Just (x', bnds)
certifying cs (RuleM m) = RuleM $ MaybeT $
certifying cs $ runMaybeT m
instance MonadBinder m => Alternative (RuleM m) where
empty = RuleM $ MaybeT $ return Nothing
RuleM m1 <|> RuleM m2 = RuleM $ do
(x, bnds) <- lift $ collectStms $ runMaybeT m1
case x of Nothing -> m2
Just x' -> do lift $ mapM_ addStm bnds
return x'
-- | Execute a 'RuleM' action. If successful, returns the result and a
-- list of new bindings. Even if the action fails, there may still be
-- a monadic effect - particularly, the name source may have been
-- modified.
simplify :: MonadBinder m =>
RuleM m a
-> m (Maybe (a, [Stm (Lore m)]))
simplify (RuleM m) = do
(x, bnds) <- collectStms $ runMaybeT m
case x of
Just x' -> return $ Just (x', bnds)
Nothing -> return Nothing
cannotSimplify :: Monad m => RuleM m a
cannotSimplify = fail "Cannot simplify"
liftMaybe :: Monad m => Maybe a -> RuleM m a
liftMaybe Nothing = fail "Nothing"
liftMaybe (Just x) = return x
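-- A minimal usage sketch (illustrative only; 'applies' and 'rewrite' below
-- are hypothetical helpers, not part of this module):
--
-- > myRule binding
-- >   | applies binding = return (rewrite binding)
-- >   | otherwise       = cannotSimplify
--
-- A rule either produces its result inside 'RuleM', or bails out with
-- 'cannotSimplify', in which case 'simplify' returns 'Nothing'.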
| ihc/futhark | src/Futhark/Optimise/Simplifier/RuleM.hs | isc | 2,642 | 0 | 15 | 608 | 841 | 424 | 417 | 56 | 2 |
-- Logic module
-- Functions for updating game state and responding to user input
module Logic(updateGameState, handleEvent) where
import State
import Piece
import Playfield
import Graphics.Gloss
import Graphics.Gloss.Interface.Pure.Game -- for Event
import System.Random
-- Piece falling velocity, in cells/second
pieceVelocity :: Float
pieceVelocity = 10
acceleratedPieceVelocity :: Float
acceleratedPieceVelocity = 30
effectivePieceVelocity :: State -> Float
effectivePieceVelocity s
  | accelerate s = acceleratedPieceVelocity
  | otherwise    = pieceVelocity
-- Time to wait before dropping piece again
effectivePiecePeriod :: State -> Float
effectivePiecePeriod s = 1.0 / (effectivePieceVelocity s)
handleEvent :: Event -> State -> State
handleEvent (EventKey (SpecialKey KeyLeft) Down _ _) s = movePiece (-2) s
handleEvent (EventKey (SpecialKey KeyRight) Down _ _) s = movePiece 2 s
handleEvent (EventKey (SpecialKey KeyDown) Down _ _) s = s {accelerate = True}
handleEvent (EventKey (SpecialKey KeyDown) Up _ _) s = s {accelerate = False}
handleEvent (EventKey (Char 'a') Down _ _) s = rotateCW s
handleEvent (EventKey (Char 's') Down _ _) s = rotateCCW s
handleEvent _ s = s
-- Moves the falling piece horizontally, if possible
movePiece :: Int -> State -> State
movePiece offset s
| canPieceBeAt (piece s) piecePos' (well s) = s {piecePos = piecePos'}
| otherwise = s
where
piecePos' = (fst (piecePos s) + offset, snd (piecePos s))
-- Transforms the falling piece, if possible
transformPiece :: (Piece -> Piece) -> State -> State
transformPiece transform s
| canPieceBeAt piece' (piecePos s) (well s) = s {piece = piece'}
| otherwise = s
where
piece' = transform (piece s)
-- Rotates the falling piece clockwise, if possible
rotateCW :: State -> State
rotateCW = transformPiece pieceCW -- I feel SO badass for doing this!
-- Rotates the falling piece counterclockwise, if possible
rotateCCW :: State -> State
rotateCCW = transformPiece pieceCCW
-- Update function passed to gloss
updateGameState :: Float -> State -> State
updateGameState t s = unityStyleUpdate (s {time = (time s + t), deltaTime = t}) -- ok, after all gloss passes dt to us
-- my update function
unityStyleUpdate :: State -> State
unityStyleUpdate s
| secondsToNextMove stateWithUpdatedClocks <= 0 = applyMove stateWithUpdatedClocks {secondsToNextMove = effectivePiecePeriod s}
| otherwise = stateWithUpdatedClocks
where
stateWithUpdatedClocks = s {secondsToNextMove = (secondsToNextMove s) - (deltaTime s)}
-- Refactored from applyMove. We also needed it to move left-right and rotate a piece
canPieceBeAt :: Piece -> (Int, Int) -> Well -> Bool
canPieceBeAt piece coord well = insidePlayfield && (not colliding)
where
insidePlayfield = validPos coord piece
colliding = pieceCollides piece coord well
-- Moves the current piece one cell down
applyMove :: State -> State
applyMove s
| nextPosInvalid = handleFullRows (fixPiece s)
| otherwise = s {piecePos = piecePos'}
where
nextPosInvalid = not (canPieceBeAt (piece s) piecePos' (well s))
piecePos' = (fst (piecePos s), snd (piecePos s) - 2)
-- Fixes the falling piece to its current position and resets the piece to a new one
fixPiece :: State -> State
fixPiece s
| ((snd (piecePos s)) > (-2)) = resetGameState s -- reset game state when 'fixing' a piece that overflows the well
| otherwise = s
{ well = renderPiece (piece s) (piecePos s) (well s)
, piece = randomPiece (fst reseed)
, piecePos = (0, 0)
, randomSeed = snd reseed
, accelerate = False -- We don't want acceleration to affect next falling piece
}
where
reseed :: (Double, StdGen)
reseed = randomR (0.0, 1.0) (randomSeed s)
-- Removes filled rows and changes the score accordingly
handleFullRows :: State -> State
handleFullRows s = s {well = fst result, score = (score s) + linesToScore (snd result)}
where result = clearAndCountFilledRows (well s)
-- Finally, it can't be called "Tetris" without the scoring system
linesToScore :: Int -> Int
linesToScore 0 = 0
linesToScore 1 = 40
linesToScore 2 = 100
linesToScore 3 = 300
linesToScore 4 = 1200
linesToScore _ = error "Invalid cleared Line count"
| mgeorgoulopoulos/TetrisHaskellWeekend | Logic.hs | mit | 4,308 | 0 | 13 | 948 | 1,199 | 630 | 569 | 74 | 1 |
-- Tenn1518's XMonad configuration
import XMonad
import XMonad.Actions.Warp
import XMonad.Actions.Commands
import XMonad.Actions.RotSlaves
import XMonad.Util.EZConfig
import XMonad.Util.Ungrab
import XMonad.Util.Loggers
import qualified XMonad.StackSet as W
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.EwmhDesktops
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.StatusBar
import XMonad.Hooks.StatusBar.PP
import XMonad.Hooks.DynamicLog
import XMonad.Layout.NoBorders
import XMonad.Layout.Spacing
import XMonad.Layout.TwoPane
import XMonad.Layout.TwoPanePersistent
main :: IO ()
main = xmonad . ewmhFullscreen . ewmh . docks . withSB mySB $ myConf
where
mySB = statusBarProp "xmobar ~/.config/xmobar/xmobarrc" (pure myXmobarPP)
myConf = def
{ modMask = mod4Mask
, terminal = "alacritty -e zsh -c 'tmux a || tmux'"
, focusFollowsMouse = False
, borderWidth = 3
, normalBorderColor = "#000000"
, focusedBorderColor = "#bd93f9"
, layoutHook = myLayoutHook
, manageHook = myManageHook
, logHook = dynamicLog
}
`additionalKeysP`
[ ("M-S-e" , spawn "emacsclient -c -n -a 'emacs'")
, ("M-<Space>" , spawn "rofi -modi windowcd,run -show combi --combi-modi windowcd,drun")
, ("M-<Tab>" , sendMessage NextLayout)
, ("M-S-<Tab>" , sendMessage FirstLayout)
, ("M-S-;" , commands >>= runCommand)
-- Banish or beckon cursor, akin to Stump
, ("M-S-b" , banishScreen LowerRight)
, ("M-b" , warpToWindow 0.5 0.5)
-- Rotate slave windows (TODO: don't clobber monitor keybinding)
, ("M-r" , rotSlavesUp)
, ("M-S-r" , rotSlavesDown)
-- Gaps
, ("M-g" , toggleWindowSpacingEnabled)
, ("M-[" , decScreenWindowSpacing 2)
, ("M-]" , incScreenWindowSpacing 2)
-- brightness
, ("<XF86MonBrightnessUp>" , spawn "brightnessctl s +4%")
, ("<XF86MonBrightnessDown>", spawn "brightnessctl s 4%-")
-- volume
, ("<XF86AudioLowerVolume>" , spawn "pamixer -d 4; vol-info")
, ("<XF86AudioRaiseVolume>" , spawn "pamixer -i 4; vol-info")
, ("<XF86AudioMute>" , spawn "pamixer -t; vol-info")
]
commands :: X [(String, X())]
commands = defaultCommands
-- sending XMonad state to XMobar
myXmobarPP :: PP
myXmobarPP = def
{ ppLayout = const ""
, ppCurrent = wrap " " "" . xmobarBorder "Bottom" "#8be9fd" 3
, ppHidden = white . wrap " " ""
, ppHiddenNoWindows = lowWhite . wrap " " ""
, ppTitle = shorten 50
, ppSep = " · "
}
where
white, lowWhite :: String -> String
white = xmobarColor "#f8f8f2" ""
lowWhite = xmobarColor "#bbbbbb" ""
myLayoutHook = smartBorders
$ avoidStruts
$ spacingWithEdge 3 (myTall
||| myTwoPane)
||| Full
where
-- Two panes, new windows split slave pane
myTall = Tall nmaster delta ratio
-- Two splits, new windows swap into slave pane
myTwoPane = TwoPane delta gratio
nmaster = 1
delta = 3/100
ratio = 1/2
gratio = 56/100
-- If adding more in the future:
-- myManageHook = (otherStuff) <+> (fmap not isDialog --> doF avoidMaster)
myManageHook = fmap not isDialog --> doF avoidMaster
-- Windows do not displace master window when it is focused
avoidMaster :: W.StackSet i l a s sd -> W.StackSet i l a s sd
avoidMaster = W.modify' $ \c -> case c of
W.Stack t [] (r:rs) -> W.Stack t [r] rs
  _ -> c
| Tenn1518/dotfiles | config/xmonad/xmonad.hs | mit | 3,744 | 5 | 12 | 1,101 | 797 | 459 | 338 | 78 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : GitBak
-- Copyright : (c) Agorgianitis Loukas, 2015
-- License : MIT
--
-- Maintainer : Agorgianitis Loukas <agorglouk@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-- Main of gitbak executable
--
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Options.Applicative
import System.FilePath
import System.Directory
import System.IO
import System.Process
import System.Exit
import qualified Control.Lens as Ln
import qualified Network.Wreq as Wr
import qualified Data.ByteString.Char8 as B8
---------------------------------------------------------------------------
-- Command Line Options
---------------------------------------------------------------------------
-- Helper
withInfo :: Parser a -> String -> ParserInfo a
opts `withInfo` desc = info (helper <*> opts) $ progDesc desc
data Options = Options { user :: String, ghAPIKey :: Maybe String }
-- Main parser
parseOptions :: Parser Options
parseOptions = Options
<$> argument str (metavar "USER" <> help "The GitHub user to clone repos from")
<*> optional (argument str (metavar "APIKEY" <> help "The optional GitHub API key to enable more API requests per minute"))
-- The main description generator
parseOptionsInfo :: ParserInfo Options
parseOptionsInfo = info (helper <*> parseOptions)
(fullDesc
<> header "GitBak - A GitHub mass clone utility")
---------------------------------------------------------------------------
-- Deserializing
---------------------------------------------------------------------------
-- The data structure that holds the information fetched for a single repo
data RepoInfo = RepoInfo { repoName :: String , repoLink :: String } deriving Show
instance FromJSON RepoInfo where
parseJSON (Object v) = RepoInfo <$>
v .: "name" <*>
v .: "clone_url"
parseJSON _ = mempty
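-- Illustrative example (not part of the original source): decoding a minimal
-- fragment of a GitHub API response, e.g.
--
-- > decode "{\"name\":\"gitbak\",\"clone_url\":\"https://github.com/u/gitbak.git\"}"
-- >   :: Maybe RepoInfo
--
-- should yield @Just (RepoInfo "gitbak" "https://github.com/u/gitbak.git")@.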
---------------------------------------------------------------------------
-- Actions
---------------------------------------------------------------------------
-- Gathers a RepoInfo list for the given username, using the optional API key
gatherRepoList :: String -> Maybe String -> IO [RepoInfo]
gatherRepoList name apiKey = do
let initUrl = "https://api.github.com/users/" ++ name ++ "/repos"
let opts = Wr.defaults
let opts2 = case apiKey of
Just key -> opts Ln.& Wr.header "Authorization" Ln..~ [B8.pack $ "token " ++ key]
Nothing -> opts
let getRepoLinks url progress =
Wr.getWith opts2 url >>= (\x ->
let body = x Ln.^. Wr.responseBody
restLink = x Ln.^? Wr.responseLink "rel" "next" . Wr.linkURL
in case decode body :: Maybe [RepoInfo] of
Nothing -> return []
Just v -> do
let newProgress = progress + length v
putStr $ "\rTotal repos: " ++ show newProgress
hFlush stdout
rest <- case restLink of
Nothing -> return []
Just l -> getRepoLinks (B8.unpack l) newProgress
return $ v ++ rest)
putStr $ "Total repos: " ++ show (0 :: Int)
hFlush stdout
repoLinks <- getRepoLinks initUrl 0
putStr "\n"
return repoLinks
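-- Usage sketch (illustrative; "someuser" is a placeholder):
--
-- > repos <- gatherRepoList "someuser" Nothing
-- > mapM_ (putStrLn . repoLink) repos
--
-- Pagination is followed through the "next" relation of the response Link
-- header, so the returned list should cover every page of results.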
-- Clones a git repository using the git executable
cloneGitRepo :: RepoInfo -> IO ExitCode
cloneGitRepo inf = system $ "git clone " ++ repoLink inf ++ " " ++ repoName inf
-- Returns a list of all the given directory contents recursively
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topdir = do
names <- getDirectoryContents topdir
let properNames = filter (`notElem` [".", ".."]) names
paths <- mapM (\x -> do
let path = topdir </> x
isDir <- doesDirectoryExist path
if isDir
then (++ [path]) <$> getRecursiveContents path
else return [path]
) properNames
return (concat paths)
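-- Usage sketch (illustrative; "myrepo" is a placeholder path):
--
-- > getRecursiveContents "myrepo" >>= mapM_ putStrLn
--
-- which prints every file and directory found below "myrepo".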
-- Sets write permission of a file or folder to true
makeWritable :: FilePath -> IO ()
makeWritable path = do
p <- getPermissions path
setPermissions path (p {writable = True})
-- Archives a given folder using the tar util
zipFolder :: FilePath -> IO ExitCode
zipFolder name =
system $ "tar czvf " ++ name ++ ".tar.gz " ++ name
---------------------------------------------------------------------------
-- Entrypoint
---------------------------------------------------------------------------
main :: IO ()
main = do
opts <- execParser parseOptionsInfo
putStrLn "Fetching repos..."
repos <- gatherRepoList (user opts) (ghAPIKey opts)
forM_ repos (\x -> do
let name = repoName x
-- Clone
putStrLn $ "Cloning " ++ name ++ "..."
_ <- cloneGitRepo x
-- Zip
putStrLn $ "Archiving " ++ name ++ "..."
_ <- zipFolder name
-- Delete cloned folder
getRecursiveContents name >>= mapM_ makeWritable
removeDirectoryRecursive name)
| ElArtista/GitBak | src/Main.hs | mit | 5,319 | 0 | 27 | 1,303 | 1,150 | 586 | 564 | -1 | -1 |
-- Section 6
import Data.List
import qualified Data.Map as Map
import Data.Char
numUniques :: (Eq a) => [a] -> Int
numUniques = length . nub
wordNums :: String -> [(String, Int)]
wordNums = map (\ws -> (head ws, length ws)) . group . sort . words
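-- For example (illustrative):
--
-- > wordNums "wa wa wee wa"  ==  [("wa",3),("wee",1)]
--
-- since sorting brings equal words together before 'group' counts them.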
digitSum :: Int -> Int
digitSum = sum . map digitToInt . show
firstToInt :: Int -> Maybe Int
firstToInt n = find (\x -> digitSum x == n) [1..]
findKey :: (Eq k) => k -> [(k, v)] -> v
findKey key xs = snd . head . filter (\(k, v) -> key == k) $ xs
betterfindKey :: (Eq k) => k -> [(k, v)] -> Maybe v
betterfindKey key [] = Nothing
betterfindKey key ((k,v):xs)
| key == k = Just v
| otherwise = betterfindKey key xs
findKey' :: (Eq k) => k -> [(k,v)] -> Maybe v
findKey' key xs = foldr
(\(k,v) acc -> if key == k then Just v else acc)
Nothing xs
phoneBook :: Map.Map String String
phoneBook = Map.fromList $
[("betty", "555-1938")
,("bonnie", "42-2332")
,("paty","44420-323")
,("fasn","78-203")
,("frontia","7-23-32")
]
string2digits :: String -> [Int]
string2digits = map digitToInt . filter isDigit
phoneBookToMap :: (Ord k) => [(k,String)] -> Map.Map k String
phoneBookToMap xs = Map.fromListWith add xs
where add number1 number2 = number1 ++ ", " ++ number2
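-- Illustrative example: duplicate keys are combined with 'add', so roughly
--
-- > phoneBookToMap [("betty","555-1938"),("betty","342-2492")]
-- >   == Map.fromList [("betty","342-2492, 555-1938")]
--
-- (the ordering inside the combined string follows how 'fromListWith' folds
-- the input list).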
| ymkjp/Algorithms-and-Data-Structures | 6.hs | mit | 1,230 | 11 | 12 | 246 | 600 | 328 | 272 | 34 | 2 |
{-# LANGUAGE GADTs #-}
module Main where
import Test.Framework.Runners.Console
import Test.Framework.Providers.API
import Test.Framework.Providers.HUnit
import Test.HUnit
import Data.List
import Data.Traversable
--import LProperties
import LUnitTests
main = do
unitTests <- LUnitTests.tests
defaultMain [unitTests] --, LProperties.tests]
| joshcough/L5-Haskell | test/main.hs | mit | 348 | 0 | 8 | 43 | 69 | 43 | 26 | 12 | 1 |